feat(logging): add configurable logging with file output support
Introduce --log-level and --log-file CLI arguments. Add execution time tracking and detailed logs across all modules.
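The diff below touches only the pipeline module; the CLI wiring for the new --log-level and --log-file arguments is not shown here. A minimal sketch of how those flags could be mapped onto the standard logging module (the argument names come from the commit message; the configure_logging helper, parser setup, and handler choices are assumptions, not the actual implementation):

import argparse
import logging


def configure_logging(argv: list[str] | None = None) -> argparse.Namespace:
    # Hypothetical helper: parse --log-level/--log-file and configure the root logger.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--log-level",
        default="INFO",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
    )
    parser.add_argument(
        "--log-file",
        default=None,
        help="Optional path; when omitted, logs go to stderr only",
    )
    args = parser.parse_args(argv)

    # Always log to stderr; add a file handler only when --log-file is given.
    handlers: list[logging.Handler] = [logging.StreamHandler()]
    if args.log_file:
        handlers.append(logging.FileHandler(args.log_file, encoding="utf-8"))
    logging.basicConfig(
        level=getattr(logging, args.log_level),
        format="%(asctime)s %(levelname)s %(name)s: %(message)s",
        handlers=handlers,
    )
    return args

With a root configuration like this in place, the module-level logger = logging.getLogger(__name__) instances added below inherit the chosen level and handlers without any per-module setup.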
@@ -4,10 +4,15 @@ from dataclasses import dataclass
from pathlib import Path
from shutil import copy2, rmtree
import json
import logging
from time import perf_counter

from .scanner import ScanResult, crawl_dependencies, find_entry_file, read_text, strip_comment_out_blocks


logger = logging.getLogger(__name__)


@dataclass(slots=True)
class PipelineReport:
    project_root: Path
@@ -52,19 +57,41 @@ class ProjectPipeline:
        self.force = force

    def run(self, analyzer=None) -> PipelineReport:
        started = perf_counter()
        logger.info(
            "Starting pipeline: project_root=%s output_root=%s entry=%s analysis=%s force=%s",
            self.project_root,
            self.output_root,
            self.entry_name,
            analyzer is not None,
            self.force,
        )
        entry_file = find_entry_file(self.project_root, self.entry_name)
        entry_rel = entry_file.relative_to(self.project_root)
        logger.debug("Using entry file %s", entry_rel.as_posix())

        initial_scan = crawl_dependencies(self.project_root, entry_file)
        initial_rel_files = self._relative_files(initial_scan, self.project_root)
        logger.info(
            "Initial scan complete: files=%d warnings=%d",
            len(initial_rel_files),
            len(initial_scan.warnings),
        )

        self._prepare_output_root()
        self._copy_files(initial_rel_files)
        cleaned_files = self._clean_copied_xaml_files(initial_rel_files)
        logger.info("Copied %d files and cleaned %d XAML files", len(initial_rel_files), len(cleaned_files))

        final_scan = crawl_dependencies(self.code_root, self.code_root / entry_rel)
        final_rel_files = self._relative_files(final_scan, self.code_root)
        pruned_files = self._prune_unused_files(initial_rel_files, final_rel_files)
        logger.info(
            "Final scan complete: files=%d warnings=%d pruned=%d",
            len(final_rel_files),
            len(final_scan.warnings),
            len(pruned_files),
        )

        analysis_files = self._write_analysis(final_rel_files, analyzer)
        warnings = initial_scan.warnings + final_scan.warnings
@@ -83,15 +110,24 @@ class ProjectPipeline:
            analysis_files=analysis_files,
        )
        self._write_report_files(report)
        logger.info(
            "Pipeline completed in %.2fs: final_files=%d analysis_files=%d warnings=%d",
            perf_counter() - started,
            len(report.final_files),
            len(report.analysis_files),
            len(report.warnings),
        )
        return report

    def _prepare_output_root(self) -> None:
        if self.output_root.exists():
            if not self.force:
                raise FileExistsError(f"Output directory already exists: {self.output_root}")
            logger.info("Removing existing output directory because force=True: %s", self.output_root)
            rmtree(self.output_root)
        self.code_root.mkdir(parents=True, exist_ok=True)
        self.docs_root.mkdir(parents=True, exist_ok=True)
        logger.debug("Prepared output directories: code=%s docs=%s", self.code_root, self.docs_root)

    def _copy_files(self, relative_files: list[Path]) -> None:
        for relative_path in relative_files:
@@ -99,6 +135,7 @@ class ProjectPipeline:
            destination = self.code_root / relative_path
            destination.parent.mkdir(parents=True, exist_ok=True)
            copy2(source, destination)
            logger.debug("Copied file: %s -> %s", source, destination)

    def _clean_copied_xaml_files(self, relative_files: list[Path]) -> list[Path]:
        cleaned: list[Path] = []
@@ -111,6 +148,7 @@ class ProjectPipeline:
            if updated != original:
                output_file.write_text(updated, encoding="utf-8")
                cleaned.append(relative_path)
                logger.debug("Removed CommentOut blocks from %s", output_file)
        return cleaned

    def _prune_unused_files(self, initial_files: list[Path], final_files: list[Path]) -> list[Path]:
@@ -123,6 +161,7 @@ class ProjectPipeline:
            if target.exists():
                target.unlink()
                pruned.append(relative_path)
                logger.debug("Pruned unreachable file: %s", target)
        self._cleanup_empty_dirs()
        return pruned

@@ -136,9 +175,11 @@ class ProjectPipeline:
            if any(directory.iterdir()):
                continue
            directory.rmdir()
            logger.debug("Removed empty directory: %s", directory)

    def _write_analysis(self, final_files: list[Path], analyzer) -> list[Path]:
        if analyzer is None:
            logger.info("Skipping Gemini analysis because analyzer is disabled")
            return []

        output_files: list[Path] = []
@@ -149,11 +190,13 @@ class ProjectPipeline:
            analysis_path.parent.mkdir(parents=True, exist_ok=True)
            analysis_path.write_text(analysis, encoding="utf-8")
            output_files.append(Path(f"{relative_path.as_posix()}.analysis.md"))
            logger.debug("Wrote analysis file: %s", analysis_path)
        return output_files

    def _write_report_files(self, report: PipelineReport) -> None:
        (self.docs_root / "manifest.json").write_text(report.to_json(), encoding="utf-8")
        (self.docs_root / "OVERVIEW.md").write_text(self._build_overview(report), encoding="utf-8")
        logger.debug("Wrote report files to %s", self.docs_root)

    def _build_overview(self, report: PipelineReport) -> str:
        warnings = "\n".join(f"- {item}" for item in report.warnings) or "- 无"