feat: add initial uipath explainator implementation
Scaffold the project with pyproject.toml and environment configuration. Implement core modules including the CLI, Gemini integration, and the scanner.
This commit is contained in:
181
src/uipath_explainator/pipeline.py
Normal file
181
src/uipath_explainator/pipeline.py
Normal file
@@ -0,0 +1,181 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from shutil import copy2, rmtree
|
||||
import json
|
||||
|
||||
from .scanner import ScanResult, crawl_dependencies, find_entry_file, read_text, strip_comment_out_blocks
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class PipelineReport:
|
||||
project_root: Path
|
||||
output_root: Path
|
||||
entry_file: Path
|
||||
initial_files: list[Path]
|
||||
final_files: list[Path]
|
||||
pruned_files: list[Path]
|
||||
cleaned_files: list[Path]
|
||||
warnings: list[str]
|
||||
analysis_files: list[Path]
|
||||
|
||||
def to_json(self) -> str:
|
||||
return json.dumps(
|
||||
{
|
||||
"project_root": self.project_root.as_posix(),
|
||||
"output_root": self.output_root.as_posix(),
|
||||
"entry_file": self.entry_file.as_posix(),
|
||||
"initial_files": [item.as_posix() for item in self.initial_files],
|
||||
"final_files": [item.as_posix() for item in self.final_files],
|
||||
"pruned_files": [item.as_posix() for item in self.pruned_files],
|
||||
"cleaned_files": [item.as_posix() for item in self.cleaned_files],
|
||||
"warnings": self.warnings,
|
||||
"analysis_files": [item.as_posix() for item in self.analysis_files],
|
||||
},
|
||||
ensure_ascii=False,
|
||||
indent=2,
|
||||
)
|
||||
|
||||
|
||||
class ProjectPipeline:
|
||||
def __init__(self, project_root: Path, output_root: Path, entry_name: str, force: bool = False) -> None:
|
||||
self.project_root = project_root.resolve()
|
||||
self.output_root = output_root.resolve()
|
||||
self.entry_name = entry_name
|
||||
self.force = force
|
||||
|
||||
def run(self, analyzer=None) -> PipelineReport:
|
||||
entry_file = find_entry_file(self.project_root, self.entry_name)
|
||||
entry_rel = entry_file.relative_to(self.project_root)
|
||||
|
||||
initial_scan = crawl_dependencies(self.project_root, entry_file)
|
||||
initial_rel_files = self._relative_files(initial_scan, self.project_root)
|
||||
|
||||
self._prepare_output_root()
|
||||
self._copy_files(initial_rel_files)
|
||||
cleaned_files = self._clean_copied_xaml_files(initial_rel_files)
|
||||
|
||||
final_scan = crawl_dependencies(self.output_root, self.output_root / entry_rel)
|
||||
final_rel_files = self._relative_files(final_scan, self.output_root)
|
||||
pruned_files = self._prune_unused_files(initial_rel_files, final_rel_files)
|
||||
|
||||
analysis_files = self._write_analysis(final_rel_files, analyzer)
|
||||
warnings = initial_scan.warnings + final_scan.warnings
|
||||
|
||||
report = PipelineReport(
|
||||
project_root=self.project_root,
|
||||
output_root=self.output_root,
|
||||
entry_file=entry_rel,
|
||||
initial_files=initial_rel_files,
|
||||
final_files=final_rel_files,
|
||||
pruned_files=pruned_files,
|
||||
cleaned_files=cleaned_files,
|
||||
warnings=warnings,
|
||||
analysis_files=analysis_files,
|
||||
)
|
||||
self._write_report_files(report)
|
||||
return report
|
||||
|
||||
def _prepare_output_root(self) -> None:
|
||||
if self.output_root.exists():
|
||||
if not self.force:
|
||||
raise FileExistsError(f"Output directory already exists: {self.output_root}")
|
||||
rmtree(self.output_root)
|
||||
self.output_root.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _copy_files(self, relative_files: list[Path]) -> None:
|
||||
for relative_path in relative_files:
|
||||
source = self.project_root / relative_path
|
||||
destination = self.output_root / relative_path
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
copy2(source, destination)
|
||||
|
||||
def _clean_copied_xaml_files(self, relative_files: list[Path]) -> list[Path]:
|
||||
cleaned: list[Path] = []
|
||||
for relative_path in relative_files:
|
||||
if relative_path.suffix.lower() != ".xaml":
|
||||
continue
|
||||
output_file = self.output_root / relative_path
|
||||
original = read_text(output_file)
|
||||
updated = strip_comment_out_blocks(original)
|
||||
if updated != original:
|
||||
output_file.write_text(updated, encoding="utf-8")
|
||||
cleaned.append(relative_path)
|
||||
return cleaned
|
||||
|
||||
def _prune_unused_files(self, initial_files: list[Path], final_files: list[Path]) -> list[Path]:
|
||||
final_set = set(final_files)
|
||||
pruned: list[Path] = []
|
||||
for relative_path in initial_files:
|
||||
if relative_path in final_set:
|
||||
continue
|
||||
target = self.output_root / relative_path
|
||||
if target.exists():
|
||||
target.unlink()
|
||||
pruned.append(relative_path)
|
||||
self._cleanup_empty_dirs()
|
||||
return pruned
|
||||
|
||||
def _cleanup_empty_dirs(self) -> None:
|
||||
directories = sorted(
|
||||
[path for path in self.output_root.rglob("*") if path.is_dir()],
|
||||
key=lambda item: len(item.parts),
|
||||
reverse=True,
|
||||
)
|
||||
for directory in directories:
|
||||
if any(directory.iterdir()):
|
||||
continue
|
||||
directory.rmdir()
|
||||
|
||||
def _write_analysis(self, final_files: list[Path], analyzer) -> list[Path]:
|
||||
if analyzer is None:
|
||||
return []
|
||||
|
||||
output_files: list[Path] = []
|
||||
for relative_path in self._ordered_files(final_files):
|
||||
content = read_text(self.output_root / relative_path)
|
||||
analysis = analyzer.analyze(relative_path, content)
|
||||
analysis_path = self.output_root / f"{relative_path.as_posix()}.analysis.md"
|
||||
analysis_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
analysis_path.write_text(analysis, encoding="utf-8")
|
||||
output_files.append(Path(f"{relative_path.as_posix()}.analysis.md"))
|
||||
return output_files
|
||||
|
||||
def _write_report_files(self, report: PipelineReport) -> None:
|
||||
(self.output_root / "manifest.json").write_text(report.to_json(), encoding="utf-8")
|
||||
(self.output_root / "OVERVIEW.md").write_text(self._build_overview(report), encoding="utf-8")
|
||||
|
||||
def _build_overview(self, report: PipelineReport) -> str:
|
||||
warnings = "\n".join(f"- {item}" for item in report.warnings) or "- 无"
|
||||
pruned = "\n".join(f"- {item.as_posix()}" for item in report.pruned_files) or "- 无"
|
||||
analyses = "\n".join(f"- {item.as_posix()}" for item in report.analysis_files) or "- 未启用 Gemini 分析"
|
||||
return f"""# UiPath Explainator Overview
|
||||
|
||||
- Project Root: `{report.project_root.as_posix()}`
|
||||
- Output Root: `{report.output_root.as_posix()}`
|
||||
- Entry File: `{report.entry_file.as_posix()}`
|
||||
- Initial Files: {len(report.initial_files)}
|
||||
- Final Files: {len(report.final_files)}
|
||||
- Cleaned XAML Files: {len(report.cleaned_files)}
|
||||
- Pruned Files: {len(report.pruned_files)}
|
||||
- Analysis Files: {len(report.analysis_files)}
|
||||
|
||||
## Final Files
|
||||
{chr(10).join(f"- {item.as_posix()}" for item in report.final_files)}
|
||||
|
||||
## Pruned Files
|
||||
{pruned}
|
||||
|
||||
## Analysis Files
|
||||
{analyses}
|
||||
|
||||
## Warnings
|
||||
{warnings}
|
||||
"""
|
||||
|
||||
def _relative_files(self, scan: ScanResult, root: Path) -> list[Path]:
|
||||
return sorted(path.relative_to(root) for path in scan.files)
|
||||
|
||||
def _ordered_files(self, paths: list[Path]) -> list[Path]:
|
||||
return sorted(paths, key=lambda item: (item.suffix.lower() != ".xaml", item.as_posix().lower()))
|
||||
Reference in New Issue
Block a user