-
Notifications
You must be signed in to change notification settings - Fork 10
Expand file tree
/
Copy path__main__.py
More file actions
executable file
·127 lines (96 loc) · 4.3 KB
/
__main__.py
File metadata and controls
executable file
·127 lines (96 loc) · 4.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
#!/usr/bin/env python3
"""
Entry point for sync-ai-rules: uses a plugin architecture to parse rule files
and generate synchronized documentation.
"""
import os
from pathlib import Path
from typing import Dict, List
from sync_ai_rules.core.plugin_manager import PluginManager
from sync_ai_rules.core.rule_metadata import RuleMetadata
from sync_ai_rules.file_updater import update_documentation_file
_GITATTRIBUTES_HEADER = "# Auto-generated by sync-ai-rules hook. Do not edit.\n"
def get_category(file_path: str, source_dir: str) -> str:
    """Derive a rule's category from its location under *source_dir*.

    The category is the directory of *file_path* relative to *source_dir*;
    files sitting directly in *source_dir* fall into the "root" category.
    """
    parent = os.path.dirname(os.path.relpath(file_path, source_dir))
    if parent in ("", "."):
        return "root"
    return parent
def group_by_category(rules: List[RuleMetadata]) -> Dict[str, List[RuleMetadata]]:
    """Bucket *rules* into a dict keyed by each rule's ``category`` attribute."""
    grouped: Dict[str, List[RuleMetadata]] = {}
    for rule in rules:
        if rule.category not in grouped:
            grouped[rule.category] = []
        grouped[rule.category].append(rule)
    return grouped
def scan_and_parse(parser, source_dir: str, project_root: str) -> List[RuleMetadata]:
    """Walk *source_dir* and collect metadata for every file *parser* accepts.

    Any directory whose path contains a ``generated`` or ``personal``
    component is skipped entirely. Returns an empty list when *source_dir*
    does not exist.
    """
    collected: List[RuleMetadata] = []
    if not os.path.exists(source_dir):
        return collected
    excluded = {"generated", "personal"}
    for root, _, filenames in os.walk(source_dir):
        # Never pull rules out of generated or personal trees.
        if excluded & set(Path(root).parts):
            continue
        for name in filenames:
            path = os.path.join(root, name)
            if parser.can_parse(path):
                context = {
                    "project_root": project_root,
                    "relative_path": os.path.relpath(path, project_root),
                    "category": get_category(path, source_dir),
                }
                parsed = parser.parse(path, context)
                if parsed:
                    collected.append(parsed)
    return collected
def _write_gitattributes(project_root: str, patterns: List[str]) -> None:
    """Rewrite ``.gitattributes`` at *project_root*, marking generated files.

    Every pattern is tagged ``linguist-generated``; the file is regenerated
    from scratch on each run, as the header comment warns.
    """
    body = _GITATTRIBUTES_HEADER + "".join(
        f"{pattern} linguist-generated\n" for pattern in patterns
    )
    target = os.path.join(project_root, ".gitattributes")
    with open(target, "w", encoding="utf-8") as handle:
        handle.write(body)
def main():
    """Main orchestration: load pipelines → parse → generate → update files."""
    project_root = str(Path.cwd())
    plugin_dir = os.path.dirname(os.path.abspath(__file__))

    manager = PluginManager()
    manager.load_plugins(plugin_dir)

    print()
    for pipeline in manager.pipelines:
        print(f"Processing pipeline: {pipeline.name}")

        # Gather rules from every source directory this pipeline declares.
        rules: List[RuleMetadata] = []
        for rel_dir in pipeline.parser.source_directories:
            print(f" Scanning {rel_dir}...")
            rules.extend(
                scan_and_parse(
                    pipeline.parser,
                    os.path.join(project_root, rel_dir),
                    project_root,
                )
            )

        if not rules:
            print(" No rules found, skipping")
            continue

        grouped = group_by_category(rules)
        print(f" Found {len(rules)} rules in {len(grouped)} categories")

        # Multi-file generators write their own outputs; single-file ones
        # splice generated content into existing docs between section markers.
        if pipeline.generator.is_multi_file:
            pipeline.generator.generate_files(grouped, project_root)
        else:
            rendered = pipeline.generator.generate(grouped, {})
            for filename in pipeline.generator.default_filenames:
                target = os.path.join(project_root, filename)
                ok, message = update_documentation_file(
                    target, rendered, pipeline.generator.get_section_markers()
                )
                print(f" {'✓' if ok else '✗'} {message}")

    # Flag every generated artifact in .gitattributes so diffs collapse.
    patterns: List[str] = []
    for pipeline in manager.pipelines:
        patterns.extend(pipeline.generator.gitattributes_patterns)
    if patterns:
        _write_gitattributes(project_root, sorted(set(patterns)))

    print("\n✓ Rules synchronization completed!")


if __name__ == "__main__":
    main()