Skip to content

Commit d23a02a

Browse files
committed
Update plugins config
1 parent e355481 commit d23a02a

4 files changed

Lines changed: 102 additions & 127 deletions

File tree

sync_ai_rules/__main__.py

Lines changed: 23 additions & 42 deletions
Original file line number | Diff line number | Diff line change
@@ -72,74 +72,55 @@ def scan_and_parse(parser, source_dir: str, project_root: str) -> List[RuleMetad
7272

7373

7474
def main():
75-
"""Main orchestration: load plugins → parse → generate → update files."""
75+
"""Main orchestration: load pipelines → parse → generate → update files."""
7676
# Setup
7777
project_root = find_project_root()
7878
script_dir = os.path.dirname(os.path.abspath(__file__))
7979

8080
plugin_manager = PluginManager()
8181
plugin_manager.load_plugins(script_dir)
8282

83-
# Process each parser → generator pair
84-
results = {}
83+
if not plugin_manager.pipelines:
84+
print("Error: No pipelines configured")
85+
sys.exit(1)
8586

86-
for parser in plugin_manager.parsers.values():
87-
# Get source directories from parser
88-
source_dirs = parser.source_directories
89-
if not source_dirs:
90-
continue
87+
# Get target files (all generators use same files)
88+
first_generator = plugin_manager.pipelines[0].generator
89+
output_files = [
90+
os.path.join(project_root, filename) for filename in first_generator.default_filenames
91+
]
9192

93+
# Process each pipeline
94+
print()
95+
for pipeline in plugin_manager.pipelines:
96+
print(f"Processing pipeline: {pipeline.name}")
97+
98+
# Scan and parse using pipeline's parser
9299
all_rules = []
93-
for rel_dir in source_dirs:
100+
for rel_dir in pipeline.parser.source_directories:
94101
source_dir = os.path.join(project_root, rel_dir)
95-
print(f"Scanning {rel_dir}...")
96-
rules = scan_and_parse(parser, source_dir, project_root)
102+
print(f" Scanning {rel_dir}...")
103+
rules = scan_and_parse(pipeline.parser, source_dir, project_root)
97104
all_rules.extend(rules)
98105

99106
if not all_rules:
107+
print(" No rules found, skipping")
100108
continue
101109

102110
# Group rules by category
103111
grouped_rules = group_by_category(all_rules)
104-
105112
print(f" Found {len(all_rules)} rules in {len(grouped_rules)} categories")
106113

107-
# Store for generator
108-
results[parser.name] = grouped_rules
109-
110-
if not results:
111-
print("Error: No rules found in any source directory")
112-
sys.exit(1)
113-
114-
# Generate and update documentation
115-
print("\nGenerating documentation...")
116-
117-
# Get target files (all generators use same files)
118-
first_generator = next(iter(plugin_manager.generators.values()))
119-
output_files = [
120-
os.path.join(project_root, filename) for filename in first_generator.default_filenames
121-
]
122-
123-
# Generate content from each generator
124-
for parser_name, grouped_rules in results.items():
125-
# Get the generator for this parser
126-
generator_name = plugin_manager.parser_to_generator.get(parser_name)
127-
if not generator_name:
128-
continue
129-
130-
generator = plugin_manager.generators.get(generator_name)
131-
if not generator:
132-
continue
133-
134-
content = generator.generate(grouped_rules, {})
114+
# Generate content using pipeline's generator
115+
content = pipeline.generator.generate(grouped_rules, {})
135116

136117
# Update all target files
137118
for file_path in output_files:
138119
success, message = update_documentation_file(
139-
file_path, content, generator.get_section_markers()
120+
file_path, content, pipeline.generator.get_section_markers()
140121
)
141122
status = "✓" if success else "✗"
142-
print(f"{status} {generator.name}: {message}")
123+
print(f" {status} {message}")
143124

144125
print("\n✓ Rules synchronization completed!")
145126

sync_ai_rules/core/interfaces.py

Lines changed: 14 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -6,7 +6,10 @@
66

77
from abc import ABC, abstractmethod
88
from dataclasses import dataclass
9-
from typing import Any, Dict, List, Optional
9+
from typing import TYPE_CHECKING, Any, Dict, List, Optional
10+
11+
if TYPE_CHECKING:
12+
from .interfaces import InputParser, OutputGenerator
1013

1114

1215
@dataclass
@@ -28,6 +31,16 @@ def __post_init__(self):
2831
self.metadata = {}
2932

3033

34+
@dataclass
35+
class Pipeline:
36+
"""Represents a parser-generator pipeline."""
37+
38+
name: str
39+
description: str
40+
parser: "InputParser"
41+
generator: "OutputGenerator"
42+
43+
3144
class InputParser(ABC):
3245
"""Abstract base class for all input parsers."""
3346

Lines changed: 50 additions & 62 deletions
Original file line number | Diff line number | Diff line change
@@ -1,27 +1,25 @@
11
#!/usr/bin/env python3
22
"""
3-
Plugin manager for loading parsers and generators from explicit configuration.
3+
Plugin manager for loading parser-generator pipelines from configuration.
44
"""
55

66
import importlib.util
77
from pathlib import Path
8-
from typing import Dict, Optional
8+
from typing import List
99

1010
import yaml
1111

12-
from .interfaces import InputParser, OutputGenerator
12+
from .interfaces import InputParser, OutputGenerator, Pipeline
1313

1414

1515
class PluginManager:
16-
"""Loads and manages parser and generator plugins from configuration."""
16+
"""Loads and manages parser-generator pipelines from configuration."""
1717

1818
def __init__(self):
19-
self.parsers: Dict[str, InputParser] = {}
20-
self.generators: Dict[str, OutputGenerator] = {}
21-
self.parser_to_generator: Dict[str, str] = {} # Maps parser name to generator name
19+
self.pipelines: List[Pipeline] = []
2220

2321
def load_plugins(self, base_path: str):
24-
"""Load all plugins from plugins.yaml configuration file."""
22+
"""Load all pipelines from plugins.yaml configuration file."""
2523
config_path = Path(base_path) / "plugins.yaml"
2624

2725
if not config_path.exists():
@@ -32,65 +30,55 @@ def load_plugins(self, base_path: str):
3230
with open(config_path) as f:
3331
config = yaml.safe_load(f)
3432

35-
# Load parsers
36-
for parser_config in config.get("parsers", []):
37-
self._load_parser(base_path, parser_config)
38-
# Store parser -> generator mapping
39-
if "generator" in parser_config:
40-
self.parser_to_generator[parser_config["name"]] = parser_config["generator"]
41-
42-
# Load generators
43-
for generator_config in config.get("generators", []):
44-
self._load_generator(base_path, generator_config)
33+
# Load pipelines
34+
for pipeline_config in config.get("pipelines", []):
35+
pipeline = self._load_pipeline(base_path, pipeline_config)
36+
if pipeline:
37+
self.pipelines.append(pipeline)
38+
print(f"✓ Loaded pipeline: {pipeline.name} - {pipeline.description}")
4539

4640
except Exception as e:
4741
print(f"✗ Failed to load plugin configuration: {e}")
4842

49-
def _load_parser(self, base_path: str, config: dict):
50-
"""Load a specific parser from configuration."""
51-
try:
52-
module_path = Path(base_path) / "parsers" / f"{config['module']}.py"
53-
spec = importlib.util.spec_from_file_location(
54-
f"parsers.{config['module']}", module_path
55-
)
56-
module = importlib.util.module_from_spec(spec)
57-
spec.loader.exec_module(module)
58-
59-
# Get the specified class
60-
parser_class = getattr(module, config["class"])
61-
parser = parser_class()
62-
self.parsers[parser.name] = parser
63-
print(f"✓ Loaded parser: {parser.name} ({config['description']})")
64-
65-
except Exception as e:
66-
print(f"✗ Failed to load parser {config['name']}: {e}")
67-
68-
def _load_generator(self, base_path: str, config: dict):
69-
"""Load a specific generator from configuration."""
43+
def _load_pipeline(self, base_path: str, config: dict) -> Pipeline:
44+
"""Load a single parser-generator pipeline."""
7045
try:
71-
module_path = Path(base_path) / "generators" / f"{config['module']}.py"
72-
spec = importlib.util.spec_from_file_location(
73-
f"generators.{config['module']}", module_path
46+
# Load parser
47+
parser_config = config["parser"]
48+
parser = self._load_parser(base_path, parser_config)
49+
50+
# Load generator
51+
generator_config = config["generator"]
52+
generator = self._load_generator(base_path, generator_config)
53+
54+
# Create pipeline
55+
return Pipeline(
56+
name=config["name"],
57+
description=config["description"],
58+
parser=parser,
59+
generator=generator,
7460
)
75-
module = importlib.util.module_from_spec(spec)
76-
spec.loader.exec_module(module)
77-
78-
# Get the specified class
79-
generator_class = getattr(module, config["class"])
80-
generator = generator_class()
81-
self.generators[generator.name] = generator
82-
print(f"✓ Loaded generator: {generator.name} ({config['description']})")
8361

8462
except Exception as e:
85-
print(f"✗ Failed to load generator {config['name']}: {e}")
86-
87-
def get_parser_for_file(self, file_path: str) -> Optional[InputParser]:
88-
"""Get appropriate parser for a file."""
89-
for parser in self.parsers.values():
90-
if parser.can_parse(file_path):
91-
return parser
92-
return None
93-
94-
def get_generator(self, name: str) -> Optional[OutputGenerator]:
95-
"""Get generator by name."""
96-
return self.generators.get(name)
63+
print(f"✗ Failed to load pipeline {config.get('name', 'unknown')}: {e}")
64+
return None
65+
66+
def _load_parser(self, base_path: str, config: dict) -> InputParser:
67+
"""Load a parser from configuration."""
68+
module_path = Path(base_path) / "parsers" / f"{config['module']}.py"
69+
spec = importlib.util.spec_from_file_location(f"parsers.{config['module']}", module_path)
70+
module = importlib.util.module_from_spec(spec)
71+
spec.loader.exec_module(module)
72+
73+
parser_class = getattr(module, config["class"])
74+
return parser_class()
75+
76+
def _load_generator(self, base_path: str, config: dict) -> OutputGenerator:
77+
"""Load a generator from configuration."""
78+
module_path = Path(base_path) / "generators" / f"{config['module']}.py"
79+
spec = importlib.util.spec_from_file_location(f"generators.{config['module']}", module_path)
80+
module = importlib.util.module_from_spec(spec)
81+
spec.loader.exec_module(module)
82+
83+
generator_class = getattr(module, config["class"])
84+
return generator_class()

sync_ai_rules/plugins.yaml

Lines changed: 15 additions & 22 deletions
Original file line number | Diff line number | Diff line change
@@ -1,29 +1,22 @@
11
# Plugin configuration for sync_ai_rules
2-
# This file explicitly lists which parsers and generators to load,
3-
# making the plugin system more traceable and explicit.
2+
# Define rule processing pipelines: each pipeline has a parser and generator.
3+
# Parsers and generators can be reused across multiple pipelines.
44

5-
parsers:
6-
- name: mdc
7-
module: mdc_parser
8-
class: MDCParser
9-
description: Parse MDC files with YAML frontmatter
10-
generator: development-rules # Which generator uses this parser's output
11-
12-
- name: code-review
13-
module: code_review_parser
14-
class: CodeReviewParser
15-
description: Parse code review markdown files from .code_review/
16-
generator: code-review-guidelines # Which generator uses this parser's output
17-
18-
generators:
5+
pipelines:
196
- name: development-rules
20-
module: development_rules_generator
21-
class: DevelopmentRulesGenerator
227
description: Generate development rules documentation from .cursor/rules/
23-
parser: mdc # Which parser provides input for this generator
8+
parser:
9+
module: mdc_parser
10+
class: MDCParser
11+
generator:
12+
module: development_rules_generator
13+
class: DevelopmentRulesGenerator
2414

2515
- name: code-review-guidelines
26-
module: code_review_guidelines_generator
27-
class: CodeReviewGuidelinesGenerator
2816
description: Generate code review guidelines documentation from .code_review/
29-
parser: code-review # Which parser provides input for this generator
17+
parser:
18+
module: code_review_parser
19+
class: CodeReviewParser
20+
generator:
21+
module: code_review_guidelines_generator
22+
class: CodeReviewGuidelinesGenerator

0 commit comments

Comments (0)