semantic markup update
@@ -18,9 +18,29 @@ from typing import Dict, List, Optional, Any, Pattern, Tuple, Set
 # Mock belief_scope for the script itself to avoid import issues
 class belief_scope:
-    def __init__(self, name): self.name = name
-    def __enter__(self): return self
-    def __exit__(self, *args): pass
+    # [DEF:__init__:Function]
+    # @PURPOSE: Mock init.
+    # @PRE: name is a string.
+    # @POST: Instance initialized.
+    def __init__(self, name):
+        self.name = name
+    # [/DEF:__init__:Function]
+
+    # [DEF:__enter__:Function]
+    # @PURPOSE: Mock enter.
+    # @PRE: Instance initialized.
+    # @POST: Returns self.
+    def __enter__(self):
+        return self
+    # [/DEF:__enter__:Function]
+
+    # [DEF:__exit__:Function]
+    # @PURPOSE: Mock exit.
+    # @PRE: Context entered.
+    # @POST: Context exited.
+    def __exit__(self, *args):
+        pass
+    # [/DEF:__exit__:Function]
 # [/SECTION]

 # [SECTION: CONFIGURATION]
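Note: a minimal usage sketch of the mock (not part of the commit; the scope name "demo" is illustrative) — it only needs to satisfy the context-manager protocol the real belief_scope provides:

    with belief_scope("demo") as scope:
        print(scope.name)  # -> "demo"; __exit__ is a no-op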
@@ -162,7 +182,7 @@ def get_patterns(lang: str) -> Dict[str, Pattern]:
             "tag": re.compile(r"#\s*@(?P<tag>[A-Z_]+):\s*(?P<value>.*)"),
             "relation": re.compile(r"#\s*@RELATION:\s*(?P<type>\w+)\s*->\s*(?P<target>.*)"),
             "func_def": re.compile(r"^\s*(async\s+)?def\s+(?P<name>\w+)"),
-            "belief_scope": re.compile(r"with\s+belief_scope\("),
+            "belief_scope": re.compile(r"with\s+(\w+\.)?belief_scope\("),
         }
     else:
         return {
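Note: the widened regex is the functional change in this hunk — the optional group (\w+\.)? also accepts a single module qualifier before belief_scope. A quick illustrative check (the example strings are hypothetical):

    import re

    pattern = re.compile(r"with\s+(\w+\.)?belief_scope\(")
    assert pattern.search('with belief_scope("run"):')        # matched before and after
    assert pattern.search('with utils.belief_scope("run"):')  # matched only by the new pattern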
@@ -180,124 +200,127 @@ def get_patterns(lang: str) -> Dict[str, Pattern]:

 # [DEF:parse_file:Function]
 # @PURPOSE: Parses a single file to extract semantic entities.
 # @PRE: full_path, rel_path, lang are valid strings.
 # @POST: Returns extracted entities and list of issues.
 # @PARAM: full_path - Absolute path to file.
 # @PARAM: rel_path - Relative path from project root.
 # @PARAM: lang - Language identifier.
 # @RETURN: Tuple[List[SemanticEntity], List[str]] - Entities found and global issues.
 def parse_file(full_path: str, rel_path: str, lang: str) -> Tuple[List[SemanticEntity], List[str]]:
-    issues: List[str] = []
-    try:
-        with open(full_path, 'r', encoding='utf-8') as f:
-            lines = f.readlines()
-    except Exception as e:
-        return [], [f"Could not read file {rel_path}: {e}"]
-
-    stack: List[SemanticEntity] = []
-    file_entities: List[SemanticEntity] = []
-    patterns = get_patterns(lang)
-
-    for i, line in enumerate(lines):
-        lineno = i + 1
-        line = line.strip()
-
-        # 1. Check for Anchor Start
-        match_start = None
-        if lang == "python":
-            match_start = patterns["anchor_start"].search(line)
-        else:
-            match_start = patterns["html_anchor_start"].search(line) or patterns["js_anchor_start"].search(line)
-
-        if match_start:
-            name = match_start.group("name")
-            type_ = match_start.group("type")
-            entity = SemanticEntity(name, type_, lineno, rel_path)
-
-            if stack:
-                parent = stack[-1]
-                parent.children.append(entity)
-                entity.parent = parent
-            else:
-                file_entities.append(entity)
-
-            stack.append(entity)
-            continue
-
-        # 2. Check for Anchor End
-        match_end = None
-        if lang == "python":
-            match_end = patterns["anchor_end"].search(line)
-        else:
-            match_end = patterns["html_anchor_end"].search(line) or patterns["js_anchor_end"].search(line)
-
-        if match_end:
-            name = match_end.group("name")
-            type_ = match_end.group("type")
-
-            if not stack:
-                issues.append(f"{rel_path}:{lineno} Found closing anchor [/DEF:{name}:{type_}] without opening anchor.")
-                continue
-
-            top = stack[-1]
-            if top.name == name and top.type == type_:
-                top.end_line = lineno
-                stack.pop()
-            else:
-                issues.append(f"{rel_path}:{lineno} Mismatched closing anchor. Expected [/DEF:{top.name}:{top.type}], found [/DEF:{name}:{type_}].")
-            continue
-
-        # 3. Check for Naked Functions (Missing Contracts)
-        if "func_def" in patterns:
-            match_func = patterns["func_def"].search(line)
-            if match_func:
-                func_name = match_func.group("name")
-                is_covered = False
-                if stack:
-                    current = stack[-1]
-                    # Check if we are inside a Function anchor that matches the name
-                    if current.type == "Function" and current.name == func_name:
-                        is_covered = True
-
-                if not is_covered:
-                    issues.append(f"{rel_path}:{lineno} Function '{func_name}' implementation found without matching [DEF:{func_name}:Function] contract.")
-
-        # 4. Check for Tags/Relations
-        if stack:
-            current = stack[-1]
-
-            match_rel = patterns["relation"].search(line)
-            if match_rel:
-                current.relations.append({
-                    "type": match_rel.group("type"),
-                    "target": match_rel.group("target")
-                })
-                continue
-
-            match_tag = None
-            if lang == "python":
-                match_tag = patterns["tag"].search(line)
-            elif lang == "svelte_js":
-                match_tag = patterns["html_tag"].search(line)
-                if not match_tag and ("/*" in line or "*" in line or "//" in line):
-                    match_tag = patterns["jsdoc_tag"].search(line)
-
-            if match_tag:
-                tag_name = match_tag.group("tag").upper()
-                tag_value = match_tag.group("value").strip()
-                current.tags[tag_name] = tag_value
-
-            # Check for belief scope in implementation
-            if lang == "python" and "belief_scope" in patterns:
-                if patterns["belief_scope"].search(line):
-                    current.has_belief_scope = True
-
-    # End of file check
-    if stack:
-        for unclosed in stack:
-            unclosed.compliance_issues.append(f"Unclosed Anchor at end of file (started line {unclosed.start_line})")
-            if unclosed.parent is None and unclosed not in file_entities:
-                file_entities.append(unclosed)
-
-    return file_entities, issues
+    with belief_scope("parse_file"):
+        issues: List[str] = []
+        try:
+            with open(full_path, 'r', encoding='utf-8') as f:
+                lines = f.readlines()
+        except Exception as e:
+            return [], [f"Could not read file {rel_path}: {e}"]
+
+        stack: List[SemanticEntity] = []
+        file_entities: List[SemanticEntity] = []
+        patterns = get_patterns(lang)
+
+        for i, line in enumerate(lines):
+            lineno = i + 1
+            line = line.strip()
+
+            # 1. Check for Anchor Start
+            match_start = None
+            if lang == "python":
+                match_start = patterns["anchor_start"].search(line)
+            else:
+                match_start = patterns["html_anchor_start"].search(line) or patterns["js_anchor_start"].search(line)
+
+            if match_start:
+                name = match_start.group("name")
+                type_ = match_start.group("type")
+                entity = SemanticEntity(name, type_, lineno, rel_path)
+
+                if stack:
+                    parent = stack[-1]
+                    parent.children.append(entity)
+                    entity.parent = parent
+                else:
+                    file_entities.append(entity)
+
+                stack.append(entity)
+                continue
+
+            # 2. Check for Anchor End
+            match_end = None
+            if lang == "python":
+                match_end = patterns["anchor_end"].search(line)
+            else:
+                match_end = patterns["html_anchor_end"].search(line) or patterns["js_anchor_end"].search(line)
+
+            if match_end:
+                name = match_end.group("name")
+                type_ = match_end.group("type")
+
+                if not stack:
+                    issues.append(f"{rel_path}:{lineno} Found closing anchor [/DEF:{name}:{type_}] without opening anchor.")
+                    continue
+
+                top = stack[-1]
+                if top.name == name and top.type == type_:
+                    top.end_line = lineno
+                    stack.pop()
+                else:
+                    issues.append(f"{rel_path}:{lineno} Mismatched closing anchor. Expected [/DEF:{top.name}:{top.type}], found [/DEF:{name}:{type_}].")
+                continue
+
+            # 3. Check for Naked Functions (Missing Contracts)
+            if "func_def" in patterns:
+                match_func = patterns["func_def"].search(line)
+                if match_func:
+                    func_name = match_func.group("name")
+                    is_covered = False
+                    if stack:
+                        current = stack[-1]
+                        # Check if we are inside a Function anchor that matches the name
+                        if current.type == "Function" and current.name == func_name:
+                            is_covered = True
+
+                    if not is_covered:
+                        issues.append(f"{rel_path}:{lineno} Function '{func_name}' implementation found without matching [DEF:{func_name}:Function] contract.")
+
+            # 4. Check for Tags/Relations
+            if stack:
+                current = stack[-1]
+
+                match_rel = patterns["relation"].search(line)
+                if match_rel:
+                    current.relations.append({
+                        "type": match_rel.group("type"),
+                        "target": match_rel.group("target")
+                    })
+                    continue
+
+                match_tag = None
+                if lang == "python":
+                    match_tag = patterns["tag"].search(line)
+                elif lang == "svelte_js":
+                    match_tag = patterns["html_tag"].search(line)
+                    if not match_tag and ("/*" in line or "*" in line or "//" in line):
+                        match_tag = patterns["jsdoc_tag"].search(line)
+
+                if match_tag:
+                    tag_name = match_tag.group("tag").upper()
+                    tag_value = match_tag.group("value").strip()
+                    current.tags[tag_name] = tag_value
+
+                # Check for belief scope in implementation
+                if lang == "python" and "belief_scope" in patterns:
+                    if patterns["belief_scope"].search(line):
+                        current.has_belief_scope = True
+
+        # End of file check
+        if stack:
+            for unclosed in stack:
+                unclosed.compliance_issues.append(f"Unclosed Anchor at end of file (started line {unclosed.start_line})")
+                if unclosed.parent is None and unclosed not in file_entities:
+                    file_entities.append(unclosed)
+
+        return file_entities, issues
 # [/DEF:parse_file:Function]

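Note: for orientation, a hypothetical snippet of the shape parse_file accepts — an anchor pair, contract tags, a relation, and a belief_scope call so the function is not flagged as naked. The names fetch_user and db.get are made up for illustration:

    # [DEF:fetch_user:Function]
    # @PURPOSE: Load a user record.
    # @PRE: user_id is a valid key.
    # @POST: Returns the record or None.
    # @RELATION: CALLS -> db.get
    def fetch_user(user_id):
        with belief_scope("fetch_user"):
            return db.get(user_id)
    # [/DEF:fetch_user:Function]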
@@ -309,26 +332,30 @@ class SemanticMapGenerator:
     # @PRE: root_dir is a valid path string.
     # @POST: Generator instance is ready.
     def __init__(self, root_dir: str):
-        self.root_dir = root_dir
-        self.entities: List[SemanticEntity] = []
-        self.file_scores: Dict[str, float] = {}
-        self.global_issues: List[str] = []
-        self.ignored_patterns = self._load_gitignore()
+        with belief_scope("__init__"):
+            self.root_dir = root_dir
+            self.entities: List[SemanticEntity] = []
+            self.file_scores: Dict[str, float] = {}
+            self.global_issues: List[str] = []
+            self.ignored_patterns = self._load_gitignore()
     # [/DEF:__init__:Function]

     # [DEF:_load_gitignore:Function]
     # @PURPOSE: Loads patterns from .gitignore file.
     # @PRE: .gitignore exists in root_dir.
     # @POST: Returns set of ignore patterns.
     # @RETURN: Set of patterns to ignore.
     def _load_gitignore(self) -> Set[str]:
-        patterns = set()
-        ignore_file = os.path.join(self.root_dir, ".gitignore")
-        if os.path.exists(ignore_file):
-            with open(ignore_file, 'r') as f:
-                for line in f:
-                    line = line.strip()
-                    if line and not line.startswith("#"):
-                        patterns.add(line)
-        return patterns
+        with belief_scope("_load_gitignore"):
+            patterns = set()
+            ignore_file = os.path.join(self.root_dir, ".gitignore")
+            if os.path.exists(ignore_file):
+                with open(ignore_file, 'r') as f:
+                    for line in f:
+                        line = line.strip()
+                        if line and not line.startswith("#"):
+                            patterns.add(line)
+            return patterns
     # [/DEF:_load_gitignore:Function]

     # [DEF:_is_ignored:Function]
@@ -338,39 +365,40 @@ class SemanticMapGenerator:
     # @PARAM: rel_path (str) - Path relative to root.
     # @RETURN: bool - True if ignored.
     def _is_ignored(self, rel_path: str) -> bool:
-        # Normalize path for matching
-        rel_path = rel_path.replace(os.sep, '/')
-
-        # Check hardcoded defaults
-        parts = rel_path.split('/')
-        for part in parts:
-            if part in IGNORE_DIRS:
-                return True
-
-        if os.path.basename(rel_path) in IGNORE_FILES:
-            return True
-
-        # Check gitignore patterns
-        for pattern in self.ignored_patterns:
-            # Handle directory patterns like 'node_modules/'
-            if pattern.endswith('/'):
-                dir_pattern = pattern.rstrip('/')
-                if rel_path == dir_pattern or rel_path.startswith(pattern):
-                    return True
-
-            # Check for patterns in frontend/ or backend/
-            if rel_path.startswith("frontend/") and fnmatch.fnmatch(rel_path[9:], pattern):
-                return True
-            if rel_path.startswith("backend/") and fnmatch.fnmatch(rel_path[8:], pattern):
-                return True
-
-            # Use fnmatch for glob patterns
-            if fnmatch.fnmatch(rel_path, pattern) or \
-               fnmatch.fnmatch(os.path.basename(rel_path), pattern) or \
-               any(fnmatch.fnmatch(part, pattern) for part in parts):
-                return True
-
-        return False
+        with belief_scope("_is_ignored"):
+            # Normalize path for matching
+            rel_path = rel_path.replace(os.sep, '/')
+
+            # Check hardcoded defaults
+            parts = rel_path.split('/')
+            for part in parts:
+                if part in IGNORE_DIRS:
+                    return True
+
+            if os.path.basename(rel_path) in IGNORE_FILES:
+                return True
+
+            # Check gitignore patterns
+            for pattern in self.ignored_patterns:
+                # Handle directory patterns like 'node_modules/'
+                if pattern.endswith('/'):
+                    dir_pattern = pattern.rstrip('/')
+                    if rel_path == dir_pattern or rel_path.startswith(pattern):
+                        return True
+
+                # Check for patterns in frontend/ or backend/
+                if rel_path.startswith("frontend/") and fnmatch.fnmatch(rel_path[9:], pattern):
+                    return True
+                if rel_path.startswith("backend/") and fnmatch.fnmatch(rel_path[8:], pattern):
+                    return True
+
+                # Use fnmatch for glob patterns
+                if fnmatch.fnmatch(rel_path, pattern) or \
+                   fnmatch.fnmatch(os.path.basename(rel_path), pattern) or \
+                   any(fnmatch.fnmatch(part, pattern) for part in parts):
+                    return True
+
+            return False
     # [/DEF:_is_ignored:Function]

     # [DEF:run:Function]
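Note: the glob branch above tests each pattern against the full path, the basename, and every path segment. A small sketch of that logic with illustrative inputs:

    import fnmatch

    pattern = "*.log"
    rel_path = "backend/logs/app.log"
    parts = rel_path.split("/")
    matched = (fnmatch.fnmatch(rel_path, pattern)
               or fnmatch.fnmatch("app.log", pattern)  # basename
               or any(fnmatch.fnmatch(p, pattern) for p in parts))
    assert matched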
@@ -380,10 +408,11 @@ class SemanticMapGenerator:
     # @RELATION: CALLS -> _walk_and_parse
     # @RELATION: CALLS -> _generate_artifacts
     def run(self):
-        print(f"Starting Semantic Map Generation in {self.root_dir}...")
-        self._walk_and_parse()
-        self._generate_artifacts()
-        print("Done.")
+        with belief_scope("run"):
+            print(f"Starting Semantic Map Generation in {self.root_dir}...")
+            self._walk_and_parse()
+            self._generate_artifacts()
+            print("Done.")
     # [/DEF:run:Function]

     # [DEF:_walk_and_parse:Function]
@@ -391,29 +420,30 @@ class SemanticMapGenerator:
     # @PRE: root_dir exists.
     # @POST: All files are scanned and entities extracted.
     def _walk_and_parse(self):
-        for root, dirs, files in os.walk(self.root_dir):
-            # Optimization: don't enter ignored directories
-            dirs[:] = [d for d in dirs if not self._is_ignored(os.path.relpath(os.path.join(root, d), self.root_dir) + "/")]
-
-            for file in files:
-                file_path = os.path.join(root, file)
-                rel_path = os.path.relpath(file_path, self.root_dir)
-
-                if self._is_ignored(rel_path):
-                    continue
-
-                lang = None
-                if file.endswith(".py"):
-                    lang = "python"
-                elif file.endswith((".svelte", ".js", ".ts")):
-                    lang = "svelte_js"
-
-                if lang:
-                    entities, issues = parse_file(file_path, rel_path, lang)
-                    self.global_issues.extend(issues)
-
-                    if entities:
-                        self._process_file_results(rel_path, entities)
+        with belief_scope("_walk_and_parse"):
+            for root, dirs, files in os.walk(self.root_dir):
+                # Optimization: don't enter ignored directories
+                dirs[:] = [d for d in dirs if not self._is_ignored(os.path.relpath(os.path.join(root, d), self.root_dir) + "/")]
+
+                for file in files:
+                    file_path = os.path.join(root, file)
+                    rel_path = os.path.relpath(file_path, self.root_dir)
+
+                    if self._is_ignored(rel_path):
+                        continue
+
+                    lang = None
+                    if file.endswith(".py"):
+                        lang = "python"
+                    elif file.endswith((".svelte", ".js", ".ts")):
+                        lang = "svelte_js"
+
+                    if lang:
+                        entities, issues = parse_file(file_path, rel_path, lang)
+                        self.global_issues.extend(issues)
+
+                        if entities:
+                            self._process_file_results(rel_path, entities)
     # [/DEF:_walk_and_parse:Function]

     # [DEF:_process_file_results:Function]
@@ -421,26 +451,28 @@ class SemanticMapGenerator:
     # @PRE: Entities have been parsed from the file.
     # @POST: File score is calculated and issues collected.
     def _process_file_results(self, rel_path: str, entities: List[SemanticEntity]):
-        total_score = 0
-        count = 0
-
-        # [DEF:validate_recursive:Function]
-        # @PURPOSE: Recursively validates a list of entities.
-        # @PRE: ent_list is a list of SemanticEntity objects.
-        # @POST: All entities and their children are validated.
-        def validate_recursive(ent_list):
-            nonlocal total_score, count
-            for e in ent_list:
-                e.validate()
-                total_score += e.get_score()
-                count += 1
-                validate_recursive(e.children)
-        # [/DEF:validate_recursive:Function]
-
-        validate_recursive(entities)
-
-        self.entities.extend(entities)
-        self.file_scores[rel_path] = (total_score / count) if count > 0 else 0.0
+        with belief_scope("_process_file_results"):
+            total_score = 0
+            count = 0
+
+            # [DEF:validate_recursive:Function]
+            # @PURPOSE: Recursively validates a list of entities.
+            # @PRE: ent_list is a list of SemanticEntity objects.
+            # @POST: All entities and their children are validated.
+            def validate_recursive(ent_list):
+                with belief_scope("validate_recursive"):
+                    nonlocal total_score, count
+                    for e in ent_list:
+                        e.validate()
+                        total_score += e.get_score()
+                        count += 1
+                        validate_recursive(e.children)
+            # [/DEF:validate_recursive:Function]
+
+            validate_recursive(entities)
+
+            self.entities.extend(entities)
+            self.file_scores[rel_path] = (total_score / count) if count > 0 else 0.0
     # [/DEF:_process_file_results:Function]

     # [DEF:_generate_artifacts:Function]
@@ -448,23 +480,24 @@ class SemanticMapGenerator:
     # @PRE: Parsing and validation are complete.
     # @POST: JSON and Markdown artifacts are written to disk.
     def _generate_artifacts(self):
-        # 1. Full JSON Map
-        full_map = {
-            "project_root": self.root_dir,
-            "generated_at": datetime.datetime.now().isoformat(),
-            "modules": [e.to_dict() for e in self.entities]
-        }
-
-        os.makedirs(os.path.dirname(OUTPUT_JSON), exist_ok=True)
-        with open(OUTPUT_JSON, 'w', encoding='utf-8') as f:
-            json.dump(full_map, f, indent=2)
-        print(f"Generated {OUTPUT_JSON}")
-
-        # 2. Compliance Report
-        self._generate_report()
-
-        # 3. Compressed Map (Markdown)
-        self._generate_compressed_map()
+        with belief_scope("_generate_artifacts"):
+            # 1. Full JSON Map
+            full_map = {
+                "project_root": self.root_dir,
+                "generated_at": datetime.datetime.now().isoformat(),
+                "modules": [e.to_dict() for e in self.entities]
+            }
+
+            os.makedirs(os.path.dirname(OUTPUT_JSON), exist_ok=True)
+            with open(OUTPUT_JSON, 'w', encoding='utf-8') as f:
+                json.dump(full_map, f, indent=2)
+            print(f"Generated {OUTPUT_JSON}")
+
+            # 2. Compliance Report
+            self._generate_report()
+
+            # 3. Compressed Map (Markdown)
+            self._generate_compressed_map()
     # [/DEF:_generate_artifacts:Function]

     # [DEF:_generate_report:Function]
@@ -472,40 +505,41 @@ class SemanticMapGenerator:
     # @PRE: File scores and issues are available.
     # @POST: Markdown report is created in reports directory.
     def _generate_report(self):
-        timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
-        report_path = os.path.join(REPORTS_DIR, f"semantic_report_{timestamp}.md")
-        os.makedirs(REPORTS_DIR, exist_ok=True)
-
-        total_files = len(self.file_scores)
-        avg_score = sum(self.file_scores.values()) / total_files if total_files > 0 else 0
-
-        with open(report_path, 'w', encoding='utf-8') as f:
-            f.write(f"# Semantic Compliance Report\n\n")
-            f.write(f"**Generated At:** {datetime.datetime.now().isoformat()}\n")
-            f.write(f"**Global Compliance Score:** {avg_score:.1%}\n")
-            f.write(f"**Scanned Files:** {total_files}\n\n")
-
-            if self.global_issues:
-                f.write("## Critical Parsing Errors\n")
-                for issue in self.global_issues:
-                    f.write(f"- 🔴 {issue}\n")
-                f.write("\n")
-
-            f.write("## File Compliance Status\n")
-            f.write("| File | Score | Issues |\n")
-            f.write("|------|-------|--------|\n")
-
-            sorted_files = sorted(self.file_scores.items(), key=lambda x: x[1])
-
-            for file_path, score in sorted_files:
-                issues = []
-                self._collect_issues(self.entities, file_path, issues)
-
-                status_icon = "🟢" if score == 1.0 else "🟡" if score > 0.5 else "🔴"
-                issue_text = "<br>".join(issues) if issues else "OK"
-                f.write(f"| {file_path} | {status_icon} {score:.0%} | {issue_text} |\n")
-
-        print(f"Generated {report_path}")
+        with belief_scope("_generate_report"):
+            timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
+            report_path = os.path.join(REPORTS_DIR, f"semantic_report_{timestamp}.md")
+            os.makedirs(REPORTS_DIR, exist_ok=True)
+
+            total_files = len(self.file_scores)
+            avg_score = sum(self.file_scores.values()) / total_files if total_files > 0 else 0
+
+            with open(report_path, 'w', encoding='utf-8') as f:
+                f.write(f"# Semantic Compliance Report\n\n")
+                f.write(f"**Generated At:** {datetime.datetime.now().isoformat()}\n")
+                f.write(f"**Global Compliance Score:** {avg_score:.1%}\n")
+                f.write(f"**Scanned Files:** {total_files}\n\n")
+
+                if self.global_issues:
+                    f.write("## Critical Parsing Errors\n")
+                    for issue in self.global_issues:
+                        f.write(f"- 🔴 {issue}\n")
+                    f.write("\n")
+
+                f.write("## File Compliance Status\n")
+                f.write("| File | Score | Issues |\n")
+                f.write("|------|-------|--------|\n")
+
+                sorted_files = sorted(self.file_scores.items(), key=lambda x: x[1])
+
+                for file_path, score in sorted_files:
+                    issues = []
+                    self._collect_issues(self.entities, file_path, issues)
+
+                    status_icon = "🟢" if score == 1.0 else "🟡" if score > 0.5 else "🔴"
+                    issue_text = "<br>".join(issues) if issues else "OK"
+                    f.write(f"| {file_path} | {status_icon} {score:.0%} | {issue_text} |\n")
+
+            print(f"Generated {report_path}")
     # [/DEF:_generate_report:Function]

     # [DEF:_collect_issues:Function]
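Note: the writes above emit a plain Markdown report. An illustrative excerpt (paths, scores, and timestamp are hypothetical — only the "Unclosed Anchor" message text comes from the parser):

    # Semantic Compliance Report

    **Generated At:** 2024-01-01T00:00:00
    **Global Compliance Score:** 80.0%
    **Scanned Files:** 2

    ## File Compliance Status
    | File | Score | Issues |
    |------|-------|--------|
    | backend/util.py | 🟡 60% | [util] Unclosed Anchor at end of file (started line 12) |
    | backend/main.py | 🟢 100% | OK |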
@@ -513,10 +547,11 @@ class SemanticMapGenerator:
     # @PRE: entities list and file_path are valid.
     # @POST: issues list is populated with compliance issues.
     def _collect_issues(self, entities: List[SemanticEntity], file_path: str, issues: List[str]):
-        for e in entities:
-            if e.file_path == file_path:
-                issues.extend([f"[{e.name}] {i}" for i in e.compliance_issues])
-            self._collect_issues(e.children, file_path, issues)
+        with belief_scope("_collect_issues"):
+            for e in entities:
+                if e.file_path == file_path:
+                    issues.extend([f"[{e.name}] {i}" for i in e.compliance_issues])
+                self._collect_issues(e.children, file_path, issues)
     # [/DEF:_collect_issues:Function]

     # [DEF:_generate_compressed_map:Function]
@@ -524,16 +559,17 @@ class SemanticMapGenerator:
     # @PRE: Entities have been processed.
     # @POST: Markdown project map is written.
     def _generate_compressed_map(self):
-        os.makedirs(os.path.dirname(OUTPUT_COMPRESSED_MD), exist_ok=True)
-
-        with open(OUTPUT_COMPRESSED_MD, 'w', encoding='utf-8') as f:
-            f.write("# Project Semantic Map\n\n")
-            f.write("> Compressed view for AI Context. Generated automatically.\n\n")
-
-            for entity in self.entities:
-                self._write_entity_md(f, entity, level=0)
-
-        print(f"Generated {OUTPUT_COMPRESSED_MD}")
+        with belief_scope("_generate_compressed_map"):
+            os.makedirs(os.path.dirname(OUTPUT_COMPRESSED_MD), exist_ok=True)
+
+            with open(OUTPUT_COMPRESSED_MD, 'w', encoding='utf-8') as f:
+                f.write("# Project Semantic Map\n\n")
+                f.write("> Compressed view for AI Context. Generated automatically.\n\n")
+
+                for entity in self.entities:
+                    self._write_entity_md(f, entity, level=0)
+
+            print(f"Generated {OUTPUT_COMPRESSED_MD}")
     # [/DEF:_generate_compressed_map:Function]

     # [DEF:_write_entity_md:Function]
@@ -541,30 +577,31 @@ class SemanticMapGenerator:
     # @PRE: f is an open file handle, entity is valid.
     # @POST: Entity details are written to the file.
     def _write_entity_md(self, f, entity: SemanticEntity, level: int):
-        indent = " " * level
-
-        icon = "📦"
-        if entity.type == "Component": icon = "🧩"
-        elif entity.type == "Function": icon = "ƒ"
-        elif entity.type == "Class": icon = "ℂ"
-
-        f.write(f"{indent}- {icon} **{entity.name}** (`{entity.type}`)\n")
-
-        purpose = entity.tags.get("PURPOSE") or entity.tags.get("purpose")
-        layer = entity.tags.get("LAYER") or entity.tags.get("layer")
-
-        if purpose:
-            f.write(f"{indent} - 📝 {purpose}\n")
-        if layer:
-            f.write(f"{indent} - 🏗️ Layer: {layer}\n")
-
-        for rel in entity.relations:
-            if rel['type'] in ['DEPENDS_ON', 'CALLS', 'INHERITS_FROM']:
-                f.write(f"{indent} - 🔗 {rel['type']} -> `{rel['target']}`\n")
-
-        if level < 2:
-            for child in entity.children:
-                self._write_entity_md(f, child, level + 1)
+        with belief_scope("_write_entity_md"):
+            indent = " " * level
+
+            icon = "📦"
+            if entity.type == "Component": icon = "🧩"
+            elif entity.type == "Function": icon = "ƒ"
+            elif entity.type == "Class": icon = "ℂ"
+
+            f.write(f"{indent}- {icon} **{entity.name}** (`{entity.type}`)\n")
+
+            purpose = entity.tags.get("PURPOSE") or entity.tags.get("purpose")
+            layer = entity.tags.get("LAYER") or entity.tags.get("layer")
+
+            if purpose:
+                f.write(f"{indent} - 📝 {purpose}\n")
+            if layer:
+                f.write(f"{indent} - 🏗️ Layer: {layer}\n")
+
+            for rel in entity.relations:
+                if rel['type'] in ['DEPENDS_ON', 'CALLS', 'INHERITS_FROM']:
+                    f.write(f"{indent} - 🔗 {rel['type']} -> `{rel['target']}`\n")
+
+            if level < 2:
+                for child in entity.children:
+                    self._write_entity_md(f, child, level + 1)
     # [/DEF:_write_entity_md:Function]

 # [/DEF:SemanticMapGenerator:Class]
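Note: the compressed map writer above yields nested Markdown bullets of roughly this shape (entity names and purpose text hypothetical, spacing per the f-strings above):

    - ℂ **SemanticMapGenerator** (`Class`)
     - 📝 Generates the semantic map.
     - ƒ **run** (`Function`)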