semantic checker script update

2026-01-13 17:33:57 +03:00
parent b2529973eb
commit 11c59fb420
11 changed files with 3013 additions and 1044 deletions

View File

@@ -12,12 +12,19 @@ from ..config_manager import ConfigManager
# [DEF:TaskCleanupService:Class]
# @PURPOSE: Provides methods to clean up old task records.
class TaskCleanupService:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the cleanup service with dependencies.
# @PRE: persistence_service and config_manager are valid.
# @POST: Cleanup service is ready.
def __init__(self, persistence_service: TaskPersistenceService, config_manager: ConfigManager):
self.persistence_service = persistence_service
self.config_manager = config_manager
# [/DEF:__init__:Function]
# [DEF:TaskCleanupService.run_cleanup:Function]
# [DEF:run_cleanup:Function]
# @PURPOSE: Deletes tasks older than the configured retention period.
# @PRE: Config manager has valid settings.
# @POST: Old tasks are deleted from persistence.
def run_cleanup(self):
with belief_scope("TaskCleanupService.run_cleanup"):
settings = self.config_manager.get_config().settings
@@ -34,7 +41,7 @@ class TaskCleanupService:
to_delete = [t.id for t in tasks[settings.task_retention_limit:]]
self.persistence_service.delete_tasks(to_delete)
logger.info(f"Deleted {len(to_delete)} tasks exceeding limit of {settings.task_retention_limit}")
# [/DEF:TaskCleanupService.run_cleanup:Function]
# [/DEF:run_cleanup:Function]
# [/DEF:TaskCleanupService:Class]
# [/DEF:TaskCleanupModule:Module]
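
The retention logic above assumes `tasks` is sorted newest-first and deletes everything past the configured limit. A minimal sketch of that slice (the list contents are illustrative):

    tasks = ["t1", "t2", "t3", "t4", "t5"]   # newest first
    limit = 3                                # settings.task_retention_limit
    to_delete = tasks[limit:]                # tasks beyond the retention limit
    assert to_delete == ["t4", "t5"]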

View File

@@ -25,7 +25,7 @@ class TaskManager:
Manages the lifecycle of tasks, including their creation, execution, and state tracking.
"""
# [DEF:TaskManager.__init__:Function]
# [DEF:__init__:Function]
# @PURPOSE: Initialize the TaskManager with dependencies.
# @PRE: plugin_loader is initialized.
# @POST: TaskManager is ready to accept tasks.
@@ -46,9 +46,9 @@ class TaskManager:
# Load persisted tasks on startup
self.load_persisted_tasks()
# [/DEF:TaskManager.__init__:Function]
# [/DEF:__init__:Function]
# [DEF:TaskManager.create_task:Function]
# [DEF:create_task:Function]
# @PURPOSE: Creates and queues a new task for execution.
# @PRE: Plugin with plugin_id exists. Params are valid.
# @POST: Task is created, added to registry, and scheduled for execution.
@@ -75,9 +75,9 @@ class TaskManager:
logger.info(f"Task {task.id} created and scheduled for execution")
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
return task
# [/DEF:TaskManager.create_task:Function]
# [/DEF:create_task:Function]
# [DEF:TaskManager._run_task:Function]
# [DEF:_run_task:Function]
# @PURPOSE: Internal method to execute a task.
# @PRE: Task exists in registry.
# @POST: Task is executed, status updated to SUCCESS or FAILED.
@@ -117,9 +117,9 @@ class TaskManager:
task.finished_at = datetime.utcnow()
self.persistence_service.persist_task(task)
logger.info(f"Task {task_id} execution finished with status: {task.status}")
# [/DEF:TaskManager._run_task:Function]
# [/DEF:_run_task:Function]
# [DEF:TaskManager.resolve_task:Function]
# [DEF:resolve_task:Function]
# @PURPOSE: Resumes a task that is awaiting mapping.
# @PRE: Task exists and is in AWAITING_MAPPING state.
# @POST: Task status updated to RUNNING, params updated, execution resumed.
@@ -141,9 +141,9 @@ class TaskManager:
# Signal the future to continue
if task_id in self.task_futures:
self.task_futures[task_id].set_result(True)
# [/DEF:TaskManager.resolve_task:Function]
# [/DEF:resolve_task:Function]
# [DEF:TaskManager.wait_for_resolution:Function]
# [DEF:wait_for_resolution:Function]
# @PURPOSE: Pauses execution and waits for a resolution signal.
# @PRE: Task exists.
# @POST: Execution pauses until future is set.
@@ -162,9 +162,9 @@ class TaskManager:
finally:
if task_id in self.task_futures:
del self.task_futures[task_id]
# [/DEF:TaskManager.wait_for_resolution:Function]
# [/DEF:wait_for_resolution:Function]
# [DEF:TaskManager.wait_for_input:Function]
# [DEF:wait_for_input:Function]
# @PURPOSE: Pauses execution and waits for user input.
# @PRE: Task exists.
# @POST: Execution pauses until future is set via resume_task_with_password.
@@ -182,24 +182,24 @@ class TaskManager:
finally:
if task_id in self.task_futures:
del self.task_futures[task_id]
# [/DEF:TaskManager.wait_for_input:Function]
# [/DEF:wait_for_input:Function]
# [DEF:TaskManager.get_task:Function]
# [DEF:get_task:Function]
# @PURPOSE: Retrieves a task by its ID.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: Optional[Task] - The task or None.
def get_task(self, task_id: str) -> Optional[Task]:
return self.tasks.get(task_id)
# [/DEF:TaskManager.get_task:Function]
# [/DEF:get_task:Function]
# [DEF:TaskManager.get_all_tasks:Function]
# [DEF:get_all_tasks:Function]
# @PURPOSE: Retrieves all registered tasks.
# @RETURN: List[Task] - All tasks.
def get_all_tasks(self) -> List[Task]:
return list(self.tasks.values())
# [/DEF:TaskManager.get_all_tasks:Function]
# [/DEF:get_all_tasks:Function]
# [DEF:TaskManager.get_tasks:Function]
# [DEF:get_tasks:Function]
# @PURPOSE: Retrieves tasks with pagination and optional status filter.
# @PRE: limit and offset are non-negative integers.
# @POST: Returns a list of tasks sorted by start_time descending.
@@ -214,18 +214,18 @@ class TaskManager:
# Sort by start_time descending (most recent first)
tasks.sort(key=lambda t: t.started_at or datetime.min, reverse=True)
return tasks[offset:offset + limit]
# [/DEF:TaskManager.get_tasks:Function]
# [/DEF:get_tasks:Function]
# [DEF:TaskManager.get_task_logs:Function]
# [DEF:get_task_logs:Function]
# @PURPOSE: Retrieves logs for a specific task.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: List[LogEntry] - List of log entries.
def get_task_logs(self, task_id: str) -> List[LogEntry]:
task = self.tasks.get(task_id)
return task.logs if task else []
# [/DEF:TaskManager.get_task_logs:Function]
# [/DEF:get_task_logs:Function]
# [DEF:TaskManager._add_log:Function]
# [DEF:_add_log:Function]
# @PURPOSE: Adds a log entry to a task and notifies subscribers.
# @PRE: Task exists.
# @POST: Log added to task and pushed to queues.
@@ -246,9 +246,9 @@ class TaskManager:
if task_id in self.subscribers:
for queue in self.subscribers[task_id]:
self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
# [/DEF:TaskManager._add_log:Function]
# [/DEF:_add_log:Function]
# [DEF:TaskManager.subscribe_logs:Function]
# [DEF:subscribe_logs:Function]
# @PURPOSE: Subscribes to real-time logs for a task.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: asyncio.Queue - Queue for log entries.
@@ -258,9 +258,9 @@ class TaskManager:
self.subscribers[task_id] = []
self.subscribers[task_id].append(queue)
return queue
# [/DEF:TaskManager.subscribe_logs:Function]
# [/DEF:subscribe_logs:Function]
# [DEF:TaskManager.unsubscribe_logs:Function]
# [DEF:unsubscribe_logs:Function]
# @PURPOSE: Unsubscribes from real-time logs for a task.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: queue (asyncio.Queue) - Queue to remove.
@@ -270,18 +270,18 @@ class TaskManager:
self.subscribers[task_id].remove(queue)
if not self.subscribers[task_id]:
del self.subscribers[task_id]
# [/DEF:TaskManager.unsubscribe_logs:Function]
# [/DEF:unsubscribe_logs:Function]
# [DEF:TaskManager.load_persisted_tasks:Function]
# [DEF:load_persisted_tasks:Function]
# @PURPOSE: Loads persisted tasks using the persistence service.
def load_persisted_tasks(self) -> None:
loaded_tasks = self.persistence_service.load_tasks(limit=100)
for task in loaded_tasks:
if task.id not in self.tasks:
self.tasks[task.id] = task
# [/DEF:TaskManager.load_persisted_tasks:Function]
# [/DEF:load_persisted_tasks:Function]
# [DEF:TaskManager.await_input:Function]
# [DEF:await_input:Function]
# @PURPOSE: Transition a task to AWAITING_INPUT state with input request.
# @PRE: Task exists and is in RUNNING state.
# @POST: Task status changed to AWAITING_INPUT, input_request set, persisted.
@@ -301,9 +301,9 @@ class TaskManager:
task.input_request = input_request
self.persistence_service.persist_task(task)
self._add_log(task_id, "INFO", "Task paused for user input", {"input_request": input_request})
# [/DEF:TaskManager.await_input:Function]
# [/DEF:await_input:Function]
# [DEF:TaskManager.resume_task_with_password:Function]
# [DEF:resume_task_with_password:Function]
# @PURPOSE: Resume a task that is awaiting input with provided passwords.
# @PRE: Task exists and is in AWAITING_INPUT state.
# @POST: Task status changed to RUNNING, passwords injected, task resumed.
@@ -330,9 +330,9 @@ class TaskManager:
if task_id in self.task_futures:
self.task_futures[task_id].set_result(True)
# [/DEF:TaskManager.resume_task_with_password:Function]
# [/DEF:resume_task_with_password:Function]
# [DEF:TaskManager.clear_tasks:Function]
# [DEF:clear_tasks:Function]
# @PURPOSE: Clears tasks based on status filter.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @RETURN: int - Number of tasks cleared.
@@ -370,7 +370,7 @@ class TaskManager:
logger.info(f"Cleared {len(tasks_to_remove)} tasks.")
return len(tasks_to_remove)
# [/DEF:TaskManager.clear_tasks:Function]
# [/DEF:clear_tasks:Function]
# [/DEF:TaskManager:Class]
# [/DEF:TaskManagerModule:Module]
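
A minimal usage sketch of the task lifecycle, assuming an initialized `task_manager` and a registered plugin id `"example_plugin"` (both names, and the exact `create_task` signature, are assumptions not shown in this diff):

    task = task_manager.create_task("example_plugin", params={})  # queued and scheduled
    queue = task_manager.subscribe_logs(task.id)                  # real-time log stream
    entry = await queue.get()                                     # next LogEntry (inside a coroutine)
    task_manager.unsubscribe_logs(task.id, queue)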

View File

@@ -53,7 +53,7 @@ class Task(BaseModel):
input_request: Optional[Dict[str, Any]] = None
result: Optional[Dict[str, Any]] = None
# [DEF:Task.__init__:Function]
# [DEF:__init__:Function]
# @PURPOSE: Initializes the Task model and validates input_request for AWAITING_INPUT status.
# @PRE: If status is AWAITING_INPUT, input_request must be provided.
# @POST: Task instance is created or ValueError is raised.
@@ -62,7 +62,7 @@ class Task(BaseModel):
super().__init__(**data)
if self.status == TaskStatus.AWAITING_INPUT and not self.input_request:
raise ValueError("input_request is required when status is AWAITING_INPUT")
# [/DEF:Task.__init__:Function]
# [/DEF:__init__:Function]
# [/DEF:Task:Class]
# [/DEF:TaskManagerModels:Module]
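
The overridden `__init__` enforces the invariant after Pydantic's own field validation runs. A hedged sketch (any other required `Task` fields are elided here for illustration and would need to be supplied in real code):

    try:
        Task(status=TaskStatus.AWAITING_INPUT)   # no input_request supplied
    except ValueError as e:
        print(e)   # input_request is required when status is AWAITING_INPUT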

View File

@@ -21,11 +21,16 @@ from ..logger import logger, belief_scope
# @SEMANTICS: persistence, service, database, sqlalchemy
# @PURPOSE: Provides methods to save and load tasks from the tasks.db database using SQLAlchemy.
class TaskPersistenceService:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the persistence service.
# @PRE: None.
# @POST: Service is ready.
def __init__(self):
# We use TasksSessionLocal from database.py
pass
# [/DEF:__init__:Function]
# [DEF:TaskPersistenceService.persist_task:Function]
# [DEF:persist_task:Function]
# @PURPOSE: Persists or updates a single task in the database.
# @PARAM: task (Task) - The task object to persist.
def persist_task(self, task: Task) -> None:
@@ -66,17 +71,17 @@ class TaskPersistenceService:
logger.error(f"Failed to persist task {task.id}: {e}")
finally:
session.close()
# [/DEF:TaskPersistenceService.persist_task:Function]
# [/DEF:persist_task:Function]
# [DEF:TaskPersistenceService.persist_tasks:Function]
# [DEF:persist_tasks:Function]
# @PURPOSE: Persists multiple tasks.
# @PARAM: tasks (List[Task]) - The list of tasks to persist.
def persist_tasks(self, tasks: List[Task]) -> None:
for task in tasks:
self.persist_task(task)
# [/DEF:TaskPersistenceService.persist_tasks:Function]
# [/DEF:persist_tasks:Function]
# [DEF:TaskPersistenceService.load_tasks:Function]
# [DEF:load_tasks:Function]
# @PURPOSE: Loads tasks from the database.
# @PARAM: limit (int) - Max tasks to load.
# @PARAM: status (Optional[TaskStatus]) - Filter by status.
@@ -119,9 +124,9 @@ class TaskPersistenceService:
return loaded_tasks
finally:
session.close()
# [/DEF:TaskPersistenceService.load_tasks:Function]
# [/DEF:load_tasks:Function]
# [DEF:TaskPersistenceService.delete_tasks:Function]
# [DEF:delete_tasks:Function]
# @PURPOSE: Deletes specific tasks from the database.
# @PARAM: task_ids (List[str]) - List of task IDs to delete.
def delete_tasks(self, task_ids: List[str]) -> None:
@@ -137,7 +142,7 @@ class TaskPersistenceService:
logger.error(f"Failed to delete tasks: {e}")
finally:
session.close()
# [/DEF:TaskPersistenceService.delete_tasks:Function]
# [/DEF:delete_tasks:Function]
# [/DEF:TaskPersistenceService:Class]
# [/DEF:TaskPersistenceModule:Module]
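
A minimal round-trip sketch of the persistence API shown above (the `task` object is assumed to be a valid `Task`):

    service = TaskPersistenceService()               # no constructor arguments
    service.persist_task(task)                       # insert or update one task
    recent = service.load_tasks(limit=50)            # most recent tasks
    service.delete_tasks([t.id for t in recent])     # delete by id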

View File

@@ -13,7 +13,14 @@ import os
import re
import json
import datetime
from typing import Dict, List, Optional, Any, Pattern, Tuple
import fnmatch
from typing import Dict, List, Optional, Any, Pattern, Tuple, Set
# Mock belief_scope for the script itself to avoid import issues
class belief_scope:
def __init__(self, name): self.name = name
def __enter__(self): return self
def __exit__(self, *args): pass
# [/SECTION]
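
The mock simply satisfies the `with belief_scope(...)` calls that the annotated code (and now the script itself) makes; it is a no-op context manager:

    with belief_scope("demo") as scope:
        print(scope.name)   # "demo"; __exit__ returns None, so exceptions still propagate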
# [SECTION: CONFIGURATION]
@@ -30,9 +37,9 @@ OUTPUT_COMPRESSED_MD = "specs/project_map.md"
REPORTS_DIR = "semantics/reports"
MANDATORY_TAGS = {
"Module": ["PURPOSE", "LAYER"],
"Component": ["PURPOSE", "LAYER"],
"Function": ["PURPOSE"],
"Module": ["PURPOSE", "LAYER", "SEMANTICS"],
"Component": ["PURPOSE", "LAYER", "SEMANTICS"],
"Function": ["PURPOSE", "PRE", "POST"],
"Class": ["PURPOSE"]
}
# [/SECTION]
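
With the stricter MANDATORY_TAGS, a Python function now needs @PURPOSE, @PRE, and @POST (and, per the new check further down, a belief_scope call) to pass validation. A sketch of a compliant block:

    # [DEF:example:Function]
    # @PURPOSE: Illustrates a fully tagged function.
    # @PRE: x is an int.
    # @POST: Returns x + 1.
    def example(x: int) -> int:
        with belief_scope("example"):
            return x + 1
    # [/DEF:example:Function]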
@@ -41,108 +48,133 @@ MANDATORY_TAGS = {
# @PURPOSE: Represents a code entity (Module, Function, Component) found during parsing.
# @INVARIANT: start_line is always set; end_line is set upon closure.
class SemanticEntity:
# [DEF:__init__:Function]
# @PURPOSE: Initializes a new SemanticEntity instance.
# @PRE: name, type_, start_line, file_path are provided.
# @POST: Instance is initialized with default values.
def __init__(self, name: str, type_: str, start_line: int, file_path: str):
self.name = name
self.type = type_
self.start_line = start_line
self.end_line: Optional[int] = None
self.file_path = file_path
self.tags: Dict[str, str] = {}
self.relations: List[Dict[str, str]] = []
self.children: List['SemanticEntity'] = []
self.parent: Optional['SemanticEntity'] = None
self.compliance_issues: List[str] = []
with belief_scope("__init__"):
self.name = name
self.type = type_
self.start_line = start_line
self.end_line: Optional[int] = None
self.file_path = file_path
self.tags: Dict[str, str] = {}
self.relations: List[Dict[str, str]] = []
self.children: List['SemanticEntity'] = []
self.parent: Optional['SemanticEntity'] = None
self.compliance_issues: List[str] = []
# [/DEF:__init__:Function]
# [DEF:to_dict:Function]
# @PURPOSE: Serializes the entity to a dictionary for JSON output.
# @PRE: Entity is fully populated.
# @POST: Returns a dictionary representation.
# @RETURN: Dict representation of the entity.
def to_dict(self) -> Dict[str, Any]:
return {
"name": self.name,
"type": self.type,
"start_line": self.start_line,
"end_line": self.end_line,
"tags": self.tags,
"relations": self.relations,
"children": [c.to_dict() for c in self.children],
"compliance": {
"valid": len(self.compliance_issues) == 0,
"issues": self.compliance_issues
with belief_scope("to_dict"):
return {
"name": self.name,
"type": self.type,
"start_line": self.start_line,
"end_line": self.end_line,
"tags": self.tags,
"relations": self.relations,
"children": [c.to_dict() for c in self.children],
"compliance": {
"valid": len(self.compliance_issues) == 0,
"issues": self.compliance_issues
}
}
}
# [/DEF:to_dict:Function]
# [DEF:validate:Function]
# @PURPOSE: Checks for semantic compliance (closure, mandatory tags).
# @PURPOSE: Checks for semantic compliance (closure, mandatory tags, belief state).
# @PRE: Entity structure is complete.
# @POST: Populates self.compliance_issues.
def validate(self):
# 1. Check Closure
if self.end_line is None:
self.compliance_issues.append(f"Unclosed Anchor: [DEF:{self.name}:{self.type}] started at line {self.start_line}")
# 2. Check Mandatory Tags
required = MANDATORY_TAGS.get(self.type, [])
for req_tag in required:
found = False
for existing_tag in self.tags:
if existing_tag.upper() == req_tag:
found = True
break
if not found:
self.compliance_issues.append(f"Missing Mandatory Tag: @{req_tag}")
with belief_scope("validate"):
# 1. Check Closure
if self.end_line is None:
self.compliance_issues.append(f"Unclosed Anchor: [DEF:{self.name}:{self.type}] started at line {self.start_line}")
# 2. Check Mandatory Tags
required = MANDATORY_TAGS.get(self.type, [])
for req_tag in required:
found = False
for existing_tag in self.tags:
if existing_tag.upper() == req_tag:
found = True
break
if not found:
self.compliance_issues.append(f"Missing Mandatory Tag: @{req_tag}")
# Recursive validation
for child in self.children:
child.validate()
# 3. Check for Belief State Logging (Python only)
if self.type == "Function" and self.file_path.endswith(".py"):
if not getattr(self, 'has_belief_scope', False):
self.compliance_issues.append("Missing Belief State Logging: Function should use belief_scope context manager.")
# Recursive validation
for child in self.children:
child.validate()
# [/DEF:validate:Function]
# [DEF:get_score:Function]
# @PURPOSE: Calculates a compliance score (0.0 to 1.0).
# @PRE: validate() has been called.
# @POST: Returns a float score.
# @RETURN: Float score.
def get_score(self) -> float:
if self.end_line is None:
return 0.0
score = 1.0
required = MANDATORY_TAGS.get(self.type, [])
if required:
found_count = 0
for req_tag in required:
for existing_tag in self.tags:
if existing_tag.upper() == req_tag:
found_count += 1
break
if found_count < len(required):
# Penalty proportional to missing tags
score -= 0.5 * (1 - (found_count / len(required)))
return max(0.0, score)
with belief_scope("get_score"):
if self.end_line is None:
return 0.0
score = 1.0
required = MANDATORY_TAGS.get(self.type, [])
if required:
found_count = 0
for req_tag in required:
for existing_tag in self.tags:
if existing_tag.upper() == req_tag:
found_count += 1
break
if found_count < len(required):
# Penalty proportional to missing tags
score -= 0.5 * (1 - (found_count / len(required)))
return max(0.0, score)
# [/DEF:get_score:Function]
# [/DEF:SemanticEntity:Class]
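
Worked example of the scoring penalty: a closed Function anchor carrying only @PURPOSE has 1 of 3 mandatory tags, so its score is 1.0 - 0.5 * (1 - 1/3), roughly 0.67. As a quick check against the code above:

    e = SemanticEntity("f", "Function", 1, "x.py")
    e.end_line = 10                    # closed anchor, so the base score is 1.0
    e.tags["PURPOSE"] = "..."          # 1 of 3 mandatory tags present
    assert abs(e.get_score() - (1 - 0.5 * (1 - 1 / 3))) < 1e-9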
# [DEF:get_patterns:Function]
# @PURPOSE: Returns regex patterns for a specific language.
# @PRE: lang is either 'python' or 'svelte_js'.
# @POST: Returns a dictionary of compiled regex patterns.
# @PARAM: lang (str) - 'python' or 'svelte_js'
# @RETURN: Dict containing compiled regex patterns.
def get_patterns(lang: str) -> Dict[str, Pattern]:
if lang == "python":
return {
"anchor_start": re.compile(r"#\s*\[DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"anchor_end": re.compile(r"#\s*\[/DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"tag": re.compile(r"#\s*@(?P<tag>[A-Z_]+):\s*(?P<value>.*)"),
"relation": re.compile(r"#\s*@RELATION:\s*(?P<type>\w+)\s*->\s*(?P<target>.*)"),
}
else:
return {
"html_anchor_start": re.compile(r"<!--\s*\[DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]\s*-->"),
"html_anchor_end": re.compile(r"<!--\s*\[/DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]\s*-->"),
"js_anchor_start": re.compile(r"//\s*\[DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"js_anchor_end": re.compile(r"//\s*\[/DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"html_tag": re.compile(r"@(?P<tag>[A-Z_]+):\s*(?P<value>.*)"),
"jsdoc_tag": re.compile(r"\*\s*@(?P<tag>[a-zA-Z]+)\s+(?P<value>.*)"),
"relation": re.compile(r"//\s*@RELATION:\s*(?P<type>\w+)\s*->\s*(?P<target>.*)"),
}
with belief_scope("get_patterns"):
if lang == "python":
return {
"anchor_start": re.compile(r"#\s*\[DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"anchor_end": re.compile(r"#\s*\[/DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"tag": re.compile(r"#\s*@(?P<tag>[A-Z_]+):\s*(?P<value>.*)"),
"relation": re.compile(r"#\s*@RELATION:\s*(?P<type>\w+)\s*->\s*(?P<target>.*)"),
"func_def": re.compile(r"^\s*(async\s+)?def\s+(?P<name>\w+)"),
"belief_scope": re.compile(r"with\s+belief_scope\("),
}
else:
return {
"html_anchor_start": re.compile(r"<!--\s*\[DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]\s*-->"),
"html_anchor_end": re.compile(r"<!--\s*\[/DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]\s*-->"),
"js_anchor_start": re.compile(r"//\s*\[DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"js_anchor_end": re.compile(r"//\s*\[/DEF:(?P<name>[\w\.]+):(?P<type>\w+)\]"),
"html_tag": re.compile(r"@(?P<tag>[A-Z_]+):\s*(?P<value>.*)"),
"jsdoc_tag": re.compile(r"\*\s*@(?P<tag>[a-zA-Z]+)\s+(?P<value>.*)"),
"relation": re.compile(r"//\s*@RELATION:\s*(?P<type>\w+)\s*->\s*(?P<target>.*)"),
"func_def": re.compile(r"^\s*(export\s+)?(async\s+)?function\s+(?P<name>\w+)"),
}
# [/DEF:get_patterns:Function]
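
A quick check of the two new Python patterns (the input strings are illustrative):

    pats = get_patterns("python")
    m = pats["func_def"].match("    async def create_task(self):")
    assert m and m.group("name") == "create_task"
    assert pats["belief_scope"].search('    with belief_scope("create_task"):')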
@@ -213,7 +245,22 @@ def parse_file(full_path: str, rel_path: str, lang: str) -> Tuple[List[SemanticE
issues.append(f"{rel_path}:{lineno} Mismatched closing anchor. Expected [/DEF:{top.name}:{top.type}], found [/DEF:{name}:{type_}].")
continue
# 3. Check for Tags/Relations
# 3. Check for Naked Functions (Missing Contracts)
if "func_def" in patterns:
match_func = patterns["func_def"].search(line)
if match_func:
func_name = match_func.group("name")
is_covered = False
if stack:
current = stack[-1]
# Check if we are inside a Function anchor that matches the name
if current.type == "Function" and current.name == func_name:
is_covered = True
if not is_covered:
issues.append(f"{rel_path}:{lineno} Function '{func_name}' implementation found without matching [DEF:{func_name}:Function] contract.")
# 4. Check for Tags/Relations
if stack:
current = stack[-1]
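
With the naked-function check above, any `def` whose name is not covered by the innermost open Function anchor is reported. For example, this (illustrative) input would be flagged:

    def orphan():   # no surrounding [DEF:orphan:Function] anchor
        pass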
@@ -238,6 +285,11 @@ def parse_file(full_path: str, rel_path: str, lang: str) -> Tuple[List[SemanticE
tag_value = match_tag.group("value").strip()
current.tags[tag_name] = tag_value
# Check for belief scope in implementation
if lang == "python" and "belief_scope" in patterns:
if patterns["belief_scope"].search(line):
current.has_belief_scope = True
# End of file check
if stack:
for unclosed in stack:
@@ -252,14 +304,79 @@ def parse_file(full_path: str, rel_path: str, lang: str) -> Tuple[List[SemanticE
# [DEF:SemanticMapGenerator:Class]
# @PURPOSE: Orchestrates the mapping process.
class SemanticMapGenerator:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the generator with a root directory.
# @PRE: root_dir is a valid path string.
# @POST: Generator instance is ready.
def __init__(self, root_dir: str):
self.root_dir = root_dir
self.entities: List[SemanticEntity] = []
self.file_scores: Dict[str, float] = {}
self.global_issues: List[str] = []
self.ignored_patterns = self._load_gitignore()
# [/DEF:__init__:Function]
# [DEF:_load_gitignore:Function]
# @PURPOSE: Loads patterns from .gitignore file.
# @RETURN: Set of patterns to ignore.
def _load_gitignore(self) -> Set[str]:
patterns = set()
ignore_file = os.path.join(self.root_dir, ".gitignore")
if os.path.exists(ignore_file):
with open(ignore_file, 'r') as f:
for line in f:
line = line.strip()
if line and not line.startswith("#"):
patterns.add(line)
return patterns
# [/DEF:_load_gitignore:Function]
# [DEF:_is_ignored:Function]
# @PURPOSE: Checks if a path should be ignored based on .gitignore or hardcoded defaults.
# @PRE: rel_path is a valid relative path string.
# @POST: Returns True if the path should be ignored.
# @PARAM: rel_path (str) - Path relative to root.
# @RETURN: bool - True if ignored.
def _is_ignored(self, rel_path: str) -> bool:
# Normalize path for matching
rel_path = rel_path.replace(os.sep, '/')
# Check hardcoded defaults
parts = rel_path.split('/')
for part in parts:
if part in IGNORE_DIRS:
return True
if os.path.basename(rel_path) in IGNORE_FILES:
return True
# Check gitignore patterns
for pattern in self.ignored_patterns:
# Handle directory patterns like 'node_modules/'
if pattern.endswith('/'):
dir_pattern = pattern.rstrip('/')
if rel_path == dir_pattern or rel_path.startswith(pattern):
return True
# Check for patterns in frontend/ or backend/
if rel_path.startswith("frontend/") and fnmatch.fnmatch(rel_path[9:], pattern):
return True
if rel_path.startswith("backend/") and fnmatch.fnmatch(rel_path[8:], pattern):
return True
# Use fnmatch for glob patterns
if fnmatch.fnmatch(rel_path, pattern) or \
fnmatch.fnmatch(os.path.basename(rel_path), pattern) or \
any(fnmatch.fnmatch(part, pattern) for part in parts):
return True
return False
# [/DEF:_is_ignored:Function]
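
_is_ignored layers the hardcoded IGNORE_DIRS/IGNORE_FILES defaults under the .gitignore globs, and re-tests each pattern with the frontend/ or backend/ prefix stripped. A small sketch of the matching behavior (the pattern set is illustrative):

    gen = SemanticMapGenerator(".")
    gen.ignored_patterns = {"node_modules/", "*.log"}
    assert gen._is_ignored("node_modules/pkg/index.js")   # directory pattern
    assert gen._is_ignored("backend/app.log")             # glob re-tested under backend/
    assert not gen._is_ignored("backend/main.py")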
# [DEF:run:Function]
# @PURPOSE: Main execution flow.
# @PRE: Generator is initialized.
# @POST: Semantic map and reports are generated.
# @RELATION: CALLS -> _walk_and_parse
# @RELATION: CALLS -> _generate_artifacts
def run(self):
@@ -271,17 +388,20 @@ class SemanticMapGenerator:
# [DEF:_walk_and_parse:Function]
# @PURPOSE: Recursively walks directories and triggers parsing.
# @PRE: root_dir exists.
# @POST: All files are scanned and entities extracted.
def _walk_and_parse(self):
for root, dirs, files in os.walk(self.root_dir):
dirs[:] = [d for d in dirs if d not in IGNORE_DIRS]
# Optimization: don't enter ignored directories
dirs[:] = [d for d in dirs if not self._is_ignored(os.path.relpath(os.path.join(root, d), self.root_dir) + "/")]
for file in files:
if file in IGNORE_FILES:
continue
file_path = os.path.join(root, file)
rel_path = os.path.relpath(file_path, self.root_dir)
if self._is_ignored(rel_path):
continue
lang = None
if file.endswith(".py"):
lang = "python"
@@ -298,10 +418,16 @@ class SemanticMapGenerator:
# [DEF:_process_file_results:Function]
# @PURPOSE: Validates entities and calculates file scores.
# @PRE: Entities have been parsed from the file.
# @POST: File score is calculated and issues collected.
def _process_file_results(self, rel_path: str, entities: List[SemanticEntity]):
total_score = 0
count = 0
# [DEF:validate_recursive:Function]
# @PURPOSE: Recursively validates a list of entities.
# @PRE: ent_list is a list of SemanticEntity objects.
# @POST: All entities and their children are validated.
def validate_recursive(ent_list):
nonlocal total_score, count
for e in ent_list:
@@ -309,6 +435,7 @@ class SemanticMapGenerator:
total_score += e.get_score()
count += 1
validate_recursive(e.children)
# [/DEF:validate_recursive:Function]
validate_recursive(entities)
@@ -318,6 +445,8 @@ class SemanticMapGenerator:
# [DEF:_generate_artifacts:Function]
# @PURPOSE: Writes output files.
# @PRE: Parsing and validation are complete.
# @POST: JSON and Markdown artifacts are written to disk.
def _generate_artifacts(self):
# 1. Full JSON Map
full_map = {
@@ -340,6 +469,8 @@ class SemanticMapGenerator:
# [DEF:_generate_report:Function]
# @PURPOSE: Generates the Markdown compliance report.
# @PRE: File scores and issues are available.
# @POST: Markdown report is created in reports directory.
def _generate_report(self):
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
report_path = os.path.join(REPORTS_DIR, f"semantic_report_{timestamp}.md")
@@ -379,6 +510,8 @@ class SemanticMapGenerator:
# [DEF:_collect_issues:Function]
# @PURPOSE: Helper to collect issues for a specific file from the entity tree.
# @PRE: entities list and file_path are valid.
# @POST: issues list is populated with compliance issues.
def _collect_issues(self, entities: List[SemanticEntity], file_path: str, issues: List[str]):
for e in entities:
if e.file_path == file_path:
@@ -388,6 +521,8 @@ class SemanticMapGenerator:
# [DEF:_generate_compressed_map:Function]
# @PURPOSE: Generates the token-optimized project map.
# @PRE: Entities have been processed.
# @POST: Markdown project map is written.
def _generate_compressed_map(self):
os.makedirs(os.path.dirname(OUTPUT_COMPRESSED_MD), exist_ok=True)
@@ -403,6 +538,8 @@ class SemanticMapGenerator:
# [DEF:_write_entity_md:Function]
# @PURPOSE: Recursive helper to write entity tree to Markdown.
# @PRE: f is an open file handle, entity is valid.
# @POST: Entity details are written to the file.
def _write_entity_md(self, f, entity: SemanticEntity, level: int):
indent = " " * level
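
End to end, the generator is driven by run(): a sketch of invoking it from the repository root (assuming the script's classes are importable):

    generator = SemanticMapGenerator(".")
    generator.run()   # writes specs/project_map.md plus a timestamped report under semantics/reports/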

View File

@@ -30,8 +30,9 @@ class Migration:
"""
Interactive dashboard migration workflow.
"""
# [DEF:Migration.__init__:Function]
# [DEF:__init__:Function]
# @PURPOSE: Initializes the migration service, configures the logger and initial state.
# @PRE: None.
# @POST: `self.logger` is ready for use; `enable_delete_on_failure` is `False`.
def __init__(self) -> None:
default_log_dir = Path.cwd() / "logs"
@@ -47,9 +48,9 @@ class Migration:
self.dashboards_to_migrate: List[dict] = []
self.db_config_replacement: Optional[dict] = None
self._failed_imports: List[dict] = []
# [/DEF:Migration.__init__:Function]
# [/DEF:__init__:Function]
# [DEF:Migration.run:Function]
# [DEF:run:Function]
# @PURPOSE: Entry point that runs all migration steps in sequence.
# @PRE: The logger is ready.
# @POST: The script has finished and a message has been shown to the user.
@@ -66,9 +67,9 @@ class Migration:
self.confirm_db_config_replacement()
self.execute_migration()
self.logger.info("[run][Exit] Скрипт миграции завершён.")
# [/DEF:Migration.run:Function]
# [/DEF:run:Function]
# [DEF:Migration.ask_delete_on_failure:Function]
# [DEF:ask_delete_on_failure:Function]
# @PURPOSE: Asks the user whether a dashboard should be deleted when an import fails.
# @POST: `self.enable_delete_on_failure` is set.
# @RELATION: CALLS -> yesno
@@ -81,9 +82,9 @@ class Migration:
"[ask_delete_on_failure][State] Delete-on-failure = %s",
self.enable_delete_on_failure,
)
# [/DEF:Migration.ask_delete_on_failure:Function]
# [/DEF:ask_delete_on_failure:Function]
# [DEF:Migration.select_environments:Function]
# [DEF:select_environments:Function]
# @PURPOSE: Lets the user select the source and target Superset environments.
# @PRE: `setup_clients` successfully initializes all clients.
# @POST: `self.from_c` and `self.to_c` are set.
@@ -122,9 +123,9 @@ class Migration:
self.to_c = all_clients[to_env_name]
self.logger.info("[select_environments][State] to = %s", to_env_name)
self.logger.info("[select_environments][Exit] Шаг 1 завершён.")
# [/DEF:Migration.select_environments:Function]
# [/DEF:select_environments:Function]
# [DEF:Migration.select_dashboards:Function]
# [DEF:select_dashboards:Function]
# @PURPOSE: Lets the user select the set of dashboards to migrate.
# @PRE: `self.from_c` is initialized.
# @POST: `self.dashboards_to_migrate` is populated.
@@ -179,9 +180,9 @@ class Migration:
self.logger.error("[select_dashboards][Failure] %s", e, exc_info=True)
msgbox("Ошибка", "Не удалось получить список дашбордов.")
self.logger.info("[select_dashboards][Exit] Шаг 2 завершён.")
# [/DEF:Migration.select_dashboards:Function]
# [/DEF:select_dashboards:Function]
# [DEF:Migration.confirm_db_config_replacement:Function]
# [DEF:confirm_db_config_replacement:Function]
# @PURPOSE: Asks the user whether database names in the YAML files need to be replaced.
# @POST: `self.db_config_replacement` is either `None` or populated.
# @RELATION: CALLS -> yesno
@@ -214,9 +215,9 @@ class Migration:
self.logger.info("[confirm_db_config_replacement][State] Replacement set: %s", self.db_config_replacement)
else:
self.logger.info("[confirm_db_config_replacement][State] Skipped.")
# [/DEF:Migration.confirm_db_config_replacement:Function]
# [/DEF:confirm_db_config_replacement:Function]
# [DEF:Migration._select_databases:Function]
# [DEF:_select_databases:Function]
# @PURPOSE: Lets the user select the source and target databases via the API.
# @POST: Returns a tuple (old DB, new DB), or (None, None) on cancel.
# @RELATION: CALLS -> self.from_c.get_databases
@@ -293,9 +294,9 @@ class Migration:
self.logger.info("[_select_databases][Exit] Selected databases: %s -> %s", from_db.get("database_name", "Без имени"), to_db.get("database_name", "Без имени"))
return from_db, to_db
# [/DEF:Migration._select_databases:Function]
# [/DEF:_select_databases:Function]
# [DEF:Migration._batch_delete_by_ids:Function]
# [DEF:_batch_delete_by_ids:Function]
# @PURPOSE: Deletes a set of dashboards by their IDs in a single request.
# @PRE: `ids` is a non-empty list of integers.
# @POST: All specified dashboards are deleted (if they existed).
@@ -319,9 +320,9 @@ class Migration:
self.logger.warning("[_batch_delete_by_ids][Warning] Unexpected delete response: %s", response)
else:
self.logger.info("[_batch_delete_by_ids][Success] Delete request completed.")
# [/DEF:Migration._batch_delete_by_ids:Function]
# [/DEF:_batch_delete_by_ids:Function]
# [DEF:Migration.execute_migration:Function]
# [DEF:execute_migration:Function]
# @PURPOSE: Performs the dashboard export/import, handles errors, and runs the recovery procedure when needed.
# @PRE: `self.dashboards_to_migrate` is not empty; `self.from_c` and `self.to_c` are initialized.
# @POST: Successful dashboards are imported; failed ones are restored or logged.
@@ -391,7 +392,7 @@ class Migration:
self.logger.info("[execute_migration][Exit] Migration finished.")
msgbox("Информация", "Миграция завершена!")
# [/DEF:Migration.execute_migration:Function]
# [/DEF:execute_migration:Function]
# [/DEF:Migration:Class]

File diff suppressed because it is too large.

View File

@@ -7,10 +7,12 @@
- 🏗️ Layer: DevOps/Tooling
- **SemanticEntity** (`Class`)
- 📝 Represents a code entity (Module, Function, Component) found during parsing.
- ƒ **__init__** (`Function`)
- 📝 Initializes a new SemanticEntity instance.
- ƒ **to_dict** (`Function`)
- 📝 Serializes the entity to a dictionary for JSON output.
- ƒ **validate** (`Function`)
- 📝 Checks for semantic compliance (closure, mandatory tags).
- 📝 Checks for semantic compliance (closure, mandatory tags, belief state).
- ƒ **get_score** (`Function`)
- 📝 Calculates a compliance score (0.0 to 1.0).
- ƒ **get_patterns** (`Function`)
@@ -19,6 +21,12 @@
- 📝 Parses a single file to extract semantic entities.
- **SemanticMapGenerator** (`Class`)
- 📝 Orchestrates the mapping process.
- ƒ **__init__** (`Function`)
- 📝 Initializes the generator with a root directory.
- ƒ **_load_gitignore** (`Function`)
- 📝 Loads patterns from .gitignore file.
- ƒ **_is_ignored** (`Function`)
- 📝 Checks if a path should be ignored based on .gitignore or hardcoded defaults.
- ƒ **run** (`Function`)
- 📝 Main execution flow.
- 🔗 CALLS -> `_walk_and_parse`
@@ -44,41 +52,41 @@
- 🔗 DEPENDS_ON -> `superset_tool.utils`
- **Migration** (`Class`)
- 📝 Encapsulates the interactive dashboard migration logic, with a delete-and-overwrite option on import failure.
- ƒ **Migration.__init__** (`Function`)
- ƒ **__init__** (`Function`)
- 📝 Initializes the migration service, configures the logger and initial state.
- ƒ **Migration.run** (`Function`)
- ƒ **run** (`Function`)
- 📝 Entry point that runs all migration steps in sequence.
- 🔗 CALLS -> `self.ask_delete_on_failure`
- 🔗 CALLS -> `self.select_environments`
- 🔗 CALLS -> `self.select_dashboards`
- 🔗 CALLS -> `self.confirm_db_config_replacement`
- 🔗 CALLS -> `self.execute_migration`
- ƒ **Migration.ask_delete_on_failure** (`Function`)
- ƒ **ask_delete_on_failure** (`Function`)
- 📝 Asks the user whether a dashboard should be deleted when an import fails.
- 🔗 CALLS -> `yesno`
- ƒ **Migration.select_environments** (`Function`)
- ƒ **select_environments** (`Function`)
- 📝 Lets the user select the source and target Superset environments.
- 🔗 CALLS -> `setup_clients`
- 🔗 CALLS -> `menu`
- ƒ **Migration.select_dashboards** (`Function`)
- ƒ **select_dashboards** (`Function`)
- 📝 Lets the user select the set of dashboards to migrate.
- 🔗 CALLS -> `self.from_c.get_dashboards`
- 🔗 CALLS -> `checklist`
- ƒ **Migration.confirm_db_config_replacement** (`Function`)
- ƒ **confirm_db_config_replacement** (`Function`)
- 📝 Asks the user whether database names in the YAML files need to be replaced.
- 🔗 CALLS -> `yesno`
- 🔗 CALLS -> `self._select_databases`
- ƒ **Migration._select_databases** (`Function`)
- ƒ **_select_databases** (`Function`)
- 📝 Lets the user select the source and target databases via the API.
- 🔗 CALLS -> `self.from_c.get_databases`
- 🔗 CALLS -> `self.to_c.get_databases`
- 🔗 CALLS -> `self.from_c.get_database`
- 🔗 CALLS -> `self.to_c.get_database`
- 🔗 CALLS -> `menu`
- ƒ **Migration._batch_delete_by_ids** (`Function`)
- ƒ **_batch_delete_by_ids** (`Function`)
- 📝 Deletes a set of dashboards by their IDs in a single request.
- 🔗 CALLS -> `self.to_c.network.request`
- ƒ **Migration.execute_migration** (`Function`)
- ƒ **execute_migration** (`Function`)
- 📝 Performs the dashboard export/import, handles errors, and runs the recovery procedure when needed.
- 🔗 CALLS -> `self.from_c.export_dashboard`
- 🔗 CALLS -> `create_temp_file`
@@ -92,39 +100,48 @@
- **SupersetToolError** (`Class`)
- 📝 Base class for all errors raised by the tool.
- 🔗 INHERITS_FROM -> `Exception`
- ƒ **__init__** (`Function`)
- 📝 Initializes the base tool error.
- **AuthenticationError** (`Class`)
- 📝 Errors related to authentication or authorization.
- 🔗 INHERITS_FROM -> `SupersetToolError`
- ƒ **__init__** (`Function`)
- 📝 Initializes an authentication error.
- **PermissionDeniedError** (`Class`)
- 📝 Error raised when access to a resource is denied.
- 🔗 INHERITS_FROM -> `AuthenticationError`
- ƒ **__init__** (`Function`)
- 📝 Initializes a permission denied error.
- **SupersetAPIError** (`Class`)
- 📝 General errors when interacting with the Superset API.
- 🔗 INHERITS_FROM -> `SupersetToolError`
- ƒ **__init__** (`Function`)
- 📝 Initializes a Superset API error.
- **ExportError** (`Class`)
- 📝 Errors specific to export operations.
- 🔗 INHERITS_FROM -> `SupersetAPIError`
- ƒ **__init__** (`Function`)
- 📝 Initializes an export error.
- **DashboardNotFoundError** (`Class`)
- 📝 Error when the requested dashboard or resource is not found (404).
- 🔗 INHERITS_FROM -> `SupersetAPIError`
- ƒ **__init__** (`Function`)
- 📝 Initializes a dashboard not found error.
- **DatasetNotFoundError** (`Class`)
- 📝 Error when the requested dataset does not exist (404).
- 🔗 INHERITS_FROM -> `SupersetAPIError`
- ƒ **__init__** (`Function`)
- 📝 Initializes a dataset not found error.
- **InvalidZipFormatError** (`Class`)
- 📝 Error indicating an invalid ZIP archive format or contents.
- 🔗 INHERITS_FROM -> `SupersetToolError`
- ƒ **__init__** (`Function`)
- 📝 Initializes an invalid ZIP format error.
- **NetworkError** (`Class`)
- 📝 Errors related to the network connection.
- 🔗 INHERITS_FROM -> `SupersetToolError`
- **FileOperationError** (`Class`)
- 📝 General file operation (I/O) errors.
- 🔗 INHERITS_FROM -> `SupersetToolError`
- **InvalidFileStructureError** (`Class`)
- 📝 Error indicating an invalid file or directory structure.
- 🔗 INHERITS_FROM -> `FileOperationError`
- **ConfigurationError** (`Class`)
- 📝 Errors caused by invalid tool configuration.
- 🔗 INHERITS_FROM -> `SupersetToolError`
- ƒ **__init__** (`Function`)
- 📝 Initializes a network error.
- 📦 **superset_tool.models** (`Module`)
- 📝 Defines Pydantic models for the tool configuration, providing data validation.
- 🏗️ Layer: Infra
@@ -133,14 +150,14 @@
- **SupersetConfig** (`Class`)
- 📝 Configuration model for connecting to a single Superset API instance.
- 🔗 INHERITS_FROM -> `pydantic.BaseModel`
- ƒ **SupersetConfig.validate_auth** (`Function`)
- ƒ **validate_auth** (`Function`)
- 📝 Verifies that the `auth` dictionary contains all fields required for authentication.
- ƒ **SupersetConfig.normalize_base_url** (`Function`)
- ƒ **normalize_base_url** (`Function`)
- 📝 Normalizes `base_url`, appending `/api/v1` if it is missing.
- **DatabaseConfig** (`Class`)
- 📝 Model for database transformation parameters during dashboard migration.
- 🔗 INHERITS_FROM -> `pydantic.BaseModel`
- ƒ **DatabaseConfig.validate_config** (`Function`)
- ƒ **validate_config** (`Function`)
- 📝 Verifies that the `database_config` dictionary contains the 'old' and 'new' keys.
- 📦 **superset_tool.client** (`Module`)
- 📝 Provides a high-level client for interacting with the Superset REST API, encapsulating request logic, error handling, and pagination.
@@ -150,62 +167,62 @@
- 🔗 DEPENDS_ON -> `superset_tool.utils`
- **SupersetClient** (`Class`)
- 📝 Wrapper class around the Superset REST API that provides methods for working with dashboards and datasets.
- ƒ **SupersetClient.__init__** (`Function`)
- ƒ **__init__** (`Function`)
- 📝 Initializes the client, validates the configuration, and creates the network client.
- ƒ **SupersetClient._validate_config** (`Function`)
- ƒ **_validate_config** (`Function`)
- 📝 Verifies that the supplied configuration object has the correct type.
- ƒ **SupersetClient.headers** (`Function`)
- ƒ **headers** (`Function`)
- 📝 Returns the base HTTP headers used by the network client.
- ƒ **SupersetClient.get_dashboards** (`Function`)
- ƒ **get_dashboards** (`Function`)
- 📝 Retrieves the full list of dashboards, handling pagination automatically.
- 🔗 CALLS -> `self._fetch_total_object_count`
- 🔗 CALLS -> `self._fetch_all_pages`
- ƒ **SupersetClient.export_dashboard** (`Function`)
- ƒ **export_dashboard** (`Function`)
- 📝 Exports a dashboard as a ZIP archive.
- 🔗 CALLS -> `self.network.request`
- ƒ **SupersetClient.import_dashboard** (`Function`)
- ƒ **import_dashboard** (`Function`)
- 📝 Imports a dashboard from a ZIP file, with optional automatic deletion and retry on failure.
- 🔗 CALLS -> `self._do_import`
- 🔗 CALLS -> `self.delete_dashboard`
- 🔗 CALLS -> `self.get_dashboards`
- ƒ **SupersetClient._resolve_target_id_for_delete** (`Function`)
- ƒ **_resolve_target_id_for_delete** (`Function`)
- 📝 Resolves the ID of the dashboard to delete, using an ID or slug.
- ƒ **SupersetClient._do_import** (`Function`)
- ƒ **_do_import** (`Function`)
- 📝 Performs a single import request without exception handling.
- ƒ **SupersetClient.delete_dashboard** (`Function`)
- ƒ **delete_dashboard** (`Function`)
- 📝 Deletes a dashboard by its ID or slug.
- 🔗 CALLS -> `self.network.request`
- ƒ **SupersetClient._extract_dashboard_id_from_zip** (`Function`)
- ƒ **_extract_dashboard_id_from_zip** (`Function`)
- 📝 Extracts the dashboard ID from `metadata.yaml` inside the ZIP archive.
- ƒ **SupersetClient._extract_dashboard_slug_from_zip** (`Function`)
- ƒ **_extract_dashboard_slug_from_zip** (`Function`)
- 📝 Extracts the dashboard slug from `metadata.yaml` inside the ZIP archive.
- ƒ **SupersetClient._validate_export_response** (`Function`)
- ƒ **_validate_export_response** (`Function`)
- 📝 Verifies that the export HTTP response is a valid ZIP archive.
- ƒ **SupersetClient._resolve_export_filename** (`Function`)
- ƒ **_resolve_export_filename** (`Function`)
- 📝 Resolves the export filename from the headers, or generates one.
- ƒ **SupersetClient._validate_query_params** (`Function`)
- ƒ **_validate_query_params** (`Function`)
- 📝 Builds a correct set of query parameters with pagination.
- ƒ **SupersetClient._fetch_total_object_count** (`Function`)
- ƒ **_fetch_total_object_count** (`Function`)
- 📝 Retrieves the total object count for the given endpoint, for pagination.
- ƒ **SupersetClient._fetch_all_pages** (`Function`)
- ƒ **_fetch_all_pages** (`Function`)
- 📝 Iterates over all pages of the paginated API and collects all the data.
- ƒ **SupersetClient._validate_import_file** (`Function`)
- ƒ **_validate_import_file** (`Function`)
- 📝 Verifies that the file exists, is a ZIP archive, and contains `metadata.yaml`.
- ƒ **SupersetClient.get_datasets** (`Function`)
- ƒ **get_datasets** (`Function`)
- 📝 Retrieves the full list of datasets, handling pagination automatically.
- 🔗 CALLS -> `self._fetch_total_object_count`
- 🔗 CALLS -> `self._fetch_all_pages`
- ƒ **SupersetClient.get_databases** (`Function`)
- ƒ **get_databases** (`Function`)
- 📝 Retrieves the full list of databases, handling pagination automatically.
- 🔗 CALLS -> `self._fetch_total_object_count`
- 🔗 CALLS -> `self._fetch_all_pages`
- ƒ **SupersetClient.get_dataset** (`Function`)
- ƒ **get_dataset** (`Function`)
- 📝 Retrieves information about a specific dataset by its ID.
- 🔗 CALLS -> `self.network.request`
- ƒ **SupersetClient.get_database** (`Function`)
- ƒ **get_database** (`Function`)
- 📝 Retrieves information about a specific database by its ID.
- 🔗 CALLS -> `self.network.request`
- ƒ **SupersetClient.update_dataset** (`Function`)
- ƒ **update_dataset** (`Function`)
- 📝 Updates a dataset's data by its ID.
- 🔗 CALLS -> `self.network.request`
- 📦 **superset_tool** (`Module`)
@@ -345,12 +362,6 @@
- 📦 **superset_tool.utils** (`Module`)
- 📝 Utility package for superset_tool.
- 🏗️ Layer: Infra
- ƒ **handleSort** (`Function`)
- 📝 Toggles sort direction or changes sort column.
- ƒ **handleSelectionChange** (`Function`)
- 📝 Handles individual checkbox changes.
- ƒ **handleSelectAll** (`Function`)
- 📝 Handles select all checkbox.
- 📦 **main** (`Module`)
- 📝 Entry point for the Svelte application.
- 🏗️ Layer: UI-Entry
@@ -782,52 +793,54 @@
- 🏗️ Layer: Core
- **TaskPersistenceService** (`Class`)
- 📝 Provides methods to save and load tasks from the tasks.db database using SQLAlchemy.
- ƒ **TaskPersistenceService.persist_task** (`Function`)
- ƒ **__init__** (`Function`)
- 📝 Initializes the persistence service.
- ƒ **persist_task** (`Function`)
- 📝 Persists or updates a single task in the database.
- ƒ **TaskPersistenceService.persist_tasks** (`Function`)
- ƒ **persist_tasks** (`Function`)
- 📝 Persists multiple tasks.
- ƒ **TaskPersistenceService.load_tasks** (`Function`)
- ƒ **load_tasks** (`Function`)
- 📝 Loads tasks from the database.
- ƒ **TaskPersistenceService.delete_tasks** (`Function`)
- ƒ **delete_tasks** (`Function`)
- 📝 Deletes specific tasks from the database.
- 📦 **TaskManagerModule** (`Module`)
- 📝 Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
- 🏗️ Layer: Core
- **TaskManager** (`Class`)
- 📝 Manages the lifecycle of tasks, including their creation, execution, and state tracking.
- ƒ **TaskManager.__init__** (`Function`)
- ƒ **__init__** (`Function`)
- 📝 Initialize the TaskManager with dependencies.
- ƒ **TaskManager.create_task** (`Function`)
- ƒ **create_task** (`Function`)
- 📝 Creates and queues a new task for execution.
- ƒ **TaskManager._run_task** (`Function`)
- ƒ **_run_task** (`Function`)
- 📝 Internal method to execute a task.
- ƒ **TaskManager.resolve_task** (`Function`)
- ƒ **resolve_task** (`Function`)
- 📝 Resumes a task that is awaiting mapping.
- ƒ **TaskManager.wait_for_resolution** (`Function`)
- ƒ **wait_for_resolution** (`Function`)
- 📝 Pauses execution and waits for a resolution signal.
- ƒ **TaskManager.wait_for_input** (`Function`)
- ƒ **wait_for_input** (`Function`)
- 📝 Pauses execution and waits for user input.
- ƒ **TaskManager.get_task** (`Function`)
- ƒ **get_task** (`Function`)
- 📝 Retrieves a task by its ID.
- ƒ **TaskManager.get_all_tasks** (`Function`)
- ƒ **get_all_tasks** (`Function`)
- 📝 Retrieves all registered tasks.
- ƒ **TaskManager.get_tasks** (`Function`)
- ƒ **get_tasks** (`Function`)
- 📝 Retrieves tasks with pagination and optional status filter.
- ƒ **TaskManager.get_task_logs** (`Function`)
- ƒ **get_task_logs** (`Function`)
- 📝 Retrieves logs for a specific task.
- ƒ **TaskManager._add_log** (`Function`)
- ƒ **_add_log** (`Function`)
- 📝 Adds a log entry to a task and notifies subscribers.
- ƒ **TaskManager.subscribe_logs** (`Function`)
- ƒ **subscribe_logs** (`Function`)
- 📝 Subscribes to real-time logs for a task.
- ƒ **TaskManager.unsubscribe_logs** (`Function`)
- ƒ **unsubscribe_logs** (`Function`)
- 📝 Unsubscribes from real-time logs for a task.
- ƒ **TaskManager.load_persisted_tasks** (`Function`)
- ƒ **load_persisted_tasks** (`Function`)
- 📝 Loads persisted tasks using the persistence service.
- ƒ **TaskManager.await_input** (`Function`)
- ƒ **await_input** (`Function`)
- 📝 Transition a task to AWAITING_INPUT state with input request.
- ƒ **TaskManager.resume_task_with_password** (`Function`)
- ƒ **resume_task_with_password** (`Function`)
- 📝 Resume a task that is awaiting input with provided passwords.
- ƒ **TaskManager.clear_tasks** (`Function`)
- ƒ **clear_tasks** (`Function`)
- 📝 Clears tasks based on status filter.
- 📦 **TaskManagerModels** (`Module`)
- 📝 Defines the data models and enumerations used by the Task Manager.
@@ -838,14 +851,16 @@
- 📝 A Pydantic model representing a single, structured log entry associated with a task.
- **Task** (`Class`)
- 📝 A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
- ƒ **Task.__init__** (`Function`)
- ƒ **__init__** (`Function`)
- 📝 Initializes the Task model and validates input_request for AWAITING_INPUT status.
- 📦 **TaskCleanupModule** (`Module`)
- 📝 Implements task cleanup and retention policies.
- 🏗️ Layer: Core
- **TaskCleanupService** (`Class`)
- 📝 Provides methods to clean up old task records.
- ƒ **TaskCleanupService.run_cleanup** (`Function`)
- ƒ **__init__** (`Function`)
- 📝 Initializes the cleanup service with dependencies.
- ƒ **run_cleanup** (`Function`)
- 📝 Deletes tasks older than the configured retention period.
- 📦 **TaskManagerPackage** (`Module`)
- 📝 Exports the public API of the task manager package.

View File

@@ -29,7 +29,7 @@ from superset_tool.utils.network import APIClient
# @RELATION: CREATES_INSTANCE_OF -> APIClient
# @RELATION: USES -> SupersetConfig
class SupersetClient:
# [DEF:SupersetClient.__init__:Function]
# [DEF:__init__:Function]
# @PURPOSE: Initializes the client, validates the configuration, and creates the network client.
# @PRE: `config` must be a valid SupersetConfig object.
# @POST: The `logger`, `config`, and `network` attributes are created and ready for use.
@@ -48,9 +48,9 @@ class SupersetClient:
)
self.delete_before_reimport: bool = False
self.logger.info("[SupersetClient.__init__][Exit] SupersetClient initialized.")
# [/DEF:SupersetClient.__init__:Function]
# [/DEF:__init__:Function]
# [DEF:SupersetClient._validate_config:Function]
# [DEF:_validate_config:Function]
# @PURPOSE: Verifies that the supplied configuration object has the correct type.
# @PRE: `config` must be provided.
# @POST: If the check passes, execution continues.
@@ -60,18 +60,18 @@ class SupersetClient:
self.logger.debug("[_validate_config][Enter] Validating SupersetConfig.")
assert isinstance(config, SupersetConfig), "Конфигурация должна быть экземпляром SupersetConfig"
self.logger.debug("[_validate_config][Exit] Config is valid.")
# [/DEF:SupersetClient._validate_config:Function]
# [/DEF:_validate_config:Function]
@property
def headers(self) -> dict:
# [DEF:SupersetClient.headers:Function]
# [DEF:headers:Function]
# @PURPOSE: Returns the base HTTP headers used by the network client.
# @PRE: self.network must be initialized.
# @POST: The returned dictionary contains the current headers, including the authorization token.
return self.network.headers
# [/DEF:SupersetClient.headers:Function]
# [/DEF:headers:Function]
# [DEF:SupersetClient.get_dashboards:Function]
# [DEF:get_dashboards:Function]
# @PURPOSE: Retrieves the full list of dashboards, handling pagination automatically.
# @RELATION: CALLS -> self._fetch_total_object_count
# @RELATION: CALLS -> self._fetch_all_pages
@@ -93,9 +93,9 @@ class SupersetClient:
)
self.logger.info("[get_dashboards][Exit] Found %d dashboards.", total_count)
return total_count, paginated_data
# [/DEF:SupersetClient.get_dashboards:Function]
# [/DEF:get_dashboards:Function]
# [DEF:SupersetClient.export_dashboard:Function]
# [DEF:export_dashboard:Function]
# @PURPOSE: Exports a dashboard as a ZIP archive.
# @RELATION: CALLS -> self.network.request
# @PRE: dashboard_id must be a positive integer.
@@ -118,9 +118,9 @@ class SupersetClient:
filename = self._resolve_export_filename(response, dashboard_id)
self.logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename)
return response.content, filename
# [/DEF:SupersetClient.export_dashboard:Function]
# [/DEF:export_dashboard:Function]
# [DEF:SupersetClient.import_dashboard:Function]
# [DEF:import_dashboard:Function]
# @PURPOSE: Imports a dashboard from a ZIP file, with optional automatic deletion and retry on failure.
# @RELATION: CALLS -> self._do_import
# @RELATION: CALLS -> self.delete_dashboard
@@ -152,9 +152,9 @@ class SupersetClient:
self.delete_dashboard(target_id)
self.logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id)
return self._do_import(file_path)
# [/DEF:SupersetClient.import_dashboard:Function]
# [/DEF:import_dashboard:Function]
# [DEF:SupersetClient._resolve_target_id_for_delete:Function]
# [DEF:_resolve_target_id_for_delete:Function]
# @PURPOSE: Resolves the ID of the dashboard to delete, using an ID or slug.
# @PARAM: dash_id (Optional[int]) - Dashboard ID.
# @PARAM: dash_slug (Optional[str]) - Dashboard slug.
@@ -177,9 +177,9 @@ class SupersetClient:
except Exception as e:
self.logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e)
return None
# [/DEF:SupersetClient._resolve_target_id_for_delete:Function]
# [/DEF:_resolve_target_id_for_delete:Function]
# [DEF:SupersetClient._do_import:Function]
# [DEF:_do_import:Function]
# @PURPOSE: Performs a single import request without exception handling.
# @PRE: The file must exist.
# @POST: The file is uploaded successfully and the API response is returned.
@@ -200,9 +200,9 @@ class SupersetClient:
extra_data={"overwrite": "true"},
timeout=self.config.timeout * 2,
)
# [/DEF:SupersetClient._do_import:Function]
# [/DEF:_do_import:Function]
# [DEF:SupersetClient.delete_dashboard:Function]
# [DEF:delete_dashboard:Function]
# @PURPOSE: Deletes a dashboard by its ID or slug.
# @RELATION: CALLS -> self.network.request
# @PRE: dashboard_id must be provided.
@@ -218,9 +218,9 @@ class SupersetClient:
self.logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id)
else:
self.logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response)
# [/DEF:SupersetClient.delete_dashboard:Function]
# [/DEF:delete_dashboard:Function]
# [DEF:SupersetClient._extract_dashboard_id_from_zip:Function]
# [DEF:_extract_dashboard_id_from_zip:Function]
# @PURPOSE: Extracts the dashboard ID from `metadata.yaml` inside the ZIP archive.
# @PARAM: file_name (Union[str, Path]) - Path to the ZIP file.
# @PRE: The file given by `file_name` must be a valid ZIP archive.
@@ -241,9 +241,9 @@ class SupersetClient:
except Exception as exc:
self.logger.error("[_extract_dashboard_id_from_zip][Failure] %s", exc, exc_info=True)
return None
# [/DEF:SupersetClient._extract_dashboard_id_from_zip:Function]
# [/DEF:_extract_dashboard_id_from_zip:Function]
# [DEF:SupersetClient._extract_dashboard_slug_from_zip:Function]
# [DEF:_extract_dashboard_slug_from_zip:Function]
# @PURPOSE: Extracts the dashboard slug from `metadata.yaml` inside the ZIP archive.
# @PARAM: file_name (Union[str, Path]) - Path to the ZIP file.
# @PRE: The file given by `file_name` must be a valid ZIP archive.
@@ -264,9 +264,9 @@ class SupersetClient:
except Exception as exc:
self.logger.error("[_extract_dashboard_slug_from_zip][Failure] %s", exc, exc_info=True)
return None
# [/DEF:SupersetClient._extract_dashboard_slug_from_zip:Function]
# [/DEF:_extract_dashboard_slug_from_zip:Function]
# [DEF:SupersetClient._validate_export_response:Function]
# [DEF:_validate_export_response:Function]
# @PURPOSE: Verifies that the export HTTP response is a valid ZIP archive.
# @PRE: response must be a requests.Response object.
# @POST: The check passes if the response is a non-empty ZIP archive.
@@ -280,9 +280,9 @@ class SupersetClient:
raise ExportError(f"Получен не ZIP-архив (Content-Type: {content_type})")
if not response.content:
raise ExportError("Получены пустые данные при экспорте")
# [/DEF:SupersetClient._validate_export_response:Function]
# [/DEF:_validate_export_response:Function]
# [DEF:SupersetClient._resolve_export_filename:Function]
# [DEF:_resolve_export_filename:Function]
# @PURPOSE: Resolves the export filename from the headers, or generates one.
# @PRE: response must be a requests.Response object.
# @POST: Returns a non-empty filename.
@@ -298,9 +298,9 @@ class SupersetClient:
filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip"
self.logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename)
return filename
# [/DEF:SupersetClient._resolve_export_filename:Function]
# [/DEF:_resolve_export_filename:Function]
# [DEF:SupersetClient._validate_query_params:Function]
# [DEF:_validate_query_params:Function]
# @PURPOSE: Builds a correct set of query parameters with pagination.
# @PARAM: query (Optional[Dict]) - Original parameters.
# @PRE: query, if provided, must be a dictionary.
@@ -310,9 +310,9 @@ class SupersetClient:
assert query is None or isinstance(query, dict), "[_validate_query_params][PRE] query must be a dictionary or None."
base_query = {"page": 0, "page_size": 1000}
return {**base_query, **(query or {})}
# [/DEF:SupersetClient._validate_query_params:Function]
# [/DEF:_validate_query_params:Function]
# [DEF:SupersetClient._fetch_total_object_count:Function]
# [DEF:_fetch_total_object_count:Function]
# @PURPOSE: Retrieves the total object count for the given endpoint, for pagination.
# @PARAM: endpoint (str) - API endpoint.
# @PRE: endpoint must be a non-empty string.
@@ -326,9 +326,9 @@ class SupersetClient:
query_params={"page": 0, "page_size": 1},
count_field="count",
)
# [/DEF:SupersetClient._fetch_total_object_count:Function]
# [/DEF:_fetch_total_object_count:Function]
# [DEF:SupersetClient._fetch_all_pages:Function]
# [DEF:_fetch_all_pages:Function]
# @PURPOSE: Iterates over all pages of a paginated API and collects all the data.
# @PARAM: endpoint (str) - API endpoint.
# @PARAM: pagination_options (Dict) - Pagination options.
@@ -340,9 +340,9 @@ class SupersetClient:
assert endpoint and isinstance(endpoint, str), "[_fetch_all_pages][PRE] endpoint must be a non-empty string."
assert isinstance(pagination_options, dict), "[_fetch_all_pages][PRE] pagination_options must be a dictionary."
return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options)
# [/DEF:SupersetClient._fetch_all_pages:Function]
# [/DEF:_fetch_all_pages:Function]
# [DEF:SupersetClient._validate_import_file:Function]
# [DEF:_validate_import_file:Function]
# @PURPOSE: Verifies that the file exists, is a ZIP archive, and contains `metadata.yaml`.
# @PRE: zip_path must be provided.
# @POST: Validation passes if the file exists, is a ZIP, and contains `metadata.yaml`.
@@ -356,9 +356,9 @@ class SupersetClient:
assert zipfile.is_zipfile(path), f"File {zip_path} is not a ZIP archive"
with zipfile.ZipFile(path, "r") as zf:
assert any(n.endswith("metadata.yaml") for n in zf.namelist()), f"Archive {zip_path} does not contain 'metadata.yaml'"
# [/DEF:SupersetClient._validate_import_file:Function]
# [/DEF:_validate_import_file:Function]
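
The checks in `_validate_import_file` can be exercised against an in-memory archive. A small sketch using only the standard library (a real Superset export contains more than `metadata.yaml`; the entry name and content here are illustrative):

import io
import zipfile

# Build an in-memory archive that satisfies the content check above.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("dashboard_export/metadata.yaml", "version: 1.0.0\n")

assert zipfile.is_zipfile(buf)
with zipfile.ZipFile(buf, "r") as zf:
    # Mirrors the assertion above: at least one entry ends with metadata.yaml.
    assert any(n.endswith("metadata.yaml") for n in zf.namelist())
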
# [DEF:SupersetClient.get_datasets:Function]
# [DEF:get_datasets:Function]
# @PURPOSE: Retrieves the full list of datasets, handling pagination automatically.
# @RELATION: CALLS -> self._fetch_total_object_count
# @RELATION: CALLS -> self._fetch_all_pages
@@ -379,9 +379,9 @@ class SupersetClient:
)
self.logger.info("[get_datasets][Exit] Found %d datasets.", total_count)
return total_count, paginated_data
# [/DEF:SupersetClient.get_datasets:Function]
# [/DEF:get_datasets:Function]
# [DEF:SupersetClient.get_databases:Function]
# [DEF:get_databases:Function]
# @PURPOSE: Retrieves the full list of databases, handling pagination automatically.
# @RELATION: CALLS -> self._fetch_total_object_count
# @RELATION: CALLS -> self._fetch_all_pages
@@ -403,9 +403,9 @@ class SupersetClient:
)
self.logger.info("[get_databases][Exit] Found %d databases.", total_count)
return total_count, paginated_data
# [/DEF:SupersetClient.get_databases:Function]
# [/DEF:get_databases:Function]
# [DEF:SupersetClient.get_dataset:Function]
# [DEF:get_dataset:Function]
# @PURPOSE: Retrieves information about a specific dataset by its ID.
# @RELATION: CALLS -> self.network.request
# @PARAM: dataset_id (int) - Dataset ID.
@@ -420,9 +420,9 @@ class SupersetClient:
response = cast(Dict, response)
self.logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id)
return response
# [/DEF:SupersetClient.get_dataset:Function]
# [/DEF:get_dataset:Function]
# [DEF:SupersetClient.get_database:Function]
# [DEF:get_database:Function]
# @PURPOSE: Retrieves information about a specific database by its ID.
# @RELATION: CALLS -> self.network.request
# @PARAM: database_id (int) - Database ID.
@@ -437,9 +437,9 @@ class SupersetClient:
response = cast(Dict, response)
self.logger.info("[get_database][Exit] Got database %s.", database_id)
return response
# [/DEF:SupersetClient.get_database:Function]
# [/DEF:get_database:Function]
# [DEF:SupersetClient.update_dataset:Function]
# [DEF:update_dataset:Function]
# @PURPOSE: Updates a dataset's data by its ID.
# @RELATION: CALLS -> self.network.request
# @PARAM: dataset_id (int) - Dataset ID.
@@ -461,7 +461,7 @@ class SupersetClient:
response = cast(Dict, response)
self.logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id)
return response
# [/DEF:SupersetClient.update_dataset:Function]
# [/DEF:update_dataset:Function]
# [/DEF:SupersetClient:Class]
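
Taken together, the getters above return a `(total_count, items)` tuple. A hypothetical call site, assuming an already-configured `client` (construction and authentication are outside this diff, and the ID is an example):

# Illustrative only; `client` is assumed to be a configured SupersetClient.
total, datasets = client.get_datasets()
db_total, databases = client.get_databases()
dataset = client.get_dataset(42)  # single object by ID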

View File

@@ -14,9 +14,14 @@ from typing import Optional, Dict, Any, Union
# @PARAM: message (str) - Error message.
# @PARAM: context (Optional[Dict[str, Any]]) - Additional error context.
class SupersetToolError(Exception):
# [DEF:__init__:Function]
# @PURPOSE: Initializes the base tool error.
# @PRE: message is a string, context is optional dict.
# @POST: Error is initialized with combined message and context.
def __init__(self, message: str, context: Optional[Dict[str, Any]] = None):
self.context = context or {}
super().__init__(f"{message} | Context: {self.context}")
# [/DEF:__init__:Function]
# [/DEF:SupersetToolError:Class]
# [DEF:AuthenticationError:Class]
@@ -25,8 +30,13 @@ class SupersetToolError(Exception):
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class AuthenticationError(SupersetToolError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes an authentication error.
# @PRE: Optional message and context.
# @POST: Error is initialized with authentication context.
def __init__(self, message: str = "Authentication failed", **context: Any):
super().__init__(f"[AUTH_FAILURE] {message}", context={"type": "authentication", **context})
# [/DEF:__init__:Function]
# [/DEF:AuthenticationError:Class]
# [DEF:PermissionDeniedError:Class]
@@ -36,9 +46,14 @@ class AuthenticationError(SupersetToolError):
# @PARAM: required_permission (Optional[str]) - The required permission.
# @PARAM: context (Any) - Additional error context.
class PermissionDeniedError(AuthenticationError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes a permission denied error.
# @PRE: Optional message, permission string, and context.
# @POST: Error is initialized with permission details.
def __init__(self, message: str = "Permission denied", required_permission: Optional[str] = None, **context: Any):
full_message = f"Permission denied: {required_permission}" if required_permission else message
super().__init__(full_message, context={"required_permission": required_permission, **context})
# [/DEF:__init__:Function]
# [/DEF:PermissionDeniedError:Class]
# [DEF:SupersetAPIError:Class]
@@ -47,8 +62,13 @@ class PermissionDeniedError(AuthenticationError):
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class SupersetAPIError(SupersetToolError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes a Superset API error.
# @PRE: Optional message and context.
# @POST: Error is initialized with API failure context.
def __init__(self, message: str = "Superset API error", **context: Any):
super().__init__(f"[API_FAILURE] {message}", context={"type": "api_call", **context})
# [/DEF:__init__:Function]
# [/DEF:SupersetAPIError:Class]
# [DEF:ExportError:Class]
@@ -57,8 +77,13 @@ class SupersetAPIError(SupersetToolError):
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class ExportError(SupersetAPIError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes an export error.
# @PRE: Optional message and context.
# @POST: Error is initialized with export failure subtype.
def __init__(self, message: str = "Dashboard export failed", **context: Any):
super().__init__(f"[EXPORT_FAILURE] {message}", context={"subtype": "export", **context})
# [/DEF:__init__:Function]
# [/DEF:ExportError:Class]
# [DEF:DashboardNotFoundError:Class]
@@ -68,8 +93,13 @@ class ExportError(SupersetAPIError):
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class DashboardNotFoundError(SupersetAPIError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes a dashboard not found error.
# @PRE: dashboard_id_or_slug is provided.
# @POST: Error is initialized with resource identification.
def __init__(self, dashboard_id_or_slug: Union[int, str], message: str = "Dashboard not found", **context: Any):
super().__init__(f"[NOT_FOUND] Dashboard '{dashboard_id_or_slug}' {message}", context={"subtype": "not_found", "resource_id": dashboard_id_or_slug, **context})
# [/DEF:__init__:Function]
# [/DEF:DashboardNotFoundError:Class]
# [DEF:DatasetNotFoundError:Class]
@@ -79,8 +109,13 @@ class DashboardNotFoundError(SupersetAPIError):
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class DatasetNotFoundError(SupersetAPIError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes a dataset not found error.
# @PRE: dataset_id_or_slug is provided.
# @POST: Error is initialized with resource identification.
def __init__(self, dataset_id_or_slug: Union[int, str], message: str = "Dataset not found", **context: Any):
super().__init__(f"[NOT_FOUND] Dataset '{dataset_id_or_slug}' {message}", context={"subtype": "not_found", "resource_id": dataset_id_or_slug, **context})
# [/DEF:__init__:Function]
# [/DEF:DatasetNotFoundError:Class]
# [DEF:InvalidZipFormatError:Class]
@@ -90,8 +125,13 @@ class DatasetNotFoundError(SupersetAPIError):
# @PARAM: file_path (Optional[Union[str, Path]]) - Path to the file.
# @PARAM: context (Any) - Additional error context.
class InvalidZipFormatError(SupersetToolError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes an invalid ZIP format error.
# @PRE: Optional message, file path, and context.
# @POST: Error is initialized with file validation context.
def __init__(self, message: str = "Invalid ZIP format or content", file_path: Optional[Union[str, Path]] = None, **context: Any):
super().__init__(f"[FILE_ERROR] {message}", context={"type": "file_validation", "file_path": str(file_path) if file_path else "N/A", **context})
# [/DEF:__init__:Function]
# [/DEF:InvalidZipFormatError:Class]
# [DEF:NetworkError:Class]
@@ -100,6 +140,11 @@ class InvalidZipFormatError(SupersetToolError):
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class NetworkError(SupersetToolError):
# [DEF:__init__:Function]
# @PURPOSE: Initializes a network error.
# @PRE: Optional message and context.
# @POST: Error is initialized with network failure context.
def __init__(self, message: str = "Network connection failed", **context: Any):
super().__init__(f"[NETWORK_FAILURE] {message}", context={"type": "network", **context})
# [/DEF:__init__:Function]
# [/DEF:NetworkError:Class]
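
A short usage sketch of the hierarchy (assuming the classes above are imported; `dashboard_id` and `username` are illustrative values, and the output comments follow the constructors shown in this diff):

# The base class folds the context dict into the message string.
err = SupersetToolError("export failed", context={"dashboard_id": 42})
print(err)  # export failed | Context: {'dashboard_id': 42}

# Subclasses prepend a bracketed tag and a type marker; any of them
# can be caught through the shared SupersetToolError base.
try:
    raise AuthenticationError(username="admin")
except SupersetToolError as exc:
    print(exc)  # [AUTH_FAILURE] Authentication failed | Context: {'type': 'authentication', 'username': 'admin'}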

View File

@@ -25,7 +25,7 @@ class SupersetConfig(BaseModel):
timeout: int = Field(30, description="Timeout in seconds for HTTP requests.")
logger: Optional[SupersetLogger] = Field(None, description="Logger instance for logging.")
# [DEF:SupersetConfig.validate_auth:Function]
# [DEF:validate_auth:Function]
# @PURPOSE: Verifies that the `auth` dictionary contains all fields required for authentication.
# @PRE: `v` must be a dictionary.
# @POST: Returns `v` if all required fields (`provider`, `username`, `password`, `refresh`) are present.
@@ -37,9 +37,9 @@ class SupersetConfig(BaseModel):
if not required.issubset(v.keys()):
raise ValueError(f"Словарь 'auth' должен содержать поля: {required}. Отсутствующие: {required - v.keys()}")
return v
# [/DEF:SupersetConfig.validate_auth:Function]
# [/DEF:validate_auth:Function]
# [DEF:SupersetConfig.normalize_base_url:Function]
# [DEF:normalize_base_url:Function]
# @PURPOSE: Normalizes `base_url` by appending `/api/v1` when it is missing.
# @PRE: `v` must be a string.
# @POST: Returns the normalized `v`.
@@ -54,7 +54,7 @@ class SupersetConfig(BaseModel):
if '/api/v1' not in v:
v = f"{v.rstrip('/')}/api/v1"
return v
# [/DEF:SupersetConfig.normalize_base_url:Function]
# [/DEF:normalize_base_url:Function]
class Config:
arbitrary_types_allowed = True
@@ -67,7 +67,7 @@ class DatabaseConfig(BaseModel):
database_config: Dict[str, Dict[str, Any]] = Field(..., description="Dictionary containing the 'old' and 'new' database configurations.")
logger: Optional[SupersetLogger] = Field(None, description="Logger instance for logging.")
# [DEF:DatabaseConfig.validate_config:Function]
# [DEF:validate_config:Function]
# @PURPOSE: Verifies that the `database_config` dictionary contains the 'old' and 'new' keys.
# @PRE: `v` must be a dictionary.
# @POST: Returns `v` if the 'old' and 'new' keys are present.
@@ -78,7 +78,7 @@ class DatabaseConfig(BaseModel):
if not {'old', 'new'}.issubset(v.keys()):
raise ValueError("'database_config' должен содержать ключи 'old' и 'new'.")
return v
# [/DEF:DatabaseConfig.validate_config:Function]
# [/DEF:validate_config:Function]
class Config:
arbitrary_types_allowed = True
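
For reference, the validator behaviors above can be checked in isolation. A minimal sketch using nothing beyond the standard library (`normalize` and the example URLs are illustrative restatements, outside the Pydantic models):

def normalize(v: str) -> str:
    # Append /api/v1 only when the URL does not already contain it.
    if '/api/v1' not in v:
        v = f"{v.rstrip('/')}/api/v1"
    return v

print(normalize("https://superset.example.com/"))        # https://superset.example.com/api/v1
print(normalize("https://superset.example.com/api/v1"))  # unchanged

# validate_config accepts only dicts carrying both 'old' and 'new':
assert {'old', 'new'}.issubset({"old": {}, "new": {}}.keys())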