project map script | semantic parser

This commit is contained in:
2026-01-01 16:58:21 +03:00
parent a747a163c8
commit 4c6fc8256d
84 changed files with 10178 additions and 537 deletions

View File

@@ -49,4 +49,4 @@ async def get_current_user(token: str = Depends(oauth2_scheme)):
)
# A real implementation would return a user object.
return {"placeholder_user": "user@example.com"}
# [/DEF]
# [/DEF:AuthModule:Module]

View File

@@ -24,7 +24,7 @@ router = APIRouter()
class ScheduleSchema(BaseModel):
enabled: bool = False
cron_expression: str = Field(..., pattern=r'^(@(annually|yearly|monthly|weekly|daily|hourly|reboot))|((((\d+,)*\d+|(\d+(\/|-)\d+)|\d+|\*) ?){5,7})$')
# [/DEF:ScheduleSchema]
# [/DEF:ScheduleSchema:DataClass]
# [DEF:EnvironmentResponse:DataClass]
class EnvironmentResponse(BaseModel):
@@ -32,14 +32,14 @@ class EnvironmentResponse(BaseModel):
name: str
url: str
backup_schedule: Optional[ScheduleSchema] = None
# [/DEF:EnvironmentResponse]
# [/DEF:EnvironmentResponse:DataClass]
# [DEF:DatabaseResponse:DataClass]
class DatabaseResponse(BaseModel):
uuid: str
database_name: str
engine: Optional[str]
# [/DEF:DatabaseResponse]
# [/DEF:DatabaseResponse:DataClass]
# [DEF:get_environments:Function]
# @PURPOSE: List all configured environments.
@@ -61,7 +61,7 @@ async def get_environments(config_manager=Depends(get_config_manager)):
) if e.backup_schedule else None
) for e in envs
]
# [/DEF:get_environments]
# [/DEF:get_environments:Function]
# [DEF:update_environment_schedule:Function]
# @PURPOSE: Update backup schedule for an environment.
@@ -89,7 +89,7 @@ async def update_environment_schedule(
scheduler_service.load_schedules()
return {"message": "Schedule updated successfully"}
# [/DEF:update_environment_schedule]
# [/DEF:update_environment_schedule:Function]
# [DEF:get_environment_databases:Function]
# @PURPOSE: Fetch the list of databases from a specific environment.
@@ -119,6 +119,6 @@ async def get_environment_databases(id: str, config_manager=Depends(get_config_m
return client.get_databases_summary()
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to fetch databases: {str(e)}")
# [/DEF:get_environment_databases]
# [/DEF:get_environment_databases:Function]
# [/DEF:backend.src.api.routes.environments]
# [/DEF:backend.src.api.routes.environments:Module]

View File

@@ -29,7 +29,7 @@ class MappingCreate(BaseModel):
target_db_uuid: str
source_db_name: str
target_db_name: str
# [/DEF:MappingCreate]
# [/DEF:MappingCreate:DataClass]
# [DEF:MappingResponse:DataClass]
class MappingResponse(BaseModel):
@@ -43,13 +43,13 @@ class MappingResponse(BaseModel):
class Config:
from_attributes = True
# [/DEF:MappingResponse]
# [/DEF:MappingResponse:DataClass]
# [DEF:SuggestRequest:DataClass]
class SuggestRequest(BaseModel):
source_env_id: str
target_env_id: str
# [/DEF:SuggestRequest]
# [/DEF:SuggestRequest:DataClass]
# [DEF:get_mappings:Function]
# @PURPOSE: List all saved database mappings.
@@ -65,7 +65,7 @@ async def get_mappings(
if target_env_id:
query = query.filter(DatabaseMapping.target_env_id == target_env_id)
return query.all()
# [/DEF:get_mappings]
# [/DEF:get_mappings:Function]
# [DEF:create_mapping:Function]
# @PURPOSE: Create or update a database mapping.
@@ -90,7 +90,7 @@ async def create_mapping(mapping: MappingCreate, db: Session = Depends(get_db)):
db.commit()
db.refresh(new_mapping)
return new_mapping
# [/DEF:create_mapping]
# [/DEF:create_mapping:Function]
# [DEF:suggest_mappings_api:Function]
# @PURPOSE: Get suggested mappings based on fuzzy matching.
@@ -105,6 +105,6 @@ async def suggest_mappings_api(
return await service.get_suggestions(request.source_env_id, request.target_env_id)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
# [/DEF:suggest_mappings_api]
# [/DEF:suggest_mappings_api:Function]
# [/DEF:backend.src.api.routes.mappings]
# [/DEF:backend.src.api.routes.mappings:Module]

View File

@@ -37,7 +37,7 @@ async def get_dashboards(env_id: str, config_manager=Depends(get_config_manager)
client = SupersetClient(config)
dashboards = client.get_dashboards_summary()
return dashboards
# [/DEF:get_dashboards]
# [/DEF:get_dashboards:Function]
# [DEF:execute_migration:Function]
# @PURPOSE: Execute the migration of selected dashboards.
@@ -71,6 +71,6 @@ async def execute_migration(selection: DashboardSelection, config_manager=Depend
except Exception as e:
logger.error(f"Task creation failed: {e}")
raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:execute_migration]
# [/DEF:execute_migration:Function]
# [/DEF:backend.src.api.routes.migration]
# [/DEF:backend.src.api.routes.migration:Module]

View File

@@ -19,4 +19,4 @@ async def list_plugins(
Retrieve a list of all available plugins.
"""
return plugin_loader.get_all_plugin_configs()
# [/DEF]
# [/DEF:PluginsRouter:Module]

View File

@@ -35,7 +35,7 @@ async def get_settings(config_manager: ConfigManager = Depends(get_config_manage
if env.password:
env.password = "********"
return config
# [/DEF:get_settings]
# [/DEF:get_settings:Function]
# [DEF:update_global_settings:Function]
# @PURPOSE: Updates global application settings.
@@ -49,7 +49,7 @@ async def update_global_settings(
logger.info("[update_global_settings][Entry] Updating global settings")
config_manager.update_global_settings(settings)
return settings
# [/DEF:update_global_settings]
# [/DEF:update_global_settings:Function]
# [DEF:get_environments:Function]
# @PURPOSE: Lists all configured Superset environments.
@@ -58,7 +58,7 @@ async def update_global_settings(
async def get_environments(config_manager: ConfigManager = Depends(get_config_manager)):
logger.info("[get_environments][Entry] Fetching environments")
return config_manager.get_environments()
# [/DEF:get_environments]
# [/DEF:get_environments:Function]
# [DEF:add_environment:Function]
# @PURPOSE: Adds a new Superset environment.
@@ -91,7 +91,7 @@ async def add_environment(
config_manager.add_environment(env)
return env
# [/DEF:add_environment]
# [/DEF:add_environment:Function]
# [DEF:update_environment:Function]
# @PURPOSE: Updates an existing Superset environment.
@@ -134,7 +134,7 @@ async def update_environment(
if config_manager.update_environment(id, env):
return env
raise HTTPException(status_code=404, detail=f"Environment {id} not found")
# [/DEF:update_environment]
# [/DEF:update_environment:Function]
# [DEF:delete_environment:Function]
# @PURPOSE: Deletes a Superset environment.
@@ -147,7 +147,7 @@ async def delete_environment(
logger.info(f"[delete_environment][Entry] Deleting environment {id}")
config_manager.delete_environment(id)
return {"message": f"Environment {id} deleted"}
# [/DEF:delete_environment]
# [/DEF:delete_environment:Function]
# [DEF:test_environment_connection:Function]
# @PURPOSE: Tests the connection to a Superset environment.
@@ -190,7 +190,7 @@ async def test_environment_connection(
except Exception as e:
logger.error(f"[test_environment_connection][Coherence:Failed] Connection failed for {id}: {e}")
return {"status": "error", "message": str(e)}
# [/DEF:test_environment_connection]
# [/DEF:test_environment_connection:Function]
# [DEF:validate_backup_path:Function]
# @PURPOSE: Validates if a backup path exists and is writable.
@@ -213,6 +213,6 @@ async def validate_backup_path(
return {"status": "error", "message": message}
return {"status": "success", "message": message}
# [/DEF:validate_backup_path]
# [/DEF:validate_backup_path:Function]
# [/DEF:SettingsRouter]
# [/DEF:SettingsRouter:Module]

View File

@@ -117,4 +117,4 @@ async def clear_tasks(
"""
task_manager.clear_tasks(status)
return
# [/DEF]
# [/DEF:TasksRouter:Module]

View File

@@ -31,6 +31,7 @@ app = FastAPI(
description="API for managing Superset automation tools and plugins.",
version="1.0.0",
)
# [/DEF:App:Global]
# Startup event
@app.on_event("startup")
@@ -124,8 +125,7 @@ async def websocket_endpoint(websocket: WebSocket, task_id: str):
logger.error(f"WebSocket error for task {task_id}: {e}")
finally:
task_manager.unsubscribe_logs(task_id, queue)
# [/DEF]
# [/DEF:WebSocketEndpoint:Endpoint]
# [DEF:StaticFiles:Mount]
# @SEMANTICS: static, frontend, spa
@@ -149,4 +149,6 @@ else:
@app.get("/")
async def read_root():
return {"message": "Superset Tools API is running (Frontend build not found)"}
# [/DEF]
# [/DEF:RootEndpoint:Endpoint]
# [/DEF:StaticFiles:Mount]
# [/DEF:AppModule:Module]

View File

@@ -46,7 +46,7 @@ class ConfigManager:
assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"
logger.info(f"[ConfigManager][Exit] Initialized")
# [/DEF:__init__]
# [/DEF:__init__:Function]
# [DEF:_load_config:Function]
# @PURPOSE: Loads the configuration from disk or creates a default one.
@@ -78,7 +78,7 @@ class ConfigManager:
environments=[],
settings=GlobalSettings(backup_path="backups")
)
# [/DEF:_load_config]
# [/DEF:_load_config:Function]
# [DEF:_save_config_to_disk:Function]
# @PURPOSE: Saves the provided configuration object to disk.
@@ -97,20 +97,20 @@ class ConfigManager:
logger.info(f"[_save_config_to_disk][Action] Configuration saved")
except Exception as e:
logger.error(f"[_save_config_to_disk][Coherence:Failed] Failed to save: {e}")
# [/DEF:_save_config_to_disk]
# [/DEF:_save_config_to_disk:Function]
# [DEF:save:Function]
# @PURPOSE: Saves the current configuration state to disk.
def save(self):
self._save_config_to_disk(self.config)
# [/DEF:save]
# [/DEF:save:Function]
# [DEF:get_config:Function]
# @PURPOSE: Returns the current configuration.
# @RETURN: AppConfig - The current configuration.
def get_config(self) -> AppConfig:
return self.config
# [/DEF:get_config]
# [/DEF:get_config:Function]
# [DEF:update_global_settings:Function]
# @PURPOSE: Updates the global settings and persists the change.
@@ -130,7 +130,7 @@ class ConfigManager:
configure_logger(settings.logging)
logger.info(f"[update_global_settings][Exit] Settings updated")
# [/DEF:update_global_settings]
# [/DEF:update_global_settings:Function]
# [DEF:validate_path:Function]
# @PURPOSE: Validates if a path exists and is writable.
@@ -148,21 +148,21 @@ class ConfigManager:
return False, "Path is not writable"
return True, "Path is valid and writable"
# [/DEF:validate_path]
# [/DEF:validate_path:Function]
# [DEF:get_environments:Function]
# @PURPOSE: Returns the list of configured environments.
# @RETURN: List[Environment] - List of environments.
def get_environments(self) -> List[Environment]:
return self.config.environments
# [/DEF:get_environments]
# [/DEF:get_environments:Function]
# [DEF:has_environments:Function]
# @PURPOSE: Checks if at least one environment is configured.
# @RETURN: bool - True if at least one environment exists.
def has_environments(self) -> bool:
return len(self.config.environments) > 0
# [/DEF:has_environments]
# [/DEF:has_environments:Function]
# [DEF:add_environment:Function]
# @PURPOSE: Adds a new environment to the configuration.
@@ -181,7 +181,7 @@ class ConfigManager:
self.save()
logger.info(f"[add_environment][Exit] Environment added")
# [/DEF:add_environment]
# [/DEF:add_environment:Function]
# [DEF:update_environment:Function]
# @PURPOSE: Updates an existing environment.
@@ -210,7 +210,7 @@ class ConfigManager:
logger.warning(f"[update_environment][Coherence:Failed] Environment {env_id} not found")
return False
# [/DEF:update_environment]
# [/DEF:update_environment:Function]
# [DEF:delete_environment:Function]
# @PURPOSE: Deletes an environment by ID.
@@ -231,8 +231,8 @@ class ConfigManager:
logger.info(f"[delete_environment][Action] Deleted {env_id}")
else:
logger.warning(f"[delete_environment][Coherence:Failed] Environment {env_id} not found")
# [/DEF:delete_environment]
# [/DEF:delete_environment:Function]
# [/DEF:ConfigManager]
# [/DEF:ConfigManager:Class]
# [/DEF:ConfigManagerModule]
# [/DEF:ConfigManagerModule:Module]

View File

@@ -13,7 +13,7 @@ from typing import List, Optional
class Schedule(BaseModel):
enabled: bool = False
cron_expression: str = "0 0 * * *" # Default: daily at midnight
# [/DEF:Schedule]
# [/DEF:Schedule:DataClass]
# [DEF:Environment:DataClass]
# @PURPOSE: Represents a Superset environment configuration.
@@ -25,7 +25,7 @@ class Environment(BaseModel):
password: str # Will be masked in UI
is_default: bool = False
backup_schedule: Schedule = Field(default_factory=Schedule)
# [/DEF:Environment]
# [/DEF:Environment:DataClass]
# [DEF:LoggingConfig:DataClass]
# @PURPOSE: Defines the configuration for the application's logging system.
@@ -35,7 +35,7 @@ class LoggingConfig(BaseModel):
max_bytes: int = 10 * 1024 * 1024
backup_count: int = 5
enable_belief_state: bool = True
# [/DEF:LoggingConfig]
# [/DEF:LoggingConfig:DataClass]
# [DEF:GlobalSettings:DataClass]
# @PURPOSE: Represents global application settings.
@@ -48,13 +48,13 @@ class GlobalSettings(BaseModel):
task_retention_days: int = 30
task_retention_limit: int = 100
pagination_limit: int = 10
# [/DEF:GlobalSettings]
# [/DEF:GlobalSettings:DataClass]
# [DEF:AppConfig:DataClass]
# @PURPOSE: The root configuration model containing all application settings.
class AppConfig(BaseModel):
environments: List[Environment] = []
settings: GlobalSettings
# [/DEF:AppConfig]
# [/DEF:AppConfig:DataClass]
# [/DEF:ConfigModels]
# [/DEF:ConfigModels:Module]

View File

@@ -19,34 +19,36 @@ import os
# [DEF:DATABASE_URL:Constant]
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./mappings.db")
# [/DEF:DATABASE_URL]
# [/DEF:DATABASE_URL:Constant]
# [DEF:TASKS_DATABASE_URL:Constant]
TASKS_DATABASE_URL = os.getenv("TASKS_DATABASE_URL", "sqlite:///./tasks.db")
# [/DEF:TASKS_DATABASE_URL]
# [/DEF:TASKS_DATABASE_URL:Constant]
# [DEF:engine:Variable]
engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False})
# [/DEF:engine]
# [/DEF:engine:Variable]
# [DEF:tasks_engine:Variable]
tasks_engine = create_engine(TASKS_DATABASE_URL, connect_args={"check_same_thread": False})
# [/DEF:tasks_engine]
# [/DEF:tasks_engine:Variable]
# [DEF:SessionLocal:Class]
# @PURPOSE: A session factory for the main mappings database.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# [/DEF:SessionLocal]
# [/DEF:SessionLocal:Class]
# [DEF:TasksSessionLocal:Class]
# @PURPOSE: A session factory for the tasks execution database.
TasksSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=tasks_engine)
# [/DEF:TasksSessionLocal]
# [/DEF:TasksSessionLocal:Class]
# [DEF:init_db:Function]
# @PURPOSE: Initializes the database by creating all tables.
def init_db():
Base.metadata.create_all(bind=engine)
Base.metadata.create_all(bind=tasks_engine)
# [/DEF:init_db]
# [/DEF:init_db:Function]
# [DEF:get_db:Function]
# @PURPOSE: Dependency for getting a database session.
@@ -58,7 +60,7 @@ def get_db():
yield db
finally:
db.close()
# [/DEF:get_db]
# [/DEF:get_db:Function]
# [DEF:get_tasks_db:Function]
# @PURPOSE: Dependency for getting a tasks database session.
@@ -70,6 +72,6 @@ def get_tasks_db():
yield db
finally:
db.close()
# [/DEF:get_tasks_db]
# [/DEF:get_tasks_db:Function]
# [/DEF:backend.src.core.database]
# [/DEF:backend.src.core.database:Module]

View File

@@ -28,7 +28,7 @@ class BeliefFormatter(logging.Formatter):
if anchor_id:
msg = f"[{anchor_id}][Action] {msg}"
return msg
# [/DEF:BeliefFormatter]
# [/DEF:BeliefFormatter:Class]
# Re-using LogEntry from task_manager for consistency
# [DEF:LogEntry:Class]
@@ -40,7 +40,7 @@ class LogEntry(BaseModel):
message: str
context: Optional[Dict[str, Any]] = None
# [/DEF]
# [/DEF:LogEntry:Class]
# [DEF:BeliefScope:Function]
# @PURPOSE: Context manager for structured Belief State logging.
@@ -71,7 +71,7 @@ def belief_scope(anchor_id: str, message: str = ""):
# Restore old anchor
_belief_state.anchor_id = old_anchor
# [/DEF:BeliefScope]
# [/DEF:BeliefScope:Function]
# [DEF:ConfigureLogger:Function]
# @PURPOSE: Configures the logger with the provided logging settings.
@@ -115,7 +115,7 @@ def configure_logger(config):
handler.setFormatter(BeliefFormatter(
'[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
))
# [/DEF:ConfigureLogger]
# [/DEF:ConfigureLogger:Function]
# [DEF:WebSocketLogHandler:Class]
# @SEMANTICS: logging, handler, websocket, buffer
@@ -158,7 +158,7 @@ class WebSocketLogHandler(logging.Handler):
"""
return list(self.log_buffer)
# [/DEF]
# [/DEF:WebSocketLogHandler:Class]
# [DEF:Logger:Global]
# @SEMANTICS: logger, global, instance
@@ -184,4 +184,5 @@ logger.addHandler(websocket_log_handler)
# Example usage:
# logger.info("Application started", extra={"context_key": "context_value"})
# logger.error("An error occurred", exc_info=True)
# [/DEF]
# [/DEF:Logger:Global]
# [/DEF:LoggerModule:Module]

View File

@@ -73,6 +73,7 @@ class MigrationEngine:
except Exception as e:
logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
return False
# [/DEF:MigrationEngine.transform_zip:Function]
# [DEF:MigrationEngine._transform_yaml:Function]
# @PURPOSE: Replaces database_uuid in a single YAML file.
@@ -90,8 +91,8 @@ class MigrationEngine:
data['database_uuid'] = db_mapping[source_uuid]
with open(file_path, 'w') as f:
yaml.dump(data, f)
# [/DEF:MigrationEngine._transform_yaml]
# [/DEF:MigrationEngine._transform_yaml:Function]
# [/DEF:MigrationEngine]
# [/DEF:MigrationEngine:Class]
# [/DEF:backend.src.core.migration_engine]
# [/DEF:backend.src.core.migration_engine:Module]

View File

@@ -54,7 +54,7 @@ class PluginBase(ABC):
The `params` argument will be validated against the schema returned by `get_schema()`.
"""
pass
# [/DEF]
# [/DEF:PluginBase:Class]
# [DEF:PluginConfig:Class]
# @SEMANTICS: plugin, config, schema, pydantic
@@ -68,4 +68,4 @@ class PluginConfig(BaseModel):
description: str = Field(..., description="Brief description of what the plugin does")
version: str = Field(..., description="Version of the plugin")
input_schema: Dict[str, Any] = Field(..., description="JSON schema for input parameters", alias="schema")
# [/DEF]
# [/DEF:PluginConfig:Class]

View File

@@ -16,12 +16,18 @@ class PluginLoader:
that inherit from PluginBase.
"""
# [DEF:PluginLoader.__init__:Function]
# @PURPOSE: Initializes the PluginLoader with a directory to scan.
# @PARAM: plugin_dir (str) - The directory containing plugin modules.
def __init__(self, plugin_dir: str):
self.plugin_dir = plugin_dir
self._plugins: Dict[str, PluginBase] = {}
self._plugin_configs: Dict[str, PluginConfig] = {}
self._load_plugins()
# [/DEF:PluginLoader.__init__:Function]
# [DEF:PluginLoader._load_plugins:Function]
# @PURPOSE: Scans the plugin directory and loads all valid plugins.
def _load_plugins(self):
"""
Scans the plugin directory, imports modules, and registers valid plugins.
@@ -41,7 +47,12 @@ class PluginLoader:
module_name = filename[:-3]
file_path = os.path.join(self.plugin_dir, filename)
self._load_module(module_name, file_path)
# [/DEF:PluginLoader._load_plugins:Function]
# [DEF:PluginLoader._load_module:Function]
# @PURPOSE: Loads a single Python module and discovers PluginBase implementations.
# @PARAM: module_name (str) - The name of the module.
# @PARAM: file_path (str) - The path to the module file.
def _load_module(self, module_name: str, file_path: str):
"""
Loads a single Python module and extracts PluginBase subclasses.
@@ -83,7 +94,11 @@ class PluginLoader:
self._register_plugin(plugin_instance)
except Exception as e:
print(f"Error instantiating plugin {attribute_name} in {module_name}: {e}") # Replace with proper logging
# [/DEF:PluginLoader._load_module:Function]
# [DEF:PluginLoader._register_plugin:Function]
# @PURPOSE: Registers a PluginBase instance and its configuration.
# @PARAM: plugin_instance (PluginBase) - The plugin instance to register.
def _register_plugin(self, plugin_instance: PluginBase):
"""
Registers a valid plugin instance.
@@ -116,22 +131,39 @@ class PluginLoader:
except Exception as e:
from ..core.logger import logger
logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
# [/DEF:PluginLoader._register_plugin:Function]
# [DEF:PluginLoader.get_plugin:Function]
# @PURPOSE: Retrieves a loaded plugin instance by its ID.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: Optional[PluginBase] - The plugin instance if found, otherwise None.
def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
"""
Returns a loaded plugin instance by its ID.
"""
return self._plugins.get(plugin_id)
# [/DEF:PluginLoader.get_plugin:Function]
# [DEF:PluginLoader.get_all_plugin_configs:Function]
# @PURPOSE: Returns a list of all registered plugin configurations.
# @RETURN: List[PluginConfig] - A list of plugin configurations.
def get_all_plugin_configs(self) -> List[PluginConfig]:
"""
Returns a list of all loaded plugin configurations.
"""
return list(self._plugin_configs.values())
# [/DEF:PluginLoader.get_all_plugin_configs:Function]
# [DEF:PluginLoader.has_plugin:Function]
# @PURPOSE: Checks if a plugin with the given ID is registered.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: bool - True if the plugin is registered, False otherwise.
def has_plugin(self, plugin_id: str) -> bool:
"""
Checks if a plugin with the given ID is loaded.
"""
return plugin_id in self._plugins
return plugin_id in self._plugins
# [/DEF:PluginLoader.has_plugin:Function]
# [/DEF:PluginLoader:Class]

View File

@@ -32,6 +32,7 @@ class SchedulerService:
self.scheduler.start()
logger.info("Scheduler started.")
self.load_schedules()
# [/DEF:SchedulerService.start:Function]
# [DEF:SchedulerService.stop:Function]
# @PURPOSE: Stops the background scheduler.
@@ -40,6 +41,7 @@ class SchedulerService:
if self.scheduler.running:
self.scheduler.shutdown()
logger.info("Scheduler stopped.")
# [/DEF:SchedulerService.stop:Function]
# [DEF:SchedulerService.load_schedules:Function]
# @PURPOSE: Loads backup schedules from configuration and registers them.
@@ -52,6 +54,7 @@ class SchedulerService:
for env in config.environments:
if env.backup_schedule and env.backup_schedule.enabled:
self.add_backup_job(env.id, env.backup_schedule.cron_expression)
# [/DEF:SchedulerService.load_schedules:Function]
# [DEF:SchedulerService.add_backup_job:Function]
# @PURPOSE: Adds a scheduled backup job for an environment.
@@ -71,6 +74,7 @@ class SchedulerService:
logger.info(f"Scheduled backup job added for environment {env_id}: {cron_expression}")
except Exception as e:
logger.error(f"Failed to add backup job for environment {env_id}: {e}")
# [/DEF:SchedulerService.add_backup_job:Function]
# [DEF:SchedulerService._trigger_backup:Function]
# @PURPOSE: Triggered by the scheduler to start a backup task.
@@ -94,6 +98,7 @@ class SchedulerService:
self.task_manager.create_task("superset-backup", {"environment_id": env_id}),
self.loop
)
# [/DEF:SchedulerService._trigger_backup:Function]
# [/DEF:SchedulerService:Class]
# [/DEF:SchedulerModule:Module]

View File

@@ -35,7 +35,7 @@ class SupersetClient(BaseSupersetClient):
db['engine'] = db.pop('backend', None)
return databases
# [/DEF:SupersetClient.get_databases_summary]
# [/DEF:SupersetClient.get_databases_summary:Function]
# [DEF:SupersetClient.get_database_by_uuid:Function]
# @PURPOSE: Find a database by its UUID.
@@ -50,7 +50,7 @@ class SupersetClient(BaseSupersetClient):
}
_, databases = self.get_databases(query=query)
return databases[0] if databases else None
# [/DEF:SupersetClient.get_database_by_uuid]
# [/DEF:SupersetClient.get_database_by_uuid:Function]
# [DEF:SupersetClient.get_dashboards_summary:Function]
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
@@ -76,8 +76,8 @@ class SupersetClient(BaseSupersetClient):
"status": "published" if dash.get("published") else "draft"
})
return result
# [/DEF:SupersetClient.get_dashboards_summary]
# [/DEF:SupersetClient.get_dashboards_summary:Function]
# [/DEF:SupersetClient]
# [/DEF:SupersetClient:Class]
# [/DEF:backend.src.core.superset_client]
# [/DEF:backend.src.core.superset_client:Module]

View File

@@ -34,5 +34,7 @@ class TaskCleanupService:
to_delete = [t.id for t in tasks[settings.task_retention_limit:]]
self.persistence_service.delete_tasks(to_delete)
logger.info(f"Deleted {len(to_delete)} tasks exceeding limit of {settings.task_retention_limit}")
# [/DEF:TaskCleanupService.run_cleanup:Function]
# [/DEF:TaskCleanupService]
# [/DEF:TaskCleanupService:Class]
# [/DEF:TaskCleanupModule:Module]

View File

@@ -48,6 +48,6 @@ def suggest_mappings(source_databases: List[Dict], target_databases: List[Dict],
})
return suggestions
# [/DEF:suggest_mappings]
# [/DEF:suggest_mappings:Function]
# [/DEF:backend.src.core.utils.matching]
# [/DEF:backend.src.core.utils.matching:Module]

View File

@@ -47,4 +47,4 @@ def get_task_manager() -> TaskManager:
def get_scheduler_service() -> SchedulerService:
"""Dependency injector for the SchedulerService."""
return scheduler_service
# [/DEF]
# [/DEF:Dependencies:Module]

View File

@@ -14,7 +14,7 @@ class DashboardMetadata(BaseModel):
title: str
last_modified: str
status: str
# [/DEF:DashboardMetadata]
# [/DEF:DashboardMetadata:Class]
# [DEF:DashboardSelection:Class]
# @PURPOSE: Represents the user's selection of dashboards to migrate.
@@ -23,6 +23,6 @@ class DashboardSelection(BaseModel):
source_env_id: str
target_env_id: str
replace_db_config: bool = False
# [/DEF:DashboardSelection]
# [/DEF:DashboardSelection:Class]
# [/DEF:backend.src.models.dashboard]
# [/DEF:backend.src.models.dashboard:Module]

View File

@@ -26,7 +26,7 @@ class MigrationStatus(enum.Enum):
COMPLETED = "COMPLETED"
FAILED = "FAILED"
AWAITING_MAPPING = "AWAITING_MAPPING"
# [/DEF:MigrationStatus]
# [/DEF:MigrationStatus:Class]
# [DEF:Environment:Class]
# @PURPOSE: Represents a Superset instance environment.
@@ -37,7 +37,7 @@ class Environment(Base):
name = Column(String, nullable=False)
url = Column(String, nullable=False)
credentials_id = Column(String, nullable=False)
# [/DEF:Environment]
# [/DEF:Environment:Class]
# [DEF:DatabaseMapping:Class]
# @PURPOSE: Represents a mapping between source and target databases.
@@ -52,7 +52,7 @@ class DatabaseMapping(Base):
source_db_name = Column(String, nullable=False)
target_db_name = Column(String, nullable=False)
engine = Column(String, nullable=True)
# [/DEF:DatabaseMapping]
# [/DEF:DatabaseMapping:Class]
# [DEF:MigrationJob:Class]
# @PURPOSE: Represents a single migration execution job.
@@ -65,6 +65,6 @@ class MigrationJob(Base):
status = Column(SQLEnum(MigrationStatus), default=MigrationStatus.PENDING)
replace_db = Column(Boolean, default=False)
created_at = Column(DateTime(timezone=True), server_default=func.now())
# [/DEF:MigrationJob]
# [/DEF:MigrationJob:Class]
# [/DEF:backend.src.models.mapping]
# [/DEF:backend.src.models.mapping:Module]

View File

@@ -29,6 +29,6 @@ class TaskRecord(Base):
error = Column(String, nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now())
params = Column(JSON, nullable=True)
# [/DEF:TaskRecord]
# [/DEF:TaskRecord:Class]
# [/DEF:backend.src.models.task]
# [/DEF:backend.src.models.task:Module]

View File

@@ -25,6 +25,8 @@ from superset_tool.utils.fileio import (
from superset_tool.utils.init_clients import setup_clients
from ..dependencies import get_config_manager
# [DEF:BackupPlugin:Class]
# @PURPOSE: Implementation of the backup plugin logic.
class BackupPlugin(PluginBase):
"""
A plugin to back up Superset dashboards.
@@ -143,4 +145,5 @@ class BackupPlugin(PluginBase):
except (RequestException, IOError, KeyError) as e:
logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
raise e
# [/DEF:BackupPlugin]
# [/DEF:BackupPlugin:Class]
# [/DEF:BackupPlugin:Module]

View File

@@ -21,6 +21,8 @@ from ..core.migration_engine import MigrationEngine
from ..core.database import SessionLocal
from ..models.mapping import DatabaseMapping, Environment
# [DEF:MigrationPlugin:Class]
# @PURPOSE: Implementation of the migration plugin logic.
class MigrationPlugin(PluginBase):
"""
A plugin to migrate Superset dashboards between environments.
@@ -287,10 +289,12 @@ class MigrationPlugin(PluginBase):
continue
logger.error(f"[MigrationPlugin][Failure] Failed to migrate dashboard {title}: {exc}", exc_info=True)
# [/DEF:MigrationPlugin.execute:Action]
logger.info("[MigrationPlugin][Exit] Migration finished.")
except Exception as e:
logger.critical(f"[MigrationPlugin][Failure] Fatal error during migration: {e}", exc_info=True)
raise e
# [/DEF:MigrationPlugin]
# [/DEF:MigrationPlugin:Class]
# [/DEF:MigrationPlugin:Module]

View File

@@ -20,8 +20,10 @@ from superset_tool.models import SupersetConfig
class MappingService:
# [DEF:MappingService.__init__:Function]
# @PURPOSE: Initializes the mapping service with a config manager.
def __init__(self, config_manager):
self.config_manager = config_manager
# [/DEF:MappingService.__init__:Function]
# [DEF:MappingService._get_client:Function]
# @PURPOSE: Helper to get an initialized SupersetClient for an environment.
@@ -42,6 +44,7 @@ class MappingService:
}
)
return SupersetClient(superset_config)
# [/DEF:MappingService._get_client:Function]
# [DEF:MappingService.get_suggestions:Function]
# @PURPOSE: Fetches databases from both environments and returns fuzzy matching suggestions.
@@ -59,8 +62,8 @@ class MappingService:
target_dbs = target_client.get_databases_summary()
return suggest_mappings(source_dbs, target_dbs)
# [/DEF:MappingService.get_suggestions]
# [/DEF:MappingService.get_suggestions:Function]
# [/DEF:MappingService]
# [/DEF:MappingService:Class]
# [/DEF:backend.src.services.mapping_service]
# [/DEF:backend.src.services.mapping_service:Module]