semantic markup update
@@ -31,6 +31,12 @@ oauth2_scheme = OAuth2AuthorizationCodeBearer(
    tokenUrl="https://your-adfs-server/adfs/oauth2/token",
)

# [DEF:get_current_user:Function]
# @PURPOSE: Dependency to get the current user from the ADFS token.
# @PARAM: token (str) - The OAuth2 bearer token.
# @PRE: token should be provided via Authorization header.
# @POST: Returns user details if authenticated, else raises 401.
# @RETURN: Dict[str, str] - User information.
async def get_current_user(token: str = Depends(oauth2_scheme)):
    """
    Dependency to get the current user from the ADFS token.
@@ -49,4 +55,5 @@ async def get_current_user(token: str = Depends(oauth2_scheme)):
        )
    # A real implementation would return a user object.
    return {"placeholder_user": "user@example.com"}
# [/DEF:get_current_user:Function]
# [/DEF:AuthModule:Module]
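For reference, a dependency like `get_current_user` is consumed by declaring it as a route parameter. The sketch below is a hypothetical usage example, not part of this commit; the `/api/me` route name is an assumption:

```python
from fastapi import Depends, FastAPI

app = FastAPI()

# Hypothetical protected route: FastAPI resolves the oauth2_scheme chain,
# so get_current_user receives the bearer token from the Authorization header.
@app.get("/api/me")
async def read_me(user: dict = Depends(get_current_user)):
    # user is whatever get_current_user returns,
    # e.g. {"placeholder_user": "user@example.com"}
    return user
```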
@@ -43,10 +43,13 @@ class DatabaseResponse(BaseModel):

# [DEF:get_environments:Function]
# @PURPOSE: List all configured environments.
# @PRE: config_manager is injected via Depends.
# @POST: Returns a list of EnvironmentResponse objects.
# @RETURN: List[EnvironmentResponse]
@router.get("", response_model=List[EnvironmentResponse])
async def get_environments(config_manager=Depends(get_config_manager)):
    with belief_scope("get_environments"):
        envs = config_manager.get_environments()
        # Ensure envs is a list
        if not isinstance(envs, list):
            envs = []
@@ -65,6 +68,8 @@ async def get_environments(config_manager=Depends(get_config_manager)):

# [DEF:update_environment_schedule:Function]
# @PURPOSE: Update backup schedule for an environment.
# @PRE: Environment id exists, schedule is valid ScheduleSchema.
# @POST: Backup schedule updated and scheduler reloaded.
# @PARAM: id (str) - The environment ID.
# @PARAM: schedule (ScheduleSchema) - The new schedule.
@router.put("/{id}/schedule")
@@ -74,7 +79,8 @@ async def update_environment_schedule(
    config_manager=Depends(get_config_manager),
    scheduler_service=Depends(get_scheduler_service)
):
    with belief_scope("update_environment_schedule", f"id={id}"):
        envs = config_manager.get_environments()
        env = next((e for e in envs if e.id == id), None)
        if not env:
            raise HTTPException(status_code=404, detail="Environment not found")
@@ -93,11 +99,14 @@ async def update_environment_schedule(

# [DEF:get_environment_databases:Function]
# @PURPOSE: Fetch the list of databases from a specific environment.
# @PRE: Environment id exists.
# @POST: Returns a list of database summaries from the environment.
# @PARAM: id (str) - The environment ID.
# @RETURN: List[Dict] - List of databases.
@router.get("/{id}/databases")
async def get_environment_databases(id: str, config_manager=Depends(get_config_manager)):
    with belief_scope("get_environment_databases", f"id={id}"):
        envs = config_manager.get_environments()
        env = next((e for e in envs if e.id == id), None)
        if not env:
            raise HTTPException(status_code=404, detail="Environment not found")

@@ -53,13 +53,16 @@ class SuggestRequest(BaseModel):

# [DEF:get_mappings:Function]
# @PURPOSE: List all saved database mappings.
# @PRE: db session is injected.
# @POST: Returns filtered list of DatabaseMapping records.
@router.get("", response_model=List[MappingResponse])
async def get_mappings(
    source_env_id: Optional[str] = None,
    target_env_id: Optional[str] = None,
    db: Session = Depends(get_db)
):
    with belief_scope("get_mappings"):
        query = db.query(DatabaseMapping)
        if source_env_id:
            query = query.filter(DatabaseMapping.source_env_id == source_env_id)
        if target_env_id:
@@ -69,42 +72,48 @@ async def get_mappings(

# [DEF:create_mapping:Function]
# @PURPOSE: Create or update a database mapping.
# @PRE: mapping is valid MappingCreate, db session is injected.
# @POST: DatabaseMapping created or updated in database.
@router.post("", response_model=MappingResponse)
async def create_mapping(mapping: MappingCreate, db: Session = Depends(get_db)):
    with belief_scope("create_mapping"):
        # Check if mapping already exists
        existing = db.query(DatabaseMapping).filter(
            DatabaseMapping.source_env_id == mapping.source_env_id,
            DatabaseMapping.target_env_id == mapping.target_env_id,
            DatabaseMapping.source_db_uuid == mapping.source_db_uuid
        ).first()

        if existing:
            existing.target_db_uuid = mapping.target_db_uuid
            existing.target_db_name = mapping.target_db_name
            db.commit()
            db.refresh(existing)
            return existing

        new_mapping = DatabaseMapping(**mapping.dict())
        db.add(new_mapping)
        db.commit()
        db.refresh(new_mapping)
        return new_mapping
# [/DEF:create_mapping:Function]

# [DEF:suggest_mappings_api:Function]
# @PURPOSE: Get suggested mappings based on fuzzy matching.
# @PRE: request is valid SuggestRequest, config_manager is injected.
# @POST: Returns mapping suggestions.
@router.post("/suggest")
async def suggest_mappings_api(
    request: SuggestRequest,
    config_manager=Depends(get_config_manager)
):
    with belief_scope("suggest_mappings_api"):
        from backend.src.services.mapping_service import MappingService
        service = MappingService(config_manager)
        try:
            return await service.get_suggestions(request.source_env_id, request.target_env_id)
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))
# [/DEF:suggest_mappings_api:Function]

# [/DEF:backend.src.api.routes.mappings:Module]
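The create/update route above is effectively an upsert keyed on (source_env_id, target_env_id, source_db_uuid). A minimal sketch of the same pattern as a standalone helper — hypothetical, not part of the commit — could look like this:

```python
from sqlalchemy.orm import Session

def upsert_mapping(db: Session, mapping) -> "DatabaseMapping":
    """Get-or-create sketch mirroring the route's logic (hypothetical helper)."""
    existing = db.query(DatabaseMapping).filter(
        DatabaseMapping.source_env_id == mapping.source_env_id,
        DatabaseMapping.target_env_id == mapping.target_env_id,
        DatabaseMapping.source_db_uuid == mapping.source_db_uuid,
    ).first()
    row = existing or DatabaseMapping(**mapping.dict())
    if existing:
        # Only the target side is mutable for an existing mapping.
        existing.target_db_uuid = mapping.target_db_uuid
        existing.target_db_name = mapping.target_db_name
    else:
        db.add(row)
    db.commit()
    db.refresh(row)
    return row
```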

@@ -11,12 +11,19 @@ from ...dependencies import get_plugin_loader

router = APIRouter()

# [DEF:list_plugins:Function]
# @PURPOSE: Retrieve a list of all available plugins.
# @PRE: plugin_loader is injected via Depends.
# @POST: Returns a list of PluginConfig objects.
# @RETURN: List[PluginConfig] - List of registered plugins.
@router.get("", response_model=List[PluginConfig])
async def list_plugins(
    plugin_loader = Depends(get_plugin_loader)
):
    with belief_scope("list_plugins"):
        """
        Retrieve a list of all available plugins.
        """
        return plugin_loader.get_all_plugin_configs()
# [/DEF:list_plugins:Function]
# [/DEF:PluginsRouter:Module]

@@ -15,7 +15,7 @@ from typing import List
from ...core.config_models import AppConfig, Environment, GlobalSettings
from ...dependencies import get_config_manager
from ...core.config_manager import ConfigManager
from ...core.logger import logger, belief_scope
from ...core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
import os
@@ -25,10 +25,13 @@ router = APIRouter()

# [DEF:get_settings:Function]
# @PURPOSE: Retrieves all application settings.
# @PRE: Config manager is available.
# @POST: Returns masked AppConfig.
# @RETURN: AppConfig - The current configuration.
@router.get("/", response_model=AppConfig)
async def get_settings(config_manager: ConfigManager = Depends(get_config_manager)):
    with belief_scope("get_settings"):
        logger.info("[get_settings][Entry] Fetching all settings")
        config = config_manager.get_config().copy(deep=True)
        # Mask passwords
        for env in config.environments:
@@ -39,29 +42,37 @@ async def get_settings(config_manager: ConfigManager = Depends(get_config_manage

# [DEF:update_global_settings:Function]
# @PURPOSE: Updates global application settings.
# @PRE: New settings are provided.
# @POST: Global settings are updated.
# @PARAM: settings (GlobalSettings) - The new global settings.
# @RETURN: GlobalSettings - The updated settings.
@router.patch("/global", response_model=GlobalSettings)
async def update_global_settings(
    settings: GlobalSettings,
    config_manager: ConfigManager = Depends(get_config_manager)
):
    with belief_scope("update_global_settings"):
        logger.info("[update_global_settings][Entry] Updating global settings")
        config_manager.update_global_settings(settings)
        return settings
# [/DEF:update_global_settings:Function]

# [DEF:get_environments:Function]
# @PURPOSE: Lists all configured Superset environments.
# @PRE: Config manager is available.
# @POST: Returns list of environments.
# @RETURN: List[Environment] - List of environments.
@router.get("/environments", response_model=List[Environment])
async def get_environments(config_manager: ConfigManager = Depends(get_config_manager)):
    with belief_scope("get_environments"):
        logger.info("[get_environments][Entry] Fetching environments")
        return config_manager.get_environments()
# [/DEF:get_environments:Function]

# [DEF:add_environment:Function]
# @PURPOSE: Adds a new Superset environment.
# @PRE: Environment data is valid and reachable.
# @POST: Environment is added to config.
# @PARAM: env (Environment) - The environment to add.
# @RETURN: Environment - The added environment.
@router.post("/environments", response_model=Environment)
@@ -69,7 +80,8 @@ async def add_environment(
    env: Environment,
    config_manager: ConfigManager = Depends(get_config_manager)
):
    with belief_scope("add_environment"):
        logger.info(f"[add_environment][Entry] Adding environment {env.id}")

        # Validate connection before adding
        try:
@@ -95,16 +107,19 @@ async def add_environment(

# [DEF:update_environment:Function]
# @PURPOSE: Updates an existing Superset environment.
# @PRE: ID and valid environment data are provided.
# @POST: Environment is updated in config.
# @PARAM: id (str) - The ID of the environment to update.
# @PARAM: env (Environment) - The updated environment data.
# @RETURN: Environment - The updated environment.
@router.put("/environments/{id}", response_model=Environment)
async def update_environment(
    id: str,
    env: Environment,
    config_manager: ConfigManager = Depends(get_config_manager)
):
    with belief_scope("update_environment"):
        logger.info(f"[update_environment][Entry] Updating environment {id}")

        # If password is masked, we need the real one for validation
        env_to_validate = env.copy(deep=True)
@@ -138,19 +153,24 @@ async def update_environment(

# [DEF:delete_environment:Function]
# @PURPOSE: Deletes a Superset environment.
# @PRE: ID is provided.
# @POST: Environment is removed from config.
# @PARAM: id (str) - The ID of the environment to delete.
@router.delete("/environments/{id}")
async def delete_environment(
    id: str,
    config_manager: ConfigManager = Depends(get_config_manager)
):
    with belief_scope("delete_environment"):
        logger.info(f"[delete_environment][Entry] Deleting environment {id}")
        config_manager.delete_environment(id)
        return {"message": f"Environment {id} deleted"}
# [/DEF:delete_environment:Function]

# [DEF:test_environment_connection:Function]
# @PURPOSE: Tests the connection to a Superset environment.
# @PRE: ID is provided.
# @POST: Returns success or error status.
# @PARAM: id (str) - The ID of the environment to test.
# @RETURN: dict - Success message or error.
@router.post("/environments/{id}/test")
@@ -158,7 +178,8 @@ async def test_environment_connection(
    id: str,
    config_manager: ConfigManager = Depends(get_config_manager)
):
    with belief_scope("test_environment_connection"):
        logger.info(f"[test_environment_connection][Entry] Testing environment {id}")

        # Find environment
        env = next((e for e in config_manager.get_environments() if e.id == id), None)
@@ -194,6 +215,8 @@ async def test_environment_connection(

# [DEF:validate_backup_path:Function]
# @PURPOSE: Validates if a backup path exists and is writable.
# @PRE: Path is provided in path_data.
# @POST: Returns success or error status.
# @PARAM: path (str) - The path to validate.
# @RETURN: dict - Validation result.
@router.post("/validate-path")
@@ -201,11 +224,12 @@ async def validate_backup_path(
    path_data: dict,
    config_manager: ConfigManager = Depends(get_config_manager)
):
    with belief_scope("validate_backup_path"):
        path = path_data.get("path")
        if not path:
            raise HTTPException(status_code=400, detail="Path is required")

        logger.info(f"[validate_backup_path][Entry] Validating path: {path}")

        valid, message = config_manager.validate_path(path)

@@ -6,6 +6,7 @@
from typing import List, Dict, Any, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from ...core.logger import belief_scope

from ...core.task_manager import TaskManager, Task, TaskStatus, LogEntry
from ...dependencies import get_task_manager
@@ -23,6 +24,13 @@ class ResumeTaskRequest(BaseModel):
    passwords: Dict[str, str]

@router.post("", response_model=Task, status_code=status.HTTP_201_CREATED)
# [DEF:create_task:Function]
# @PURPOSE: Create and start a new task for a given plugin.
# @PARAM: request (CreateTaskRequest) - The request body containing plugin_id and params.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: plugin_id must exist and params must be valid for that plugin.
# @POST: A new task is created and started.
# @RETURN: Task - The created task instance.
async def create_task(
    request: CreateTaskRequest,
    task_manager: TaskManager = Depends(get_task_manager)
@@ -30,16 +38,27 @@ async def create_task(
    """
    Create and start a new task for a given plugin.
    """
    with belief_scope("create_task"):
        try:
            task = await task_manager.create_task(
                plugin_id=request.plugin_id,
                params=request.params
            )
            return task
        except ValueError as e:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e))
# [/DEF:create_task:Function]

@router.get("", response_model=List[Task])
# [DEF:list_tasks:Function]
# @PURPOSE: Retrieve a list of tasks with pagination and optional status filter.
# @PARAM: limit (int) - Maximum number of tasks to return.
# @PARAM: offset (int) - Number of tasks to skip.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_manager must be available.
# @POST: Returns a list of tasks.
# @RETURN: List[Task] - List of tasks.
async def list_tasks(
    limit: int = 10,
    offset: int = 0,
@@ -49,9 +68,18 @@ async def list_tasks(
    """
    Retrieve a list of tasks with pagination and optional status filter.
    """
    with belief_scope("list_tasks"):
        return task_manager.get_tasks(limit=limit, offset=offset, status=status)
# [/DEF:list_tasks:Function]

@router.get("/{task_id}", response_model=Task)
# [DEF:get_task:Function]
# @PURPOSE: Retrieve the details of a specific task.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_id must exist.
# @POST: Returns task details or raises 404.
# @RETURN: Task - The task details.
async def get_task(
    task_id: str,
    task_manager: TaskManager = Depends(get_task_manager)
@@ -59,12 +87,21 @@ async def get_task(
    """
    Retrieve the details of a specific task.
    """
    with belief_scope("get_task"):
        task = task_manager.get_task(task_id)
        if not task:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
        return task
# [/DEF:get_task:Function]

@router.get("/{task_id}/logs", response_model=List[LogEntry])
# [DEF:get_task_logs:Function]
# @PURPOSE: Retrieve logs for a specific task.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_id must exist.
# @POST: Returns a list of log entries or raises 404.
# @RETURN: List[LogEntry] - List of log entries.
async def get_task_logs(
    task_id: str,
    task_manager: TaskManager = Depends(get_task_manager)
@@ -72,12 +109,22 @@ async def get_task_logs(
    """
    Retrieve logs for a specific task.
    """
    with belief_scope("get_task_logs"):
        task = task_manager.get_task(task_id)
        if not task:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
        return task_manager.get_task_logs(task_id)
# [/DEF:get_task_logs:Function]

@router.post("/{task_id}/resolve", response_model=Task)
# [DEF:resolve_task:Function]
# @PURPOSE: Resolve a task that is awaiting mapping.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: request (ResolveTaskRequest) - The resolution parameters.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task must be in AWAITING_MAPPING status.
# @POST: Task is resolved and resumes execution.
# @RETURN: Task - The updated task object.
async def resolve_task(
    task_id: str,
    request: ResolveTaskRequest,
@@ -86,13 +133,23 @@ async def resolve_task(
    """
    Resolve a task that is awaiting mapping.
    """
    with belief_scope("resolve_task"):
        try:
            await task_manager.resolve_task(task_id, request.resolution_params)
            return task_manager.get_task(task_id)
        except ValueError as e:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
# [/DEF:resolve_task:Function]

@router.post("/{task_id}/resume", response_model=Task)
# [DEF:resume_task:Function]
# @PURPOSE: Resume a task that is awaiting input (e.g., passwords).
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: request (ResumeTaskRequest) - The input (passwords).
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task must be in AWAITING_INPUT status.
# @POST: Task resumes execution with provided input.
# @RETURN: Task - The updated task object.
async def resume_task(
    task_id: str,
    request: ResumeTaskRequest,
@@ -101,13 +158,21 @@ async def resume_task(
    """
    Resume a task that is awaiting input (e.g., passwords).
    """
    with belief_scope("resume_task"):
        try:
            task_manager.resume_task_with_password(task_id, request.passwords)
            return task_manager.get_task(task_id)
        except ValueError as e:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
# [/DEF:resume_task:Function]

@router.delete("", status_code=status.HTTP_204_NO_CONTENT)
# [DEF:clear_tasks:Function]
# @PURPOSE: Clear tasks matching the status filter.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_manager is available.
# @POST: Tasks are removed from memory/persistence.
async def clear_tasks(
    status: Optional[TaskStatus] = None,
    task_manager: TaskManager = Depends(get_task_manager)
@@ -115,6 +180,8 @@ async def clear_tasks(
    """
    Clear tasks matching the status filter. If no filter, clears all non-running tasks.
    """
    with belief_scope("clear_tasks", f"status={status}"):
        task_manager.clear_tasks(status)
        return
# [/DEF:clear_tasks:Function]
# [/DEF:TasksRouter:Module]
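A hypothetical client flow against the endpoints above, assuming the router is mounted under /api/tasks (the include_router line for tasks is truncated in this diff) and that the Task model serializes `id` and `status` fields:

```python
import httpx

BASE = "http://localhost:8000/api/tasks"  # assumed mount point

# Create a task for a plugin, then poll it and fetch its logs.
task = httpx.post(BASE, json={"plugin_id": "backup", "params": {}}).json()

current = httpx.get(f"{BASE}/{task['id']}").json()       # task details
logs = httpx.get(f"{BASE}/{task['id']}/logs").json()     # list of LogEntry dicts
print(current["status"], len(logs))
```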

@@ -19,7 +19,7 @@ import asyncio
import os

from .dependencies import get_task_manager, get_scheduler_service
from .core.logger import logger, belief_scope
from .api.routes import plugins, tasks, settings, environments, mappings, migration, connections
from .core.database import init_db

@@ -33,17 +33,29 @@ app = FastAPI(
)
# [/DEF:App:Global]

# [DEF:startup_event:Function]
# @PURPOSE: Handles application startup tasks, such as starting the scheduler.
# @PRE: None.
# @POST: Scheduler is started.
# Startup event
@app.on_event("startup")
async def startup_event():
    with belief_scope("startup_event"):
        scheduler = get_scheduler_service()
        scheduler.start()
# [/DEF:startup_event:Function]

# [DEF:shutdown_event:Function]
# @PURPOSE: Handles application shutdown tasks, such as stopping the scheduler.
# @PRE: None.
# @POST: Scheduler is stopped.
# Shutdown event
@app.on_event("shutdown")
async def shutdown_event():
    with belief_scope("shutdown_event"):
        scheduler = get_scheduler_service()
        scheduler.stop()
# [/DEF:shutdown_event:Function]

# Configure CORS
app.add_middleware(
@@ -55,12 +67,20 @@ app.add_middleware(
)


# [DEF:log_requests:Function]
# @PURPOSE: Middleware to log incoming HTTP requests and their response status.
# @PRE: request is a FastAPI Request object.
# @POST: Logs request and response details.
# @PARAM: request (Request) - The incoming request object.
# @PARAM: call_next (Callable) - The next middleware or route handler.
@app.middleware("http")
async def log_requests(request: Request, call_next):
    with belief_scope("log_requests", f"{request.method} {request.url.path}"):
        logger.info(f"[DEBUG] Incoming request: {request.method} {request.url.path}")
        response = await call_next(request)
        logger.info(f"[DEBUG] Response status: {response.status_code} for {request.url.path}")
        return response
# [/DEF:log_requests:Function]

# Include API routes
app.include_router(plugins.router, prefix="/api/plugins", tags=["Plugins"])
@@ -71,12 +91,14 @@ app.include_router(environments.router, prefix="/api/environments", tags=["Envir
app.include_router(mappings.router)
app.include_router(migration.router)

# [DEF:websocket_endpoint:Function]
# @PURPOSE: Provides a WebSocket endpoint for real-time log streaming of a task.
# @PRE: task_id must be a valid task ID.
# @POST: WebSocket connection is managed and logs are streamed until disconnect.
@app.websocket("/ws/logs/{task_id}")
async def websocket_endpoint(websocket: WebSocket, task_id: str):
    with belief_scope("websocket_endpoint", f"task_id={task_id}"):
        await websocket.accept()
        logger.info(f"WebSocket connection accepted for task {task_id}")
        task_manager = get_task_manager()
        queue = await task_manager.subscribe_logs(task_id)
@@ -126,7 +148,7 @@ async def websocket_endpoint(websocket: WebSocket, task_id: str):
            logger.error(f"WebSocket error for task {task_id}: {e}")
        finally:
            task_manager.unsubscribe_logs(task_id, queue)
# [/DEF:websocket_endpoint:Function]

# [DEF:StaticFiles:Mount]
# @SEMANTICS: static, frontend, spa
@@ -136,24 +158,32 @@ if frontend_path.exists():
    app.mount("/_app", StaticFiles(directory=str(frontend_path / "_app")), name="static")

    # Serve other static files from the root of build directory
    # [DEF:serve_spa:Function]
    # @PURPOSE: Serves frontend static files or index.html for SPA routing.
    # @PRE: file_path is requested by the client.
    # @POST: Returns the requested file or index.html as a fallback.
    @app.get("/{file_path:path}")
    async def serve_spa(file_path: str):
        with belief_scope("serve_spa", f"path={file_path}"):
            # Don't serve SPA for API routes that fell through
            if file_path.startswith("api/"):
                raise HTTPException(status_code=404, detail="API endpoint not found")

            full_path = frontend_path / file_path
            if full_path.is_file():
                return FileResponse(str(full_path))
            # Fallback to index.html for SPA routing
            return FileResponse(str(frontend_path / "index.html"))
    # [/DEF:serve_spa:Function]
else:
    # [DEF:read_root:Function]
    # @PURPOSE: A simple root endpoint to confirm that the API is running when frontend is missing.
    # @PRE: None.
    # @POST: Returns a JSON message indicating API status.
    @app.get("/")
    async def read_root():
        with belief_scope("read_root"):
            return {"message": "Superset Tools API is running (Frontend build not found)"}
    # [/DEF:read_root:Function]
# [/DEF:StaticFiles:Mount]
# [/DEF:AppModule:Module]
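For reference, a hypothetical client for the /ws/logs/{task_id} endpoint above; the `websockets` package and the JSON shape of pushed entries are assumptions:

```python
import asyncio
import json
import websockets  # any WebSocket client library would work

async def tail_task_logs(task_id: str):
    # Connects to the log-streaming endpoint and prints entries as the
    # server pushes them, until the connection is closed.
    uri = f"ws://localhost:8000/ws/logs/{task_id}"
    async with websockets.connect(uri) as ws:
        async for raw in ws:
            entry = json.loads(raw)  # assumed: server sends JSON log entries
            print(entry.get("level"), entry.get("message"))

# asyncio.run(tail_task_logs("some-task-id"))
```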

@@ -30,30 +30,33 @@ class ConfigManager:
    # @POST: self.config is an instance of AppConfig
    # @PARAM: config_path (str) - Path to the configuration file.
    def __init__(self, config_path: str = "config.json"):
        with belief_scope("__init__"):
            # 1. Runtime check of @PRE
            assert isinstance(config_path, str) and config_path, "config_path must be a non-empty string"

            logger.info(f"[ConfigManager][Entry] Initializing with {config_path}")

            # 2. Logic implementation
            self.config_path = Path(config_path)
            self.config: AppConfig = self._load_config()

            # Configure logger with loaded settings
            configure_logger(self.config.settings.logging)

            # 3. Runtime check of @POST
            assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"

            logger.info(f"[ConfigManager][Exit] Initialized")
    # [/DEF:__init__:Function]

    # [DEF:_load_config:Function]
    # @PURPOSE: Loads the configuration from disk or creates a default one.
    # @PRE: self.config_path is set.
    # @POST: isinstance(return, AppConfig)
    # @RETURN: AppConfig - The loaded or default configuration.
    def _load_config(self) -> AppConfig:
        with belief_scope("_load_config"):
            logger.debug(f"[_load_config][Entry] Loading from {self.config_path}")

            if not self.config_path.exists():
                logger.info(f"[_load_config][Action] Config file not found. Creating default.")
@@ -83,9 +86,11 @@ class ConfigManager:
    # [DEF:_save_config_to_disk:Function]
    # @PURPOSE: Saves the provided configuration object to disk.
    # @PRE: isinstance(config, AppConfig)
    # @POST: Configuration saved to disk.
    # @PARAM: config (AppConfig) - The configuration to save.
    def _save_config_to_disk(self, config: AppConfig):
        with belief_scope("_save_config_to_disk"):
            logger.debug(f"[_save_config_to_disk][Entry] Saving to {self.config_path}")

            # 1. Runtime check of @PRE
            assert isinstance(config, AppConfig), "config must be an instance of AppConfig"
@@ -101,23 +106,31 @@ class ConfigManager:

    # [DEF:save:Function]
    # @PURPOSE: Saves the current configuration state to disk.
    # @PRE: self.config is set.
    # @POST: self._save_config_to_disk called.
    def save(self):
        with belief_scope("save"):
            self._save_config_to_disk(self.config)
    # [/DEF:save:Function]

    # [DEF:get_config:Function]
    # @PURPOSE: Returns the current configuration.
    # @PRE: self.config is set.
    # @POST: Returns self.config.
    # @RETURN: AppConfig - The current configuration.
    def get_config(self) -> AppConfig:
        with belief_scope("get_config"):
            return self.config
    # [/DEF:get_config:Function]

    # [DEF:update_global_settings:Function]
    # @PURPOSE: Updates the global settings and persists the change.
    # @PRE: isinstance(settings, GlobalSettings)
    # @POST: self.config.settings updated and saved.
    # @PARAM: settings (GlobalSettings) - The new global settings.
    def update_global_settings(self, settings: GlobalSettings):
        with belief_scope("update_global_settings"):
            logger.info(f"[update_global_settings][Entry] Updating settings")

            # 1. Runtime check of @PRE
            assert isinstance(settings, GlobalSettings), "settings must be an instance of GlobalSettings"
@@ -134,10 +147,13 @@ class ConfigManager:

    # [DEF:validate_path:Function]
    # @PURPOSE: Validates if a path exists and is writable.
    # @PRE: path is a string.
    # @POST: Returns (bool, str) status.
    # @PARAM: path (str) - The path to validate.
    # @RETURN: tuple (bool, str) - (is_valid, message)
    def validate_path(self, path: str) -> tuple[bool, str]:
        with belief_scope("validate_path"):
            p = os.path.abspath(path)
            if not os.path.exists(p):
                try:
                    os.makedirs(p, exist_ok=True)
@@ -152,24 +168,32 @@ class ConfigManager:

    # [DEF:get_environments:Function]
    # @PURPOSE: Returns the list of configured environments.
    # @PRE: self.config is set.
    # @POST: Returns list of environments.
    # @RETURN: List[Environment] - List of environments.
    def get_environments(self) -> List[Environment]:
        with belief_scope("get_environments"):
            return self.config.environments
    # [/DEF:get_environments:Function]

    # [DEF:has_environments:Function]
    # @PURPOSE: Checks if at least one environment is configured.
    # @PRE: self.config is set.
    # @POST: Returns boolean indicating if environments exist.
    # @RETURN: bool - True if at least one environment exists.
    def has_environments(self) -> bool:
        with belief_scope("has_environments"):
            return len(self.config.environments) > 0
    # [/DEF:has_environments:Function]

    # [DEF:add_environment:Function]
    # @PURPOSE: Adds a new environment to the configuration.
    # @PRE: isinstance(env, Environment)
    # @POST: Environment added or updated in self.config.environments.
    # @PARAM: env (Environment) - The environment to add.
    def add_environment(self, env: Environment):
        with belief_scope("add_environment"):
            logger.info(f"[add_environment][Entry] Adding environment {env.id}")

            # 1. Runtime check of @PRE
            assert isinstance(env, Environment), "env must be an instance of Environment"
@@ -186,11 +210,13 @@ class ConfigManager:
    # [DEF:update_environment:Function]
    # @PURPOSE: Updates an existing environment.
    # @PRE: isinstance(env_id, str) and len(env_id) > 0 and isinstance(updated_env, Environment)
    # @POST: Returns True if environment was found and updated.
    # @PARAM: env_id (str) - The ID of the environment to update.
    # @PARAM: updated_env (Environment) - The updated environment data.
    # @RETURN: bool - True if updated, False otherwise.
    def update_environment(self, env_id: str, updated_env: Environment) -> bool:
        with belief_scope("update_environment"):
            logger.info(f"[update_environment][Entry] Updating {env_id}")

            # 1. Runtime check of @PRE
            assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
@@ -215,9 +241,11 @@ class ConfigManager:
    # [DEF:delete_environment:Function]
    # @PURPOSE: Deletes an environment by ID.
    # @PRE: isinstance(env_id, str) and len(env_id) > 0
    # @POST: Environment removed from self.config.environments if it existed.
    # @PARAM: env_id (str) - The ID of the environment to delete.
    def delete_environment(self, env_id: str):
        with belief_scope("delete_environment"):
            logger.info(f"[delete_environment][Entry] Deleting {env_id}")

            # 1. Runtime check of @PRE
            assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"

@@ -46,33 +46,40 @@ TasksSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=tasks_e

# [DEF:init_db:Function]
# @PURPOSE: Initializes the database by creating all tables.
# @PRE: engine and tasks_engine are initialized.
# @POST: Database tables created.
def init_db():
    with belief_scope("init_db"):
        Base.metadata.create_all(bind=engine)
        Base.metadata.create_all(bind=tasks_engine)
# [/DEF:init_db:Function]

# [DEF:get_db:Function]
# @PURPOSE: Dependency for getting a database session.
# @PRE: SessionLocal is initialized.
# @POST: Session is closed after use.
# @RETURN: Generator[Session, None, None]
def get_db():
    with belief_scope("get_db"):
        db = SessionLocal()
        try:
            yield db
        finally:
            db.close()
# [/DEF:get_db:Function]

# [DEF:get_tasks_db:Function]
# @PURPOSE: Dependency for getting a tasks database session.
# @PRE: TasksSessionLocal is initialized.
# @POST: Session is closed after use.
# @RETURN: Generator[Session, None, None]
def get_tasks_db():
    with belief_scope("get_tasks_db"):
        db = TasksSessionLocal()
        try:
            yield db
        finally:
            db.close()
# [/DEF:get_tasks_db:Function]

# [/DEF:backend.src.core.database:Module]
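get_db and get_tasks_db follow FastAPI's generator-dependency pattern: everything before `yield` runs when the request starts, and the `finally` block runs after the response is sent, guaranteeing the session is released. Outside FastAPI the same idea is usually written as a context manager; a minimal sketch, assuming the same `SessionLocal` factory:

```python
from contextlib import contextmanager

@contextmanager
def session_scope():
    # Equivalent pattern for scripts and tests (hypothetical helper).
    db = SessionLocal()
    try:
        yield db
        db.commit()       # commit on success
    except Exception:
        db.rollback()     # roll back on failure
        raise
    finally:
        db.close()        # always release the connection

# Usage sketch:
# with session_scope() as db:
#     db.add(some_object)
```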

@@ -22,12 +22,19 @@ _enable_belief_state = True
# [DEF:BeliefFormatter:Class]
# @PURPOSE: Custom logging formatter that adds belief state prefixes to log messages.
class BeliefFormatter(logging.Formatter):
    # [DEF:format:Function]
    # @PURPOSE: Formats the log record, adding belief state context if available.
    # @PRE: record is a logging.LogRecord.
    # @POST: Returns formatted string.
    # @PARAM: record (logging.LogRecord) - The log record to format.
    # @RETURN: str - The formatted log message.
    def format(self, record):
        msg = super().format(record)
        anchor_id = getattr(_belief_state, 'anchor_id', None)
        if anchor_id:
            msg = f"[{anchor_id}][Action] {msg}"
        return msg
    # [/DEF:format:Function]
# [/DEF:BeliefFormatter:Class]

# Re-using LogEntry from task_manager for consistency
@@ -42,8 +49,12 @@ class LogEntry(BaseModel):

# [/DEF:LogEntry:Class]

# [DEF:belief_scope:Function]
# @PURPOSE: Context manager for structured Belief State logging.
# @PARAM: anchor_id (str) - The identifier for the current semantic block.
# @PARAM: message (str) - Optional entry message.
# @PRE: anchor_id must be provided.
# @POST: Thread-local belief state is updated and entry/exit logs are generated.
@contextmanager
def belief_scope(anchor_id: str, message: str = ""):
    # Log Entry if enabled
@@ -71,9 +82,9 @@ def belief_scope(anchor_id: str, message: str = ""):
    # Restore old anchor
    _belief_state.anchor_id = old_anchor

# [/DEF:belief_scope:Function]
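The hunk elides most of the context manager's body. A behaviorally similar sketch, inferred from the @PRE/@POST annotations, the anchor-restore logic shown, and the [Entry]/[Coherence:Failed] tags used elsewhere in this commit — the exact messages and failure handling are assumptions:

```python
import logging
import threading
from contextlib import contextmanager

logger = logging.getLogger(__name__)
_belief_state = threading.local()
_enable_belief_state = True

@contextmanager
def belief_scope_sketch(anchor_id: str, message: str = ""):
    # Illustrative reconstruction; the real implementation may differ.
    assert anchor_id, "anchor_id must be provided"
    old_anchor = getattr(_belief_state, "anchor_id", None)
    _belief_state.anchor_id = anchor_id
    if _enable_belief_state:
        logger.info(f"[{anchor_id}][Entry] {message}")
    try:
        yield
    except Exception as e:
        if _enable_belief_state:
            logger.error(f"[{anchor_id}][Coherence:Failed] {e}")
        raise
    finally:
        # Restore old anchor
        _belief_state.anchor_id = old_anchor
```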

# [DEF:configure_logger:Function]
# @PURPOSE: Configures the logger with the provided logging settings.
# @PRE: config is a valid LoggingConfig instance.
# @POST: Logger level, handlers, and belief state flag are updated.
@@ -115,7 +126,7 @@ def configure_logger(config):
    handler.setFormatter(BeliefFormatter(
        '[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
    ))
# [/DEF:configure_logger:Function]

# [DEF:WebSocketLogHandler:Class]
# @SEMANTICS: logging, handler, websocket, buffer
@@ -125,38 +136,59 @@ class WebSocketLogHandler(logging.Handler):
    A logging handler that stores log records and can be extended to send them
    over WebSockets.
    """
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the handler with a fixed-capacity buffer.
    # @PRE: capacity is an integer.
    # @POST: Instance initialized with empty deque.
    # @PARAM: capacity (int) - Maximum number of logs to keep in memory.
    def __init__(self, capacity: int = 1000):
        with belief_scope("WebSocketLogHandler.__init__"):
            super().__init__()
            self.log_buffer: deque[LogEntry] = deque(maxlen=capacity)
            # In a real implementation, you'd have a way to manage active WebSocket connections
            # e.g., self.active_connections: Set[WebSocket] = set()
    # [/DEF:__init__:Function]

    # [DEF:emit:Function]
    # @PURPOSE: Captures a log record, formats it, and stores it in the buffer.
    # @PRE: record is a logging.LogRecord.
    # @POST: Log is added to the log_buffer.
    # @PARAM: record (logging.LogRecord) - The log record to emit.
    def emit(self, record: logging.LogRecord):
        with belief_scope("WebSocketLogHandler.emit"):
            try:
                log_entry = LogEntry(
                    level=record.levelname,
                    message=self.format(record),
                    context={
                        "name": record.name,
                        "pathname": record.pathname,
                        "lineno": record.lineno,
                        "funcName": record.funcName,
                        "process": record.process,
                        "thread": record.thread,
                    }
                )
                self.log_buffer.append(log_entry)
                # Here you would typically send the log_entry to all active WebSocket connections
                # for real-time streaming to the frontend.
                # Example: for ws in self.active_connections: await ws.send_json(log_entry.dict())
            except Exception:
                self.handleError(record)
    # [/DEF:emit:Function]

    # [DEF:get_recent_logs:Function]
    # @PURPOSE: Returns a list of recent log entries from the buffer.
    # @PRE: None.
    # @POST: Returns list of LogEntry objects.
    # @RETURN: List[LogEntry] - List of buffered log entries.
    def get_recent_logs(self) -> List[LogEntry]:
        with belief_scope("WebSocketLogHandler.get_recent_logs"):
            """
            Returns a list of recent log entries from the buffer.
            """
            return list(self.log_buffer)
    # [/DEF:get_recent_logs:Function]

# [/DEF:WebSocketLogHandler:Class]

@@ -23,12 +23,14 @@ import yaml
# @PURPOSE: Engine for transforming Superset export ZIPs.
class MigrationEngine:

    # [DEF:transform_zip:Function]
    # @PURPOSE: Extracts ZIP, replaces database UUIDs in YAMLs, and re-packages.
    # @PARAM: zip_path (str) - Path to the source ZIP file.
    # @PARAM: output_path (str) - Path where the transformed ZIP will be saved.
    # @PARAM: db_mapping (Dict[str, str]) - Mapping of source UUID to target UUID.
    # @PARAM: strip_databases (bool) - Whether to remove the databases directory from the archive.
    # @PRE: zip_path must point to a valid Superset export archive.
    # @POST: Transformed archive is saved to output_path.
    # @RETURN: bool - True if successful.
    def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str], strip_databases: bool = True) -> bool:
        """
@@ -73,10 +75,14 @@ class MigrationEngine:
        except Exception as e:
            logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
            return False
    # [/DEF:transform_zip:Function]

    # [DEF:_transform_yaml:Function]
    # @PURPOSE: Replaces database_uuid in a single YAML file.
    # @PARAM: file_path (Path) - Path to the YAML file.
    # @PARAM: db_mapping (Dict[str, str]) - UUID mapping dictionary.
    # @PRE: file_path must exist and be readable.
    # @POST: File is modified in-place if source UUID matches mapping.
    def _transform_yaml(self, file_path: Path, db_mapping: Dict[str, str]):
        with open(file_path, 'r') as f:
            data = yaml.safe_load(f)
@@ -91,7 +97,7 @@ class MigrationEngine:
            data['database_uuid'] = db_mapping[source_uuid]
            with open(file_path, 'w') as f:
                yaml.dump(data, f)
    # [/DEF:_transform_yaml:Function]
# [/DEF:MigrationEngine:Class]
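The diff elides the middle of _transform_yaml. Filling the gap under stated assumptions (top-level `database_uuid` key, rewrite only on a mapping hit), a self-contained sketch of the whole transformation:

```python
from pathlib import Path
from typing import Dict
import yaml

def transform_yaml_sketch(file_path: Path, db_mapping: Dict[str, str]) -> None:
    # Hedged reconstruction of _transform_yaml with the elided middle filled in:
    # rewrite the file only when its database_uuid has a mapping entry.
    with open(file_path, 'r') as f:
        data = yaml.safe_load(f)
    if not isinstance(data, dict):
        return
    source_uuid = data.get('database_uuid')
    if source_uuid in db_mapping:
        data['database_uuid'] = db_mapping[source_uuid]
        with open(file_path, 'w') as f:
            yaml.dump(data, f)
```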
@@ -1,5 +1,6 @@
from abc import ABC, abstractmethod
from typing import Dict, Any
from .logger import belief_scope

from pydantic import BaseModel, Field

@@ -17,43 +18,86 @@ class PluginBase(ABC):

    @property
    @abstractmethod
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string ID.
    # @RETURN: str - Plugin ID.
    def id(self) -> str:
        """A unique identifier for the plugin."""
        with belief_scope("id"):
            pass
    # [/DEF:id:Function]

    @property
    @abstractmethod
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string name.
    # @RETURN: str - Plugin name.
    def name(self) -> str:
        """A human-readable name for the plugin."""
        with belief_scope("name"):
            pass
    # [/DEF:name:Function]

    @property
    @abstractmethod
    # [DEF:description:Function]
    # @PURPOSE: Returns a brief description of the plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string description.
    # @RETURN: str - Plugin description.
    def description(self) -> str:
        """A brief description of what the plugin does."""
        with belief_scope("description"):
            pass
    # [/DEF:description:Function]

    @property
    @abstractmethod
    # [DEF:version:Function]
    # @PURPOSE: Returns the version of the plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string version.
    # @RETURN: str - Plugin version.
    def version(self) -> str:
        """The version of the plugin."""
        with belief_scope("version"):
            pass
    # [/DEF:version:Function]

    @abstractmethod
    # [DEF:get_schema:Function]
    # @PURPOSE: Returns the JSON schema for the plugin's input parameters.
    # @PRE: Plugin instance exists.
    # @POST: Returns dict schema.
    # @RETURN: Dict[str, Any] - JSON schema.
    def get_schema(self) -> Dict[str, Any]:
        """
        Returns the JSON schema for the plugin's input parameters.
        This schema will be used to generate the frontend form.
        """
        with belief_scope("get_schema"):
            pass
    # [/DEF:get_schema:Function]

    @abstractmethod
    # [DEF:execute:Function]
    # @PURPOSE: Executes the plugin's core logic.
    # @PARAM: params (Dict[str, Any]) - Validated input parameters.
    # @PRE: params must be a dictionary.
    # @POST: Plugin execution is completed.
    async def execute(self, params: Dict[str, Any]):
        """
        Executes the plugin's logic.
        The `params` argument will be validated against the schema returned by `get_schema()`.
        """
        with belief_scope("execute"):
            pass
    # [/DEF:execute:Function]
# [/DEF:PluginBase:Class]
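A hypothetical minimal subclass illustrating the contract above — the plugin name, schema, and return value are illustrative only; dropped into the plugin directory, PluginLoader (below) would discover and register it:

```python
from typing import Any, Dict

class EchoPlugin(PluginBase):
    # Minimal concrete plugin satisfying every abstract member.
    @property
    def id(self) -> str:
        return "echo"

    @property
    def name(self) -> str:
        return "Echo"

    @property
    def description(self) -> str:
        return "Returns its input parameters unchanged."

    @property
    def version(self) -> str:
        return "0.1.0"

    def get_schema(self) -> Dict[str, Any]:
        # JSON schema used to generate the frontend form.
        return {
            "type": "object",
            "properties": {"text": {"type": "string"}},
            "required": ["text"],
        }

    async def execute(self, params: Dict[str, Any]):
        with belief_scope("EchoPlugin.execute"):
            return params
```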
|
||||
|
||||
# [DEF:PluginConfig:Class]
|
||||
|
||||
@@ -4,6 +4,7 @@ import sys # Added this line
|
||||
from typing import Dict, Type, List, Optional
|
||||
from .plugin_base import PluginBase, PluginConfig
|
||||
from jsonschema import validate
|
||||
from .logger import belief_scope
|
||||
|
||||
# [DEF:PluginLoader:Class]
|
||||
# @SEMANTICS: plugin, loader, dynamic, import
|
||||
@@ -16,22 +17,28 @@ class PluginLoader:
|
||||
that inherit from PluginBase.
|
||||
"""
|
||||
|
||||
# [DEF:PluginLoader.__init__:Function]
|
||||
# [DEF:__init__:Function]
|
||||
# @PURPOSE: Initializes the PluginLoader with a directory to scan.
|
||||
# @PRE: plugin_dir is a valid directory path.
|
||||
# @POST: Plugins are loaded and registered.
|
||||
# @PARAM: plugin_dir (str) - The directory containing plugin modules.
|
||||
def __init__(self, plugin_dir: str):
|
||||
self.plugin_dir = plugin_dir
|
||||
self._plugins: Dict[str, PluginBase] = {}
|
||||
self._plugin_configs: Dict[str, PluginConfig] = {}
|
||||
self._load_plugins()
|
||||
# [/DEF:PluginLoader.__init__:Function]
|
||||
with belief_scope("__init__"):
|
||||
self.plugin_dir = plugin_dir
|
||||
self._plugins: Dict[str, PluginBase] = {}
|
||||
self._plugin_configs: Dict[str, PluginConfig] = {}
|
||||
self._load_plugins()
|
||||
# [/DEF:__init__:Function]
|
||||
|
||||
# [DEF:PluginLoader._load_plugins:Function]
|
||||
# [DEF:_load_plugins:Function]
|
||||
# @PURPOSE: Scans the plugin directory and loads all valid plugins.
|
||||
# @PRE: plugin_dir exists or can be created.
|
||||
# @POST: _load_module is called for each .py file.
|
||||
def _load_plugins(self):
|
||||
"""
|
||||
Scans the plugin directory, imports modules, and registers valid plugins.
|
||||
"""
|
||||
with belief_scope("_load_plugins"):
|
||||
"""
|
||||
Scans the plugin directory, imports modules, and registers valid plugins.
|
||||
"""
|
||||
if not os.path.exists(self.plugin_dir):
|
||||
os.makedirs(self.plugin_dir)
|
||||
|
||||
@@ -47,16 +54,19 @@ class PluginLoader:
|
||||
module_name = filename[:-3]
|
||||
file_path = os.path.join(self.plugin_dir, filename)
|
||||
self._load_module(module_name, file_path)
|
||||
# [/DEF:PluginLoader._load_plugins:Function]
|
||||
# [/DEF:_load_plugins:Function]
|
||||
|
||||
# [DEF:PluginLoader._load_module:Function]
|
||||
# [DEF:_load_module:Function]
|
||||
# @PURPOSE: Loads a single Python module and discovers PluginBase implementations.
|
||||
# @PRE: module_name and file_path are valid.
|
||||
# @POST: Plugin classes are instantiated and registered.
|
||||
# @PARAM: module_name (str) - The name of the module.
|
||||
# @PARAM: file_path (str) - The path to the module file.
|
||||
def _load_module(self, module_name: str, file_path: str):
|
||||
"""
|
||||
Loads a single Python module and extracts PluginBase subclasses.
|
||||
"""
|
||||
with belief_scope("_load_module"):
|
||||
"""
|
||||
Loads a single Python module and extracts PluginBase subclasses.
|
||||
"""
|
||||
# Try to determine the correct package prefix based on how the app is running
|
||||
# For standalone execution, we need to handle the import differently
|
||||
if __name__ == "__main__" or "test" in __name__:
|
||||
@@ -94,15 +104,18 @@ class PluginLoader:
|
||||
self._register_plugin(plugin_instance)
|
||||
except Exception as e:
|
||||
print(f"Error instantiating plugin {attribute_name} in {module_name}: {e}") # Replace with proper logging
|
||||
# [/DEF:PluginLoader._load_module:Function]
|
||||
# [/DEF:_load_module:Function]

    # [DEF:PluginLoader._register_plugin:Function]
    # [DEF:_register_plugin:Function]
    # @PURPOSE: Registers a PluginBase instance and its configuration.
    # @PRE: plugin_instance is a valid implementation of PluginBase.
    # @POST: Plugin is added to _plugins and _plugin_configs.
    # @PARAM: plugin_instance (PluginBase) - The plugin instance to register.
    def _register_plugin(self, plugin_instance: PluginBase):
        """
        Registers a valid plugin instance.
        """
        with belief_scope("_register_plugin"):
            """
            Registers a valid plugin instance.
            """
            plugin_id = plugin_instance.id
            if plugin_id in self._plugins:
                print(f"Warning: Duplicate plugin ID '{plugin_id}' found. Skipping.")  # Replace with proper logging

@@ -131,39 +144,48 @@ class PluginLoader:
            except Exception as e:
                from ..core.logger import logger
                logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
    # [/DEF:PluginLoader._register_plugin:Function]
    # [/DEF:_register_plugin:Function]

    # [DEF:PluginLoader.get_plugin:Function]
    # [DEF:get_plugin:Function]
    # @PURPOSE: Retrieves a loaded plugin instance by its ID.
    # @PRE: plugin_id is a string.
    # @POST: Returns plugin instance or None.
    # @PARAM: plugin_id (str) - The unique identifier of the plugin.
    # @RETURN: Optional[PluginBase] - The plugin instance if found, otherwise None.
    def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
        """
        Returns a loaded plugin instance by its ID.
        """
        with belief_scope("get_plugin"):
            """
            Returns a loaded plugin instance by its ID.
            """
            return self._plugins.get(plugin_id)
    # [/DEF:PluginLoader.get_plugin:Function]
    # [/DEF:get_plugin:Function]

    # [DEF:PluginLoader.get_all_plugin_configs:Function]
    # [DEF:get_all_plugin_configs:Function]
    # @PURPOSE: Returns a list of all registered plugin configurations.
    # @PRE: None.
    # @POST: Returns list of all PluginConfig objects.
    # @RETURN: List[PluginConfig] - A list of plugin configurations.
    def get_all_plugin_configs(self) -> List[PluginConfig]:
        """
        Returns a list of all loaded plugin configurations.
        """
        with belief_scope("get_all_plugin_configs"):
            """
            Returns a list of all loaded plugin configurations.
            """
            return list(self._plugin_configs.values())
    # [/DEF:PluginLoader.get_all_plugin_configs:Function]
    # [/DEF:get_all_plugin_configs:Function]

    # [DEF:PluginLoader.has_plugin:Function]
    # [DEF:has_plugin:Function]
    # @PURPOSE: Checks if a plugin with the given ID is registered.
    # @PRE: plugin_id is a string.
    # @POST: Returns True if plugin exists.
    # @PARAM: plugin_id (str) - The unique identifier of the plugin.
    # @RETURN: bool - True if the plugin is registered, False otherwise.
    def has_plugin(self, plugin_id: str) -> bool:
        """
        Checks if a plugin with the given ID is loaded.
        """
        with belief_scope("has_plugin"):
            """
            Checks if a plugin with the given ID is loaded.
            """
            return plugin_id in self._plugins
    # [/DEF:PluginLoader.has_plugin:Function]
    # [/DEF:has_plugin:Function]

# [/DEF:PluginLoader:Class]
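
# Usage sketch (hypothetical caller; PluginLoader is normally constructed once
# in dependencies.py, as shown further down in this commit):
#
#     loader = PluginLoader(plugin_dir="backend/src/plugins")
#     if loader.has_plugin("superset-backup"):
#         plugin = loader.get_plugin("superset-backup")
#         print(plugin.name, plugin.version)
#     for config in loader.get_all_plugin_configs():
#         print(config.name)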

@@ -17,34 +17,45 @@ import asyncio
# @SEMANTICS: scheduler, service, apscheduler
# @PURPOSE: Provides a service to manage scheduled backup tasks.
class SchedulerService:
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the scheduler service with task and config managers.
    # @PRE: task_manager and config_manager must be provided.
    # @POST: Scheduler instance is created but not started.
    def __init__(self, task_manager, config_manager: ConfigManager):
        with belief_scope("SchedulerService.__init__"):
            self.task_manager = task_manager
            self.config_manager = config_manager
            self.scheduler = BackgroundScheduler()
            self.loop = asyncio.get_event_loop()
    # [/DEF:__init__:Function]

    # [DEF:SchedulerService.start:Function]
    # [DEF:start:Function]
    # @PURPOSE: Starts the background scheduler and loads initial schedules.
    # @PRE: Scheduler should be initialized.
    # @POST: Scheduler is running and schedules are loaded.
    def start(self):
        with belief_scope("SchedulerService.start"):
            if not self.scheduler.running:
                self.scheduler.start()
                logger.info("Scheduler started.")
            self.load_schedules()
    # [/DEF:SchedulerService.start:Function]
    # [/DEF:start:Function]

    # [DEF:SchedulerService.stop:Function]
    # [DEF:stop:Function]
    # @PURPOSE: Stops the background scheduler.
    # @PRE: Scheduler should be running.
    # @POST: Scheduler is shut down.
    def stop(self):
        with belief_scope("SchedulerService.stop"):
            if self.scheduler.running:
                self.scheduler.shutdown()
                logger.info("Scheduler stopped.")
    # [/DEF:SchedulerService.stop:Function]
    # [/DEF:stop:Function]

    # [DEF:SchedulerService.load_schedules:Function]
    # [DEF:load_schedules:Function]
    # @PURPOSE: Loads backup schedules from configuration and registers them.
    # @PRE: config_manager must have valid configuration.
    # @POST: All enabled backup jobs are added to the scheduler.
    def load_schedules(self):
        with belief_scope("SchedulerService.load_schedules"):
            # Clear existing jobs

@@ -54,12 +65,14 @@ class SchedulerService:
            for env in config.environments:
                if env.backup_schedule and env.backup_schedule.enabled:
                    self.add_backup_job(env.id, env.backup_schedule.cron_expression)
    # [/DEF:SchedulerService.load_schedules:Function]
    # [/DEF:load_schedules:Function]

    # [DEF:SchedulerService.add_backup_job:Function]
    # [DEF:add_backup_job:Function]
    # @PURPOSE: Adds a scheduled backup job for an environment.
    # @PARAM: env_id (str) - The ID of the environment.
    # @PARAM: cron_expression (str) - The cron expression for the schedule.
    # @PRE: env_id and cron_expression must be valid strings.
    # @POST: A new job is added to the scheduler or replaced if it already exists.
    # @PARAM: env_id (str) - The ID of the environment.
    # @PARAM: cron_expression (str) - The cron expression for the schedule.
    def add_backup_job(self, env_id: str, cron_expression: str):
        with belief_scope("SchedulerService.add_backup_job", f"env_id={env_id}, cron={cron_expression}"):
            job_id = f"backup_{env_id}"

@@ -74,11 +87,13 @@ class SchedulerService:
                logger.info(f"Scheduled backup job added for environment {env_id}: {cron_expression}")
            except Exception as e:
                logger.error(f"Failed to add backup job for environment {env_id}: {e}")
    # [/DEF:SchedulerService.add_backup_job:Function]
    # [/DEF:add_backup_job:Function]

    # [DEF:SchedulerService._trigger_backup:Function]
    # [DEF:_trigger_backup:Function]
    # @PURPOSE: Triggered by the scheduler to start a backup task.
    # @PARAM: env_id (str) - The ID of the environment.
    # @PRE: env_id must be a valid environment ID.
    # @POST: A new backup task is created in the task manager if not already running.
    # @PARAM: env_id (str) - The ID of the environment.
    def _trigger_backup(self, env_id: str):
        with belief_scope("SchedulerService._trigger_backup", f"env_id={env_id}"):
            logger.info(f"Triggering scheduled backup for environment {env_id}")

@@ -98,7 +113,7 @@ class SchedulerService:
                self.task_manager.create_task("superset-backup", {"environment_id": env_id}),
                self.loop
            )
    # [/DEF:SchedulerService._trigger_backup:Function]
    # [/DEF:_trigger_backup:Function]

# [/DEF:SchedulerService:Class]
# [/DEF:SchedulerModule:Module]
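
# Lifecycle sketch (illustrative; the real wiring happens in dependencies.py
# and the application startup hooks, which are outside this hunk):
#
#     scheduler_service = SchedulerService(task_manager, config_manager)
#     scheduler_service.start()   # starts APScheduler and loads cron jobs
#     scheduler_service.add_backup_job("env-1", "0 2 * * *")  # nightly at 02:00
#     scheduler_service.stop()    # shuts the scheduler down cleanly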

@@ -9,6 +9,7 @@

# [SECTION: IMPORTS]
from typing import List, Dict, Optional, Tuple
from backend.src.core.logger import belief_scope
from superset_tool.client import SupersetClient as BaseSupersetClient
from superset_tool.models import SupersetConfig
# [/SECTION]

@@ -17,88 +18,101 @@ from superset_tool.models import SupersetConfig
# @PURPOSE: Extended SupersetClient for migration-specific operations.
class SupersetClient(BaseSupersetClient):

    # [DEF:SupersetClient.get_databases_summary:Function]
    # [DEF:get_databases_summary:Function]
    # @PURPOSE: Fetch a summary of databases including uuid, name, and engine.
    # @POST: Returns a list of database dictionaries with 'engine' field.
    # @RETURN: List[Dict] - Summary of databases.
    # @PRE: self.network must be initialized and authenticated.
    # @POST: Returns a list of database dictionaries with 'engine' field.
    # @RETURN: List[Dict] - Summary of databases.
    def get_databases_summary(self) -> List[Dict]:
        """
        Fetch a summary of databases including uuid, name, and engine.
        """
        query = {
            "columns": ["uuid", "database_name", "backend"]
        }
        _, databases = self.get_databases(query=query)

        # Map 'backend' to 'engine' for consistency with contracts
        for db in databases:
            db['engine'] = db.pop('backend', None)
        with belief_scope("SupersetClient.get_databases_summary"):
            """
            Fetch a summary of databases including uuid, name, and engine.
            """
            query = {
                "columns": ["uuid", "database_name", "backend"]
            }
            _, databases = self.get_databases(query=query)

        return databases
    # [/DEF:SupersetClient.get_databases_summary:Function]
            # Map 'backend' to 'engine' for consistency with contracts
            for db in databases:
                db['engine'] = db.pop('backend', None)

            return databases
    # [/DEF:get_databases_summary:Function]

    # [DEF:SupersetClient.get_database_by_uuid:Function]
    # [DEF:get_database_by_uuid:Function]
    # @PURPOSE: Find a database by its UUID.
    # @PARAM: db_uuid (str) - The UUID of the database.
    # @RETURN: Optional[Dict] - Database info if found, else None.
    # @PRE: db_uuid must be a string.
    # @POST: Returns database metadata if found.
    # @PARAM: db_uuid (str) - The UUID of the database.
    # @RETURN: Optional[Dict] - Database info if found, else None.
    def get_database_by_uuid(self, db_uuid: str) -> Optional[Dict]:
        """
        Find a database by its UUID.
        """
        query = {
            "filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
        }
        _, databases = self.get_databases(query=query)
        return databases[0] if databases else None
    # [/DEF:SupersetClient.get_database_by_uuid:Function]
        with belief_scope("SupersetClient.get_database_by_uuid", f"uuid={db_uuid}"):
            """
            Find a database by its UUID.
            """
            query = {
                "filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
            }
            _, databases = self.get_databases(query=query)
            return databases[0] if databases else None
    # [/DEF:get_database_by_uuid:Function]

    # [DEF:SupersetClient.get_dashboards_summary:Function]
    # [DEF:get_dashboards_summary:Function]
    # @PURPOSE: Fetches dashboard metadata optimized for the grid.
    # @POST: Returns a list of dashboard dictionaries.
    # @RETURN: List[Dict]
    # @PRE: self.network must be authenticated.
    # @POST: Returns a list of dashboard dictionaries mapped to the grid schema.
    # @RETURN: List[Dict]
    def get_dashboards_summary(self) -> List[Dict]:
        """
        Fetches dashboard metadata optimized for the grid.
        Returns a list of dictionaries mapped to DashboardMetadata fields.
        """
        query = {
            "columns": ["id", "dashboard_title", "changed_on_utc", "published"]
        }
        _, dashboards = self.get_dashboards(query=query)
        with belief_scope("SupersetClient.get_dashboards_summary"):
            """
            Fetches dashboard metadata optimized for the grid.
            Returns a list of dictionaries mapped to DashboardMetadata fields.
            """
            query = {
                "columns": ["id", "dashboard_title", "changed_on_utc", "published"]
            }
            _, dashboards = self.get_dashboards(query=query)

        # Map fields to DashboardMetadata schema
        result = []
        for dash in dashboards:
            result.append({
                "id": dash.get("id"),
                "title": dash.get("dashboard_title"),
                "last_modified": dash.get("changed_on_utc"),
                "status": "published" if dash.get("published") else "draft"
            })
        return result
    # [/DEF:SupersetClient.get_dashboards_summary:Function]
            # Map fields to DashboardMetadata schema
            result = []
            for dash in dashboards:
                result.append({
                    "id": dash.get("id"),
                    "title": dash.get("dashboard_title"),
                    "last_modified": dash.get("changed_on_utc"),
                    "status": "published" if dash.get("published") else "draft"
                })
            return result
    # [/DEF:get_dashboards_summary:Function]

    # [DEF:SupersetClient.get_dataset:Function]
    # [DEF:get_dataset:Function]
    # @PURPOSE: Fetch full dataset structure including columns and metrics.
    # @PARAM: dataset_id (int) - The ID of the dataset.
    # @RETURN: Dict - The dataset metadata.
    # @PRE: dataset_id must be a valid integer.
    # @POST: Returns full dataset metadata from Superset API.
    # @PARAM: dataset_id (int) - The ID of the dataset.
    # @RETURN: Dict - The dataset metadata.
    def get_dataset(self, dataset_id: int) -> Dict:
        """
        Fetch full dataset structure.
        """
        return self.network.get(f"/api/v1/dataset/{dataset_id}").json()
    # [/DEF:SupersetClient.get_dataset:Function]
        with belief_scope("SupersetClient.get_dataset", f"id={dataset_id}"):
            """
            Fetch full dataset structure.
            """
            return self.network.get(f"/api/v1/dataset/{dataset_id}").json()
    # [/DEF:get_dataset:Function]

    # [DEF:SupersetClient.update_dataset:Function]
    # [DEF:update_dataset:Function]
    # @PURPOSE: Update dataset metadata.
    # @PARAM: dataset_id (int) - The ID of the dataset.
    # @PARAM: data (Dict) - The payload for update.
    # @PRE: dataset_id must be valid, data must be a valid Superset dataset payload.
    # @POST: Dataset is updated in Superset.
    # @PARAM: dataset_id (int) - The ID of the dataset.
    # @PARAM: data (Dict) - The payload for update.
    def update_dataset(self, dataset_id: int, data: Dict):
        """
        Update dataset metadata.
        """
        self.network.put(f"/api/v1/dataset/{dataset_id}", json=data)
    # [/DEF:SupersetClient.update_dataset:Function]
        with belief_scope("SupersetClient.update_dataset", f"id={dataset_id}"):
            """
            Update dataset metadata.
            """
            self.network.put(f"/api/v1/dataset/{dataset_id}", json=data)
    # [/DEF:update_dataset:Function]

# [/DEF:SupersetClient:Class]
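
# Usage sketch (assumes an authenticated client instance; see setup_clients in
# the plugin code further down for how instances are actually created):
#
#     for db in client.get_databases_summary():
#         print(db["uuid"], db["database_name"], db["engine"])
#     dataset = client.get_dataset(42)["result"]  # 42 is an illustrative ID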

@@ -186,17 +186,23 @@ class TaskManager:

    # [DEF:get_task:Function]
    # @PURPOSE: Retrieves a task by its ID.
    # @PRE: task_id is a string.
    # @POST: Returns Task object or None.
    # @PARAM: task_id (str) - ID of the task.
    # @RETURN: Optional[Task] - The task or None.
    def get_task(self, task_id: str) -> Optional[Task]:
        return self.tasks.get(task_id)
        with belief_scope("TaskManager.get_task", f"task_id={task_id}"):
            return self.tasks.get(task_id)
    # [/DEF:get_task:Function]

    # [DEF:get_all_tasks:Function]
    # @PURPOSE: Retrieves all registered tasks.
    # @PRE: None.
    # @POST: Returns list of all Task objects.
    # @RETURN: List[Task] - All tasks.
    def get_all_tasks(self) -> List[Task]:
        return list(self.tasks.values())
        with belief_scope("TaskManager.get_all_tasks"):
            return list(self.tasks.values())
    # [/DEF:get_all_tasks:Function]

    # [DEF:get_tasks:Function]

@@ -208,7 +214,8 @@ class TaskManager:
    # @PARAM: status (Optional[TaskStatus]) - Filter by task status.
    # @RETURN: List[Task] - List of tasks matching criteria.
    def get_tasks(self, limit: int = 10, offset: int = 0, status: Optional[TaskStatus] = None) -> List[Task]:
        tasks = list(self.tasks.values())
        with belief_scope("TaskManager.get_tasks"):
            tasks = list(self.tasks.values())
            if status:
                tasks = [t for t in tasks if t.status == status]
            # Sort by start_time descending (most recent first)

@@ -218,11 +225,14 @@ class TaskManager:

    # [DEF:get_task_logs:Function]
    # @PURPOSE: Retrieves logs for a specific task.
    # @PRE: task_id is a string.
    # @POST: Returns list of LogEntry objects.
    # @PARAM: task_id (str) - ID of the task.
    # @RETURN: List[LogEntry] - List of log entries.
    def get_task_logs(self, task_id: str) -> List[LogEntry]:
        task = self.tasks.get(task_id)
        return task.logs if task else []
        with belief_scope("TaskManager.get_task_logs", f"task_id={task_id}"):
            task = self.tasks.get(task_id)
            return task.logs if task else []
    # [/DEF:get_task_logs:Function]

    # [DEF:_add_log:Function]

@@ -234,51 +244,61 @@ class TaskManager:
    # @PARAM: message (str) - Log message.
    # @PARAM: context (Optional[Dict]) - Log context.
    def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
        task = self.tasks.get(task_id)
        if not task:
            return
        with belief_scope("TaskManager._add_log", f"task_id={task_id}"):
            task = self.tasks.get(task_id)
            if not task:
                return

        log_entry = LogEntry(level=level, message=message, context=context)
        task.logs.append(log_entry)
        self.persistence_service.persist_task(task)
            log_entry = LogEntry(level=level, message=message, context=context)
            task.logs.append(log_entry)
            self.persistence_service.persist_task(task)

        # Notify subscribers
        if task_id in self.subscribers:
            for queue in self.subscribers[task_id]:
                self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
            # Notify subscribers
            if task_id in self.subscribers:
                for queue in self.subscribers[task_id]:
                    self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
    # [/DEF:_add_log:Function]

    # [DEF:subscribe_logs:Function]
    # @PURPOSE: Subscribes to real-time logs for a task.
    # @PRE: task_id is a string.
    # @POST: Returns an asyncio.Queue for log entries.
    # @PARAM: task_id (str) - ID of the task.
    # @RETURN: asyncio.Queue - Queue for log entries.
    async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
        queue = asyncio.Queue()
        if task_id not in self.subscribers:
            self.subscribers[task_id] = []
        self.subscribers[task_id].append(queue)
        return queue
        with belief_scope("TaskManager.subscribe_logs", f"task_id={task_id}"):
            queue = asyncio.Queue()
            if task_id not in self.subscribers:
                self.subscribers[task_id] = []
            self.subscribers[task_id].append(queue)
            return queue
    # [/DEF:subscribe_logs:Function]

    # [DEF:unsubscribe_logs:Function]
    # @PURPOSE: Unsubscribes from real-time logs for a task.
    # @PRE: task_id is a string, queue is asyncio.Queue.
    # @POST: Queue removed from subscribers.
    # @PARAM: task_id (str) - ID of the task.
    # @PARAM: queue (asyncio.Queue) - Queue to remove.
    def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
        if task_id in self.subscribers:
            if queue in self.subscribers[task_id]:
                self.subscribers[task_id].remove(queue)
            if not self.subscribers[task_id]:
                del self.subscribers[task_id]
        with belief_scope("TaskManager.unsubscribe_logs", f"task_id={task_id}"):
            if task_id in self.subscribers:
                if queue in self.subscribers[task_id]:
                    self.subscribers[task_id].remove(queue)
                if not self.subscribers[task_id]:
                    del self.subscribers[task_id]
    # [/DEF:unsubscribe_logs:Function]
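
    # Consumption sketch for the subscribe/unsubscribe pair above (illustrative,
    # e.g. from a streaming log endpoint):
    #
    #     queue = await task_manager.subscribe_logs(task_id)
    #     try:
    #         while True:
    #             entry = await queue.get()   # LogEntry pushed by _add_log
    #             yield entry
    #     finally:
    #         task_manager.unsubscribe_logs(task_id, queue)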

    # [DEF:load_persisted_tasks:Function]
    # @PURPOSE: Load persisted tasks using persistence service.
    # @PRE: None.
    # @POST: Persisted tasks loaded into self.tasks.
    def load_persisted_tasks(self) -> None:
        loaded_tasks = self.persistence_service.load_tasks(limit=100)
        for task in loaded_tasks:
            if task.id not in self.tasks:
                self.tasks[task.id] = task
        with belief_scope("TaskManager.load_persisted_tasks"):
            loaded_tasks = self.persistence_service.load_tasks(limit=100)
            for task in loaded_tasks:
                if task.id not in self.tasks:
                    self.tasks[task.id] = task
    # [/DEF:load_persisted_tasks:Function]

    # [DEF:await_input:Function]

@@ -334,6 +354,8 @@ class TaskManager:

    # [DEF:clear_tasks:Function]
    # @PURPOSE: Clears tasks based on status filter.
    # @PRE: status is Optional[TaskStatus].
    # @POST: Tasks matching filter (or all non-active) cleared from registry and database.
    # @PARAM: status (Optional[TaskStatus]) - Filter by task status.
    # @RETURN: int - Number of tasks cleared.
    def clear_tasks(self, status: Optional[TaskStatus] = None) -> int:

@@ -26,12 +26,15 @@ class TaskPersistenceService:
    # @PRE: None.
    # @POST: Service is ready.
    def __init__(self):
        # We use TasksSessionLocal from database.py
        pass
        with belief_scope("TaskPersistenceService.__init__"):
            # We use TasksSessionLocal from database.py
            pass
    # [/DEF:__init__:Function]

    # [DEF:persist_task:Function]
    # @PURPOSE: Persists or updates a single task in the database.
    # @PRE: isinstance(task, Task)
    # @POST: Task record created or updated in database.
    # @PARAM: task (Task) - The task object to persist.
    def persist_task(self, task: Task) -> None:
        with belief_scope("TaskPersistenceService.persist_task", f"task_id={task.id}"):

@@ -75,14 +78,19 @@ class TaskPersistenceService:

    # [DEF:persist_tasks:Function]
    # @PURPOSE: Persists multiple tasks.
    # @PRE: isinstance(tasks, list)
    # @POST: All tasks in list are persisted.
    # @PARAM: tasks (List[Task]) - The list of tasks to persist.
    def persist_tasks(self, tasks: List[Task]) -> None:
        for task in tasks:
            self.persist_task(task)
        with belief_scope("TaskPersistenceService.persist_tasks"):
            for task in tasks:
                self.persist_task(task)
    # [/DEF:persist_tasks:Function]

    # [DEF:load_tasks:Function]
    # @PURPOSE: Loads tasks from the database.
    # @PRE: limit is an integer.
    # @POST: Returns list of Task objects.
    # @PARAM: limit (int) - Max tasks to load.
    # @PARAM: status (Optional[TaskStatus]) - Filter by status.
    # @RETURN: List[Task] - The loaded tasks.

@@ -128,6 +136,8 @@ class TaskPersistenceService:

    # [DEF:delete_tasks:Function]
    # @PURPOSE: Deletes specific tasks from the database.
    # @PRE: task_ids is a list of strings.
    # @POST: Specified task records deleted from database.
    # @PARAM: task_ids (List[str]) - List of task IDs to delete.
    def delete_tasks(self, task_ids: List[str]) -> None:
        if not task_ids:

@@ -10,6 +10,7 @@ from .core.task_manager import TaskManager
from .core.config_manager import ConfigManager
from .core.scheduler import SchedulerService
from .core.database import init_db
from .core.logger import logger, belief_scope

# Initialize singletons
# Use absolute path relative to this file to ensure plugins are found regardless of CWD

@@ -20,13 +21,20 @@ config_manager = ConfigManager(config_path=str(config_path))
# Initialize database before any other services that might use it
init_db()

# [DEF:get_config_manager:Function]
# @PURPOSE: Dependency injector for the ConfigManager.
# @PRE: Global config_manager must be initialized.
# @POST: Returns shared ConfigManager instance.
# @RETURN: ConfigManager - The shared config manager instance.
def get_config_manager() -> ConfigManager:
    """Dependency injector for the ConfigManager."""
    return config_manager
    with belief_scope("get_config_manager"):
        return config_manager
# [/DEF:get_config_manager:Function]

plugin_dir = Path(__file__).parent / "plugins"

plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
from .core.logger import logger
logger.info(f"PluginLoader initialized with directory: {plugin_dir}")
logger.info(f"Available plugins: {[config.name for config in plugin_loader.get_all_plugin_configs()]}")

@@ -36,15 +44,37 @@ logger.info("TaskManager initialized")
scheduler_service = SchedulerService(task_manager, config_manager)
logger.info("SchedulerService initialized")

# [DEF:get_plugin_loader:Function]
# @PURPOSE: Dependency injector for the PluginLoader.
# @PRE: Global plugin_loader must be initialized.
# @POST: Returns shared PluginLoader instance.
# @RETURN: PluginLoader - The shared plugin loader instance.
def get_plugin_loader() -> PluginLoader:
    """Dependency injector for the PluginLoader."""
    return plugin_loader
    with belief_scope("get_plugin_loader"):
        return plugin_loader
# [/DEF:get_plugin_loader:Function]

# [DEF:get_task_manager:Function]
# @PURPOSE: Dependency injector for the TaskManager.
# @PRE: Global task_manager must be initialized.
# @POST: Returns shared TaskManager instance.
# @RETURN: TaskManager - The shared task manager instance.
def get_task_manager() -> TaskManager:
    """Dependency injector for the TaskManager."""
    return task_manager
    with belief_scope("get_task_manager"):
        return task_manager
# [/DEF:get_task_manager:Function]

# [DEF:get_scheduler_service:Function]
# @PURPOSE: Dependency injector for the SchedulerService.
# @PRE: Global scheduler_service must be initialized.
# @POST: Returns shared SchedulerService instance.
# @RETURN: SchedulerService - The shared scheduler service instance.
def get_scheduler_service() -> SchedulerService:
    """Dependency injector for the SchedulerService."""
    return scheduler_service
    with belief_scope("get_scheduler_service"):
        return scheduler_service
# [/DEF:get_scheduler_service:Function]

# [/DEF:Dependencies:Module]
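
# Wiring sketch: these injectors are consumed with FastAPI's Depends in the
# router modules elsewhere in this commit; a minimal illustrative route:
#
#     from fastapi import APIRouter, Depends
#
#     router = APIRouter()
#
#     @router.get("/plugins")
#     async def list_plugins(plugin_loader=Depends(get_plugin_loader)):
#         return [c.name for c in plugin_loader.get_all_plugin_configs()]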

@@ -11,6 +11,7 @@ from pathlib import Path
from requests.exceptions import RequestException

from ..core.plugin_base import PluginBase
from ..core.logger import belief_scope
from superset_tool.client import SupersetClient
from superset_tool.exceptions import SupersetAPIError
from superset_tool.utils.logger import SupersetLogger

@@ -33,24 +34,58 @@ class BackupPlugin(PluginBase):
    """

    @property
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the backup plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string ID.
    # @RETURN: str - "superset-backup"
    def id(self) -> str:
        return "superset-backup"
        with belief_scope("id"):
            return "superset-backup"
    # [/DEF:id:Function]

    @property
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the backup plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string name.
    # @RETURN: str - Plugin name.
    def name(self) -> str:
        return "Superset Dashboard Backup"
        with belief_scope("name"):
            return "Superset Dashboard Backup"
    # [/DEF:name:Function]

    @property
    # [DEF:description:Function]
    # @PURPOSE: Returns a description of the backup plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string description.
    # @RETURN: str - Plugin description.
    def description(self) -> str:
        return "Backs up all dashboards from a Superset instance."
        with belief_scope("description"):
            return "Backs up all dashboards from a Superset instance."
    # [/DEF:description:Function]

    @property
    # [DEF:version:Function]
    # @PURPOSE: Returns the version of the backup plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string version.
    # @RETURN: str - "1.0.0"
    def version(self) -> str:
        return "1.0.0"
        with belief_scope("version"):
            return "1.0.0"
    # [/DEF:version:Function]

    # [DEF:get_schema:Function]
    # @PURPOSE: Returns the JSON schema for backup plugin parameters.
    # @PRE: Plugin instance exists.
    # @POST: Returns dictionary schema.
    # @RETURN: Dict[str, Any] - JSON schema.
    def get_schema(self) -> Dict[str, Any]:
        config_manager = get_config_manager()
        envs = [e.name for e in config_manager.get_environments()]
        with belief_scope("get_schema"):
            config_manager = get_config_manager()
            envs = [e.name for e in config_manager.get_environments()]
            default_path = config_manager.get_config().settings.backup_path

            return {

@@ -71,79 +106,87 @@ class BackupPlugin(PluginBase):
                },
                "required": ["env", "backup_path"],
            }
    # [/DEF:get_schema:Function]

    # [DEF:execute:Function]
    # @PURPOSE: Executes the dashboard backup logic.
    # @PARAM: params (Dict[str, Any]) - Backup parameters (env, backup_path).
    # @PRE: Target environment must be configured. params must be a dictionary.
    # @POST: All dashboards are exported and archived.
    async def execute(self, params: Dict[str, Any]):
        config_manager = get_config_manager()
        env_id = params.get("environment_id")

        # Resolve environment name if environment_id is provided
        if env_id:
            env_config = next((e for e in config_manager.get_environments() if e.id == env_id), None)
            if env_config:
                params["env"] = env_config.name

        env = params.get("env")
        if not env:
            raise KeyError("env")

        backup_path_str = params.get("backup_path") or config_manager.get_config().settings.backup_path
        backup_path = Path(backup_path_str)

        logger = SupersetLogger(log_dir=backup_path / "Logs", console=True)
        logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")

        try:
        with belief_scope("execute"):
            config_manager = get_config_manager()
            if not config_manager.has_environments():
                raise ValueError("No Superset environments configured. Please add an environment in Settings.")
            env_id = params.get("environment_id")

            # Resolve environment name if environment_id is provided
            if env_id:
                env_config = next((e for e in config_manager.get_environments() if e.id == env_id), None)
                if env_config:
                    params["env"] = env_config.name

            env = params.get("env")
            if not env:
                raise KeyError("env")

            backup_path_str = params.get("backup_path") or config_manager.get_config().settings.backup_path
            backup_path = Path(backup_path_str)

            logger = SupersetLogger(log_dir=backup_path / "Logs", console=True)
            logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")

            try:
            config_manager = get_config_manager()
            if not config_manager.has_environments():
                raise ValueError("No Superset environments configured. Please add an environment in Settings.")

            clients = setup_clients(logger, custom_envs=config_manager.get_environments())
            client = clients.get(env)

                clients = setup_clients(logger, custom_envs=config_manager.get_environments())
                client = clients.get(env)

            if not client:
                raise ValueError(f"Environment '{env}' not found in configuration.")

            dashboard_count, dashboard_meta = client.get_dashboards()
            logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")
                if not client:
                    raise ValueError(f"Environment '{env}' not found in configuration.")

                dashboard_count, dashboard_meta = client.get_dashboards()
                logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")

            if dashboard_count == 0:
                logger.info("[BackupPlugin][Exit] No dashboards to back up.")
                return
                if dashboard_count == 0:
                    logger.info("[BackupPlugin][Exit] No dashboards to back up.")
                    return

            for db in dashboard_meta:
                dashboard_id = db.get('id')
                dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
                if not dashboard_id:
                    continue
                for db in dashboard_meta:
                    dashboard_id = db.get('id')
                    dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
                    if not dashboard_id:
                        continue

                try:
                    dashboard_base_dir_name = sanitize_filename(f"{dashboard_title}")
                    dashboard_dir = backup_path / env.upper() / dashboard_base_dir_name
                    dashboard_dir.mkdir(parents=True, exist_ok=True)
                    try:
                        dashboard_base_dir_name = sanitize_filename(f"{dashboard_title}")
                        dashboard_dir = backup_path / env.upper() / dashboard_base_dir_name
                        dashboard_dir.mkdir(parents=True, exist_ok=True)

                    zip_content, filename = client.export_dashboard(dashboard_id)
                        zip_content, filename = client.export_dashboard(dashboard_id)

                    save_and_unpack_dashboard(
                        zip_content=zip_content,
                        original_filename=filename,
                        output_dir=dashboard_dir,
                        unpack=False,
                        logger=logger
                    )
                        save_and_unpack_dashboard(
                            zip_content=zip_content,
                            original_filename=filename,
                            output_dir=dashboard_dir,
                            unpack=False,
                            logger=logger
                        )

                    archive_exports(str(dashboard_dir), policy=RetentionPolicy(), logger=logger)
                        archive_exports(str(dashboard_dir), policy=RetentionPolicy(), logger=logger)

                except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
                    logger.error(f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
                    continue

            consolidate_archive_folders(backup_path / env.upper(), logger=logger)
            remove_empty_directories(str(backup_path / env.upper()), logger=logger)
                    except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
                        logger.error(f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
                        continue

                consolidate_archive_folders(backup_path / env.upper(), logger=logger)
                remove_empty_directories(str(backup_path / env.upper()), logger=logger)

            logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")
                logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")

        except (RequestException, IOError, KeyError) as e:
            logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
            raise e
            except (RequestException, IOError, KeyError) as e:
                logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
                raise e
    # [/DEF:execute:Function]
# [/DEF:BackupPlugin:Class]
# [/DEF:BackupPlugin:Module]
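
# Invocation sketch (illustrative parameters; in the application the
# TaskManager drives execute() via create_task("superset-backup", ...)):
#
#     import asyncio
#
#     plugin = BackupPlugin()
#     asyncio.run(plugin.execute({"env": "prod", "backup_path": "/backups"}))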

@@ -20,25 +20,57 @@ class DebugPlugin(PluginBase):
    """

    @property
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the debug plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string ID.
    # @RETURN: str - "system-debug"
    def id(self) -> str:
        return "system-debug"
        with belief_scope("id"):
            return "system-debug"
    # [/DEF:id:Function]

    @property
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the debug plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string name.
    # @RETURN: str - Plugin name.
    def name(self) -> str:
        return "System Debug"
        with belief_scope("name"):
            return "System Debug"
    # [/DEF:name:Function]

    @property
    # [DEF:description:Function]
    # @PURPOSE: Returns a description of the debug plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string description.
    # @RETURN: str - Plugin description.
    def description(self) -> str:
        return "Run system diagnostics and debug Superset API responses."
        with belief_scope("description"):
            return "Run system diagnostics and debug Superset API responses."
    # [/DEF:description:Function]

    @property
    # [DEF:version:Function]
    # @PURPOSE: Returns the version of the debug plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string version.
    # @RETURN: str - "1.0.0"
    def version(self) -> str:
        return "1.0.0"
        with belief_scope("version"):
            return "1.0.0"
    # [/DEF:version:Function]

    # [DEF:DebugPlugin.get_schema:Function]
    # [DEF:get_schema:Function]
    # @PURPOSE: Returns the JSON schema for the debug plugin parameters.
    # @PRE: Plugin instance exists.
    # @POST: Returns dictionary schema.
    # @RETURN: Dict[str, Any] - JSON schema.
    def get_schema(self) -> Dict[str, Any]:
        return {
        with belief_scope("get_schema"):
            return {
                "type": "object",
                "properties": {
                    "action": {

@@ -70,12 +102,16 @@ class DebugPlugin(PluginBase):
                },
                "required": ["action"]
            }
    # [/DEF:DebugPlugin.get_schema:Function]
    # [/DEF:get_schema:Function]

    # [DEF:DebugPlugin.execute:Function]
    # [DEF:execute:Function]
    # @PURPOSE: Executes the debug logic.
    # @PARAM: params (Dict[str, Any]) - Debug parameters.
    # @PRE: action must be provided in params.
    # @POST: Debug action is executed and results returned.
    # @RETURN: Dict[str, Any] - Execution results.
    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        with belief_scope("DebugPlugin.execute", f"params={params}"):
        with belief_scope("execute"):
            action = params.get("action")

            if action == "test-db-api":

@@ -84,16 +120,17 @@ class DebugPlugin(PluginBase):
                return await self._get_dataset_structure(params)
            else:
                raise ValueError(f"Unknown action: {action}")
    # [/DEF:DebugPlugin.execute:Function]
    # [/DEF:execute:Function]

    # [DEF:DebugPlugin._test_db_api:Function]
    # [DEF:_test_db_api:Function]
    # @PURPOSE: Tests database API connectivity for source and target environments.
    # @PRE: source_env and target_env params exist.
    # @PRE: source_env and target_env params exist in params.
    # @POST: Returns DB counts for both envs.
    # @PARAM: params (Dict) - Plugin parameters.
    # @RETURN: Dict - Comparison results.
    async def _test_db_api(self, params: Dict[str, Any]) -> Dict[str, Any]:
        source_env_name = params.get("source_env")
        with belief_scope("_test_db_api"):
            source_env_name = params.get("source_env")
            target_env_name = params.get("target_env")

            if not source_env_name or not target_env_name:

@@ -117,16 +154,17 @@ class DebugPlugin(PluginBase):
            }

            return results
    # [/DEF:DebugPlugin._test_db_api:Function]
    # [/DEF:_test_db_api:Function]

    # [DEF:DebugPlugin._get_dataset_structure:Function]
    # [DEF:_get_dataset_structure:Function]
    # @PURPOSE: Retrieves the structure of a dataset.
    # @PRE: env and dataset_id params exist.
    # @PRE: env and dataset_id params exist in params.
    # @POST: Returns dataset JSON structure.
    # @PARAM: params (Dict) - Plugin parameters.
    # @RETURN: Dict - Dataset structure.
    async def _get_dataset_structure(self, params: Dict[str, Any]) -> Dict[str, Any]:
        env_name = params.get("env")
        with belief_scope("_get_dataset_structure"):
            env_name = params.get("env")
            dataset_id = params.get("dataset_id")

            if not env_name or dataset_id is None:

@@ -143,7 +181,7 @@ class DebugPlugin(PluginBase):

            dataset_response = client.get_dataset(dataset_id)
            return dataset_response.get('result') or {}
    # [/DEF:DebugPlugin._get_dataset_structure:Function]
    # [/DEF:_get_dataset_structure:Function]

# [/DEF:DebugPlugin:Class]
# [/DEF:DebugPluginModule:Module]
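
# Invocation sketch (illustrative environment names):
#
#     result = await DebugPlugin().execute({
#         "action": "test-db-api",
#         "source_env": "dev",
#         "target_env": "prod",
#     })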

@@ -24,25 +24,57 @@ class MapperPlugin(PluginBase):
    """

    @property
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the mapper plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string ID.
    # @RETURN: str - "dataset-mapper"
    def id(self) -> str:
        return "dataset-mapper"
        with belief_scope("id"):
            return "dataset-mapper"
    # [/DEF:id:Function]

    @property
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the mapper plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string name.
    # @RETURN: str - Plugin name.
    def name(self) -> str:
        return "Dataset Mapper"
        with belief_scope("name"):
            return "Dataset Mapper"
    # [/DEF:name:Function]

    @property
    # [DEF:description:Function]
    # @PURPOSE: Returns a description of the mapper plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string description.
    # @RETURN: str - Plugin description.
    def description(self) -> str:
        return "Map dataset column verbose names using PostgreSQL comments or Excel files."
        with belief_scope("description"):
            return "Map dataset column verbose names using PostgreSQL comments or Excel files."
    # [/DEF:description:Function]

    @property
    # [DEF:version:Function]
    # @PURPOSE: Returns the version of the mapper plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string version.
    # @RETURN: str - "1.0.0"
    def version(self) -> str:
        return "1.0.0"
        with belief_scope("version"):
            return "1.0.0"
    # [/DEF:version:Function]

    # [DEF:MapperPlugin.get_schema:Function]
    # [DEF:get_schema:Function]
    # @PURPOSE: Returns the JSON schema for the mapper plugin parameters.
    # @PRE: Plugin instance exists.
    # @POST: Returns dictionary schema.
    # @RETURN: Dict[str, Any] - JSON schema.
    def get_schema(self) -> Dict[str, Any]:
        return {
        with belief_scope("get_schema"):
            return {
                "type": "object",
                "properties": {
                    "env": {

@@ -85,14 +117,16 @@ class MapperPlugin(PluginBase):
                },
                "required": ["env", "dataset_id", "source"]
            }
    # [/DEF:MapperPlugin.get_schema:Function]
    # [/DEF:get_schema:Function]

    # [DEF:MapperPlugin.execute:Function]
    # [DEF:execute:Function]
    # @PURPOSE: Executes the dataset mapping logic.
    # @PRE: Params contain valid 'env', 'dataset_id', and 'source'.
    # @PARAM: params (Dict[str, Any]) - Mapping parameters.
    # @PRE: Params contain valid 'env', 'dataset_id', and 'source'. params must be a dictionary.
    # @POST: Updates the dataset in Superset.
    # @RETURN: Dict[str, Any] - Execution status.
    async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
        with belief_scope("MapperPlugin.execute", f"params={params}"):
        with belief_scope("execute"):
            env_name = params.get("env")
            dataset_id = params.get("dataset_id")
            source = params.get("source")

@@ -158,7 +192,7 @@ class MapperPlugin(PluginBase):
            except Exception as e:
                logger.error(f"[MapperPlugin.execute][Failure] Mapping failed: {e}")
                raise
    # [/DEF:MapperPlugin.execute:Function]
    # [/DEF:execute:Function]

# [/DEF:MapperPlugin:Class]
# [/DEF:MapperPluginModule:Module]
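
# Invocation sketch (illustrative values; "source" selects between the
# PostgreSQL-comments and Excel mapping paths described in the schema, and the
# exact enum values live in the schema hunk above):
#
#     result = await MapperPlugin().execute({
#         "env": "prod",
#         "dataset_id": 42,
#         "source": "postgresql",
#     })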

@@ -12,6 +12,7 @@ import zipfile
import re

from ..core.plugin_base import PluginBase
from ..core.logger import belief_scope
from superset_tool.client import SupersetClient
from superset_tool.utils.init_clients import setup_clients
from superset_tool.utils.fileio import create_temp_file, update_yamls, create_dashboard_export

@@ -29,23 +30,57 @@ class MigrationPlugin(PluginBase):
    """

    @property
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the migration plugin.
    # @PRE: None.
    # @POST: Returns "superset-migration".
    # @RETURN: str - "superset-migration"
    def id(self) -> str:
        return "superset-migration"
        with belief_scope("id"):
            return "superset-migration"
    # [/DEF:id:Function]

    @property
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the migration plugin.
    # @PRE: None.
    # @POST: Returns the plugin name.
    # @RETURN: str - Plugin name.
    def name(self) -> str:
        return "Superset Dashboard Migration"
        with belief_scope("name"):
            return "Superset Dashboard Migration"
    # [/DEF:name:Function]

    @property
    # [DEF:description:Function]
    # @PURPOSE: Returns a description of the migration plugin.
    # @PRE: None.
    # @POST: Returns the plugin description.
    # @RETURN: str - Plugin description.
    def description(self) -> str:
        return "Migrates dashboards between Superset environments."
        with belief_scope("description"):
            return "Migrates dashboards between Superset environments."
    # [/DEF:description:Function]

    @property
    # [DEF:version:Function]
    # @PURPOSE: Returns the version of the migration plugin.
    # @PRE: None.
    # @POST: Returns "1.0.0".
    # @RETURN: str - "1.0.0"
    def version(self) -> str:
        return "1.0.0"
        with belief_scope("version"):
            return "1.0.0"
    # [/DEF:version:Function]

    # [DEF:get_schema:Function]
    # @PURPOSE: Returns the JSON schema for migration plugin parameters.
    # @PRE: Config manager is available.
    # @POST: Returns a valid JSON schema dictionary.
    # @RETURN: Dict[str, Any] - JSON schema.
    def get_schema(self) -> Dict[str, Any]:
        config_manager = get_config_manager()
        with belief_scope("get_schema"):
            config_manager = get_config_manager()
            envs = [e.name for e in config_manager.get_environments()]

            return {

@@ -87,11 +122,18 @@ class MigrationPlugin(PluginBase):
                },
                "required": ["from_env", "to_env", "dashboard_regex"],
            }
    # [/DEF:get_schema:Function]

    # [DEF:execute:Function]
    # @PURPOSE: Executes the dashboard migration logic.
    # @PARAM: params (Dict[str, Any]) - Migration parameters.
    # @PRE: Source and target environments must be configured.
    # @POST: Selected dashboards are migrated.
    async def execute(self, params: Dict[str, Any]):
        source_env_id = params.get("source_env_id")
        target_env_id = params.get("target_env_id")
        selected_ids = params.get("selected_ids")
        with belief_scope("MigrationPlugin.execute"):
            source_env_id = params.get("source_env_id")
            target_env_id = params.get("target_env_id")
            selected_ids = params.get("selected_ids")

            # Legacy support or alternative params
            from_env_name = params.get("from_env")

@@ -109,29 +151,77 @@ class MigrationPlugin(PluginBase):
            tm = get_task_manager()
            class TaskLoggerProxy(SupersetLogger):
                # [DEF:__init__:Function]
                # @PURPOSE: Initializes the proxy logger.
                # @PRE: None.
                # @POST: Instance is initialized.
                def __init__(self):
                    # Initialize parent with dummy values since we override methods
                    super().__init__(console=False)
                    with belief_scope("__init__"):
                        # Initialize parent with dummy values since we override methods
                        super().__init__(console=False)
                # [/DEF:__init__:Function]

                # [DEF:debug:Function]
                # @PURPOSE: Logs a debug message to the task manager.
                # @PRE: msg is a string.
                # @POST: Log is added to task manager if task_id exists.
                def debug(self, msg, *args, extra=None, **kwargs):
                    if task_id: tm._add_log(task_id, "DEBUG", msg, extra or {})
                    with belief_scope("debug"):
                        if task_id: tm._add_log(task_id, "DEBUG", msg, extra or {})
                # [/DEF:debug:Function]

                # [DEF:info:Function]
                # @PURPOSE: Logs an info message to the task manager.
                # @PRE: msg is a string.
                # @POST: Log is added to task manager if task_id exists.
                def info(self, msg, *args, extra=None, **kwargs):
                    if task_id: tm._add_log(task_id, "INFO", msg, extra or {})
                    with belief_scope("info"):
                        if task_id: tm._add_log(task_id, "INFO", msg, extra or {})
                # [/DEF:info:Function]

                # [DEF:warning:Function]
                # @PURPOSE: Logs a warning message to the task manager.
                # @PRE: msg is a string.
                # @POST: Log is added to task manager if task_id exists.
                def warning(self, msg, *args, extra=None, **kwargs):
                    if task_id: tm._add_log(task_id, "WARNING", msg, extra or {})
                    with belief_scope("warning"):
                        if task_id: tm._add_log(task_id, "WARNING", msg, extra or {})
                # [/DEF:warning:Function]

                # [DEF:error:Function]
                # @PURPOSE: Logs an error message to the task manager.
                # @PRE: msg is a string.
                # @POST: Log is added to task manager if task_id exists.
                def error(self, msg, *args, extra=None, **kwargs):
                    if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
                    with belief_scope("error"):
                        if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
                # [/DEF:error:Function]

                # [DEF:critical:Function]
                # @PURPOSE: Logs a critical message to the task manager.
                # @PRE: msg is a string.
                # @POST: Log is added to task manager if task_id exists.
                def critical(self, msg, *args, extra=None, **kwargs):
                    if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
                    with belief_scope("critical"):
                        if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
                # [/DEF:critical:Function]

                # [DEF:exception:Function]
                # @PURPOSE: Logs an exception message to the task manager.
                # @PRE: msg is a string.
                # @POST: Log is added to task manager if task_id exists.
                def exception(self, msg, *args, **kwargs):
                    if task_id: tm._add_log(task_id, "ERROR", msg, {"exception": True})
                    with belief_scope("exception"):
                        if task_id: tm._add_log(task_id, "ERROR", msg, {"exception": True})
                # [/DEF:exception:Function]
            logger = TaskLoggerProxy()
            logger.info(f"[MigrationPlugin][Entry] Starting migration task.")
            logger.info(f"[MigrationPlugin][Action] Params: {params}")

            try:
                config_manager = get_config_manager()
                with belief_scope("execute"):
                    config_manager = get_config_manager()
                    environments = config_manager.get_environments()

                    # Resolve environments

@@ -289,12 +379,12 @@ class MigrationPlugin(PluginBase):
                            continue

                        logger.error(f"[MigrationPlugin][Failure] Failed to migrate dashboard {title}: {exc}", exc_info=True)
            # [/DEF:MigrationPlugin.execute:Action]

                logger.info("[MigrationPlugin][Exit] Migration finished.")

                    logger.info("[MigrationPlugin][Exit] Migration finished.")
            except Exception as e:
                logger.critical(f"[MigrationPlugin][Failure] Fatal error during migration: {e}", exc_info=True)
                raise e
            # [/DEF:MigrationPlugin.execute:Action]
    # [/DEF:execute:Function]
# [/DEF:MigrationPlugin:Class]
# [/DEF:MigrationPlugin:Module]
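
# Design note: TaskLoggerProxy adapts the SupersetLogger interface so that log
# lines emitted by superset_tool land in the web UI's per-task log stream via
# TaskManager._add_log instead of a file. A rough usage sketch (task_id is
# captured from the enclosing execute()):
#
#     logger = TaskLoggerProxy()
#     logger.info("hello")  # becomes a LogEntry on the task, pushed to subscribers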

@@ -21,25 +21,57 @@ class SearchPlugin(PluginBase):
    """

    @property
    # [DEF:id:Function]
    # @PURPOSE: Returns the unique identifier for the search plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string ID.
    # @RETURN: str - "search-datasets"
    def id(self) -> str:
        return "search-datasets"
        with belief_scope("id"):
            return "search-datasets"
    # [/DEF:id:Function]

    @property
    # [DEF:name:Function]
    # @PURPOSE: Returns the human-readable name of the search plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string name.
    # @RETURN: str - Plugin name.
    def name(self) -> str:
        return "Search Datasets"
        with belief_scope("name"):
            return "Search Datasets"
    # [/DEF:name:Function]

    @property
    # [DEF:description:Function]
    # @PURPOSE: Returns a description of the search plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string description.
    # @RETURN: str - Plugin description.
    def description(self) -> str:
        return "Search for text patterns across all datasets in a specific environment."
        with belief_scope("description"):
            return "Search for text patterns across all datasets in a specific environment."
    # [/DEF:description:Function]

    @property
    # [DEF:version:Function]
    # @PURPOSE: Returns the version of the search plugin.
    # @PRE: Plugin instance exists.
    # @POST: Returns string version.
    # @RETURN: str - "1.0.0"
    def version(self) -> str:
        return "1.0.0"
        with belief_scope("version"):
            return "1.0.0"
    # [/DEF:version:Function]

    # [DEF:SearchPlugin.get_schema:Function]
    # [DEF:get_schema:Function]
    # @PURPOSE: Returns the JSON schema for the search plugin parameters.
    # @PRE: Plugin instance exists.
    # @POST: Returns dictionary schema.
    # @RETURN: Dict[str, Any] - JSON schema.
    def get_schema(self) -> Dict[str, Any]:
        return {
        with belief_scope("get_schema"):
            return {
                "type": "object",
                "properties": {
                    "env": {

@@ -55,12 +87,14 @@ class SearchPlugin(PluginBase):
                },
                "required": ["env", "query"]
            }
    # [/DEF:SearchPlugin.get_schema:Function]
    # [/DEF:get_schema:Function]
|
||||
# [DEF:SearchPlugin.execute:Function]
|
||||
# [DEF:execute:Function]
|
||||
# @PURPOSE: Executes the dataset search logic.
|
||||
# @PARAM: params (Dict[str, Any]) - Search parameters.
|
||||
# @PRE: Params contain valid 'env' and 'query'.
|
||||
# @POST: Returns a dictionary with count and results list.
|
||||
# @RETURN: Dict[str, Any] - Search results.
|
||||
async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
|
||||
with belief_scope("SearchPlugin.execute", f"params={params}"):
|
||||
env_name = params.get("env")
|
||||
@@ -124,38 +158,45 @@ class SearchPlugin(PluginBase):
|
||||
except Exception as e:
|
||||
logger.error(f"[SearchPlugin.execute][Failure] Error during search: {e}")
|
||||
raise
|
||||
# [/DEF:SearchPlugin.execute:Function]
|
||||
# [/DEF:execute:Function]
|
||||
|
||||
# [DEF:SearchPlugin._get_context:Function]
|
||||
# [DEF:_get_context:Function]
|
||||
# @PURPOSE: Extracts a small context around the match for display.
|
||||
# @PARAM: text (str) - The full text to extract context from.
|
||||
# @PARAM: match_text (str) - The matched text pattern.
|
||||
# @PARAM: context_lines (int) - Number of lines of context to include.
|
||||
# @PRE: text and match_text must be strings.
|
||||
# @POST: Returns context string.
|
||||
# @RETURN: str - Extracted context.
|
||||
def _get_context(self, text: str, match_text: str, context_lines: int = 1) -> str:
|
||||
"""
|
||||
Extracts a small context around the match for display.
|
||||
"""
|
||||
if not match_text:
|
||||
return text[:100] + "..." if len(text) > 100 else text
|
||||
with belief_scope("_get_context"):
|
||||
if not match_text:
|
||||
return text[:100] + "..." if len(text) > 100 else text
|
||||
|
||||
lines = text.splitlines()
|
||||
lines = text.splitlines()
|
||||
match_line_index = -1
|
||||
for i, line in enumerate(lines):
|
||||
if match_text in line:
|
||||
match_line_index = i
|
||||
break
|
||||
|
||||
if match_line_index != -1:
|
||||
start = max(0, match_line_index - context_lines)
|
||||
end = min(len(lines), match_line_index + context_lines + 1)
|
||||
context = []
|
||||
for i in range(start, end):
|
||||
line_content = lines[i]
|
||||
if i == match_line_index:
|
||||
context.append(f"==> {line_content}")
|
||||
else:
|
||||
context.append(f" {line_content}")
|
||||
return "\n".join(context)
|
||||
|
||||
return text[:100] + "..." if len(text) > 100 else text
|
||||
# [/DEF:SearchPlugin._get_context:Function]
|
||||
if match_line_index != -1:
|
||||
start = max(0, match_line_index - context_lines)
|
||||
end = min(len(lines), match_line_index + context_lines + 1)
|
||||
context = []
|
||||
for i in range(start, end):
|
||||
line_content = lines[i]
|
||||
if i == match_line_index:
|
||||
context.append(f"==> {line_content}")
|
||||
else:
|
||||
context.append(f" {line_content}")
|
||||
return "\n".join(context)
|
||||
|
||||
return text[:100] + "..." if len(text) > 100 else text
|
||||
# [/DEF:_get_context:Function]
|
||||
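
    # A quick illustration of _get_context's output (illustrative only;
    # `plugin` stands for any SearchPlugin instance): the matched line is
    # prefixed with "==> " and its neighbours are padded to align.
    #
    #   snippet = plugin._get_context("alpha\nbeta match\ngamma", "match")
    #   # snippet:
    #   #     alpha
    #   # ==> beta match
    #   #     gamma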

# [/DEF:SearchPlugin:Class]
# [/DEF:SearchPluginModule:Module]
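
A hedged usage sketch for the plugin as a whole: get_schema marks "env" and "query" as the required parameters, and execute's @POST contract promises a dictionary with a count and a results list (the exact key names and the no-argument construction of SearchPlugin are assumptions):

    import asyncio

    plugin = SearchPlugin()
    result = asyncio.run(plugin.execute({"env": "dev", "query": "sales"}))
    print(result["count"], len(result["results"]))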
@@ -10,6 +10,7 @@

# [SECTION: IMPORTS]
from typing import List, Dict
from backend.src.core.logger import belief_scope
from backend.src.core.superset_client import SupersetClient
from backend.src.core.utils.matching import suggest_mappings
from superset_tool.models import SupersetConfig
@@ -19,50 +20,62 @@ from superset_tool.models import SupersetConfig
# @PURPOSE: Service for handling database mapping logic.
class MappingService:

    # [DEF:MappingService.__init__:Function]
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the mapping service with a config manager.
    # @PRE: config_manager is provided.
    # @PARAM: config_manager (ConfigManager) - The configuration manager.
    # @POST: Service is initialized.
    def __init__(self, config_manager):
        self.config_manager = config_manager
    # [/DEF:MappingService.__init__:Function]
        with belief_scope("MappingService.__init__"):
            self.config_manager = config_manager
    # [/DEF:__init__:Function]

    # [DEF:MappingService._get_client:Function]
    # [DEF:_get_client:Function]
    # @PURPOSE: Helper to get an initialized SupersetClient for an environment.
    # @PARAM: env_id (str) - The ID of the environment.
    # @PRE: environment must exist in config.
    # @POST: Returns an initialized SupersetClient.
    # @RETURN: SupersetClient - Initialized client.
    def _get_client(self, env_id: str) -> SupersetClient:
        envs = self.config_manager.get_environments()
        env = next((e for e in envs if e.id == env_id), None)
        if not env:
            raise ValueError(f"Environment {env_id} not found")

        superset_config = SupersetConfig(
            env=env.name,
            base_url=env.url,
            auth={
                "provider": "db",
                "username": env.username,
                "password": env.password,
                "refresh": "false"
            }
        )
        return SupersetClient(superset_config)
    # [/DEF:MappingService._get_client:Function]
        with belief_scope("MappingService._get_client", f"env_id={env_id}"):
            envs = self.config_manager.get_environments()
            env = next((e for e in envs if e.id == env_id), None)
            if not env:
                raise ValueError(f"Environment {env_id} not found")

            superset_config = SupersetConfig(
                env=env.name,
                base_url=env.url,
                auth={
                    "provider": "db",
                    "username": env.username,
                    "password": env.password,
                    "refresh": "false"
                }
            )
            return SupersetClient(superset_config)
    # [/DEF:_get_client:Function]

    # [DEF:MappingService.get_suggestions:Function]
    # [DEF:get_suggestions:Function]
    # @PURPOSE: Fetches databases from both environments and returns fuzzy matching suggestions.
    # @PARAM: source_env_id (str) - Source environment ID.
    # @PARAM: target_env_id (str) - Target environment ID.
    # @PRE: Both environments must be accessible.
    # @POST: Returns fuzzy-matched database suggestions.
    # @RETURN: List[Dict] - Suggested mappings.
    async def get_suggestions(self, source_env_id: str, target_env_id: str) -> List[Dict]:
        """
        Get suggested mappings between two environments.
        """
        source_client = self._get_client(source_env_id)
        target_client = self._get_client(target_env_id)

        source_dbs = source_client.get_databases_summary()
        target_dbs = target_client.get_databases_summary()

        return suggest_mappings(source_dbs, target_dbs)
    # [/DEF:MappingService.get_suggestions:Function]
        with belief_scope("MappingService.get_suggestions", f"source={source_env_id}, target={target_env_id}"):
            """
            Get suggested mappings between two environments.
            """
            source_client = self._get_client(source_env_id)
            target_client = self._get_client(target_env_id)

            source_dbs = source_client.get_databases_summary()
            target_dbs = target_client.get_databases_summary()

            return suggest_mappings(source_dbs, target_dbs)
    # [/DEF:get_suggestions:Function]

# [/DEF:MappingService:Class]
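
A minimal usage sketch under stated assumptions (a config_manager holding environments whose ids are "dev" and "prod", called from async code):

    service = MappingService(config_manager)
    suggestions = await service.get_suggestions("dev", "prod")
    # each entry pairs a source database with its fuzzy-matched target,
    # as returned by suggest_mappings(source_dbs, target_dbs)
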
@@ -2,6 +2,10 @@ import pytest
from backend.src.core.logger import belief_scope, logger


# [DEF:test_belief_scope_logs_entry_action_exit:Function]
# @PURPOSE: Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs.
# @PRE: belief_scope is available. caplog fixture is used.
# @POST: Logs are verified to contain Entry, Action, and Exit tags.
def test_belief_scope_logs_entry_action_exit(caplog):
    """Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs."""
    caplog.set_level("INFO")
@@ -15,8 +19,13 @@ def test_belief_scope_logs_entry_action_exit(caplog):
    assert any("[TestFunction][Entry]" in msg for msg in log_messages), "Entry log not found"
    assert any("[TestFunction][Action] Doing something important" in msg for msg in log_messages), "Action log not found"
    assert any("[TestFunction][Exit]" in msg for msg in log_messages), "Exit log not found"
# [/DEF:test_belief_scope_logs_entry_action_exit:Function]


# [DEF:test_belief_scope_error_handling:Function]
# @PURPOSE: Test that belief_scope logs Coherence:Failed on exception.
# @PRE: belief_scope is available. caplog fixture is used.
# @POST: Logs are verified to contain Coherence:Failed tag.
def test_belief_scope_error_handling(caplog):
    """Test that belief_scope logs Coherence:Failed on exception."""
    caplog.set_level("INFO")
@@ -30,8 +39,13 @@ def test_belief_scope_error_handling(caplog):
    assert any("[FailingFunction][Entry]" in msg for msg in log_messages), "Entry log not found"
    assert any("[FailingFunction][Coherence:Failed]" in msg for msg in log_messages), "Failed coherence log not found"
    # Exit should not be logged on failure
# [/DEF:test_belief_scope_error_handling:Function]


# [DEF:test_belief_scope_success_coherence:Function]
# @PURPOSE: Test that belief_scope logs Coherence:OK on success.
# @PRE: belief_scope is available. caplog fixture is used.
# @POST: Logs are verified to contain Coherence:OK tag.
def test_belief_scope_success_coherence(caplog):
    """Test that belief_scope logs Coherence:OK on success."""
    caplog.set_level("INFO")
@@ -41,4 +55,5 @@ def test_belief_scope_success_coherence(caplog):

    log_messages = [record.message for record in caplog.records]

    assert any("[SuccessFunction][Coherence:OK]" in msg for msg in log_messages), "Success coherence log not found"
# [/DEF:test_belief_scope_success_coherence:Function]
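
The logger module itself is outside this diff. A minimal sketch of a belief_scope consistent with the assertions above (the signature and exact message format are inferred from the tests, not confirmed):

    from contextlib import contextmanager

    @contextmanager
    def belief_scope(scope_id: str, context: str = ""):
        logger.info(f"[{scope_id}][Entry] {context}")
        try:
            yield
        except Exception as e:
            # log the failed coherence check and re-raise; no Exit on failure
            logger.error(f"[{scope_id}][Coherence:Failed] {e}")
            raise
        logger.info(f"[{scope_id}][Coherence:OK]")
        logger.info(f"[{scope_id}][Exit]")

An [Action] line would then be an ordinary logger.info call made inside the with block.
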
@@ -1,49 +1,62 @@
import pytest
from superset_tool.models import SupersetConfig
from superset_tool.utils.logger import belief_scope

# [DEF:test_superset_config_url_normalization:Function]
# @PURPOSE: Tests that SupersetConfig correctly normalizes the base URL.
# @PRE: SupersetConfig class is available.
# @POST: URL normalization is verified.
def test_superset_config_url_normalization():
    auth = {
        "provider": "db",
        "username": "admin",
        "password": "password",
        "refresh": "token"
    }

    # Test with /api/v1 already present
    config = SupersetConfig(
        env="dev",
        base_url="http://localhost:8088/api/v1",
        auth=auth
    )
    assert config.base_url == "http://localhost:8088/api/v1"

    # Test without /api/v1
    config = SupersetConfig(
        env="dev",
        base_url="http://localhost:8088",
        auth=auth
    )
    assert config.base_url == "http://localhost:8088/api/v1"

    # Test with trailing slash
    config = SupersetConfig(
        env="dev",
        base_url="http://localhost:8088/",
        auth=auth
    )
    assert config.base_url == "http://localhost:8088/api/v1"
    with belief_scope("test_superset_config_url_normalization"):
        auth = {
            "provider": "db",
            "username": "admin",
            "password": "password",
            "refresh": "token"
        }

        # Test with /api/v1 already present
        config = SupersetConfig(
            env="dev",
            base_url="http://localhost:8088/api/v1",
            auth=auth
        )
        assert config.base_url == "http://localhost:8088/api/v1"

        # Test without /api/v1
        config = SupersetConfig(
            env="dev",
            base_url="http://localhost:8088",
            auth=auth
        )
        assert config.base_url == "http://localhost:8088/api/v1"

        # Test with trailing slash
        config = SupersetConfig(
            env="dev",
            base_url="http://localhost:8088/",
            auth=auth
        )
        assert config.base_url == "http://localhost:8088/api/v1"
# [/DEF:test_superset_config_url_normalization:Function]

def test_superset_config_invalid_url():
    auth = {
        "provider": "db",
        "username": "admin",
        "password": "password",
        "refresh": "token"
    }

    with pytest.raises(ValueError, match="Must start with http:// or https://"):
        SupersetConfig(
            env="dev",
            base_url="localhost:8088",
            auth=auth
        )

# [DEF:test_superset_config_invalid_url:Function]
# @PURPOSE: Tests that SupersetConfig raises ValueError for invalid URLs.
# @PRE: SupersetConfig class is available.
# @POST: ValueError is raised for invalid URLs.
def test_superset_config_invalid_url():
    with belief_scope("test_superset_config_invalid_url"):
        auth = {
            "provider": "db",
            "username": "admin",
            "password": "password",
            "refresh": "token"
        }

        with pytest.raises(ValueError, match="Must start with http:// or https://"):
            SupersetConfig(
                env="dev",
                base_url="localhost:8088",
                auth=auth
            )
# [/DEF:test_superset_config_invalid_url:Function]
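
The validator these tests exercise lives in SupersetConfig, outside this diff. A sketch of the normalization rules the asserts pin down (the helper name is illustrative, not the library's API):

    def normalize_base_url(url: str) -> str:
        if not url.startswith(("http://", "https://")):
            raise ValueError("Must start with http:// or https://")
        url = url.rstrip("/")            # "http://host:8088/" -> "http://host:8088"
        if not url.endswith("/api/v1"):  # no-op when /api/v1 is already present
            url += "/api/v1"
        return url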