mappings+migrate

This commit is contained in:
2025-12-27 10:16:41 +03:00
parent 3d75a21127
commit 6962a78112
19 changed files with 925 additions and 143 deletions

View File

@@ -0,0 +1,71 @@
# [DEF:backend.src.api.routes.migration:Module]
# @SEMANTICS: api, migration, dashboards
# @PURPOSE: API endpoints for migration operations.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.models.dashboard
from fastapi import APIRouter, Depends, HTTPException
from typing import List, Dict
from backend.src.dependencies import get_config_manager, get_task_manager
from backend.src.models.dashboard import DashboardMetadata, DashboardSelection
from backend.src.core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
router = APIRouter(prefix="/api", tags=["migration"])
# [DEF:get_dashboards:Function]
# @PURPOSE: Fetch all dashboards from the specified environment for the grid.
# @PRE: Environment ID must be valid.
# @POST: Returns a list of dashboard metadata.
# @PARAM: env_id (str) - The ID of the environment to fetch from.
# @RETURN: List[DashboardMetadata]
@router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata])
async def get_dashboards(env_id: str, config_manager=Depends(get_config_manager)):
    """Fetch dashboard metadata from the environment identified by ``env_id``.

    Raises:
        HTTPException 404: no configured environment has the given id.
        HTTPException 502: the upstream Superset instance could not be queried
            (auth failure, network error, etc.).
    """
    environments = config_manager.get_environments()
    env = next((e for e in environments if e.id == env_id), None)
    if not env:
        raise HTTPException(status_code=404, detail="Environment not found")
    config = SupersetConfig(
        env=env.name,
        base_url=env.url,
        auth={'provider': 'db', 'username': env.username, 'password': env.password, 'refresh': False},
        verify_ssl=True,
        timeout=30
    )
    try:
        client = SupersetClient(config)
        return client.get_dashboards_summary()
    except Exception as e:
        # Surface upstream failures as a gateway error with context instead of
        # letting them propagate as an opaque 500.
        raise HTTPException(
            status_code=502,
            detail=f"Failed to fetch dashboards from '{env.name}': {e}"
        ) from e
# [/DEF:get_dashboards]
# [DEF:execute_migration:Function]
# @PURPOSE: Execute the migration of selected dashboards.
# @PRE: Selection must be valid and environments must exist.
# @POST: Starts the migration task and returns the task ID.
# @PARAM: selection (DashboardSelection) - The dashboards to migrate.
# @RETURN: Dict - {"task_id": str, "message": str}
@router.post("/migration/execute")
async def execute_migration(selection: DashboardSelection, config_manager=Depends(get_config_manager), task_manager=Depends(get_task_manager)):
    """Kick off a migration task for the dashboards chosen by the user.

    Verifies both endpoints refer to known environments, then delegates
    task creation to the TaskManager and returns the new task id.
    """
    # Guard clause: both source and target must be configured environments.
    env_ids = {e.id for e in config_manager.get_environments()}
    if not {selection.source_env_id, selection.target_env_id} <= env_ids:
        raise HTTPException(status_code=400, detail="Invalid source or target environment")

    # Create migration task with debug logging
    from ...core.logger import logger
    logger.info(f"Creating migration task with selection: {selection.dict()}")
    logger.info(f"Available environments: {env_ids}")
    logger.info(f"Source env: {selection.source_env_id}, Target env: {selection.target_env_id}")

    try:
        task = await task_manager.create_task("superset-migration", selection.dict())
        logger.info(f"Task created successfully: {task.id}")
        return {"task_id": task.id, "message": "Migration initiated"}
    except Exception as e:
        logger.error(f"Task creation failed: {e}")
        raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:execute_migration]
# [/DEF:backend.src.api.routes.migration]

View File

@@ -20,7 +20,7 @@ import os
from .dependencies import get_task_manager from .dependencies import get_task_manager
from .core.logger import logger from .core.logger import logger
from .api.routes import plugins, tasks, settings, environments, mappings from .api.routes import plugins, tasks, settings, environments, mappings, migration
from .core.database import init_db from .core.database import init_db
# Initialize database # Initialize database
@@ -51,6 +51,7 @@ app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"])
app.include_router(settings.router, prefix="/api/settings", tags=["Settings"]) app.include_router(settings.router, prefix="/api/settings", tags=["Settings"])
app.include_router(environments.router) app.include_router(environments.router)
app.include_router(mappings.router) app.include_router(mappings.router)
app.include_router(migration.router)
# [DEF:WebSocketEndpoint:Endpoint] # [DEF:WebSocketEndpoint:Endpoint]
# @SEMANTICS: websocket, logs, streaming, real-time # @SEMANTICS: websocket, logs, streaming, real-time

View File

@@ -15,6 +15,8 @@ import shutil
import tempfile import tempfile
from pathlib import Path from pathlib import Path
from typing import Dict from typing import Dict
from .logger import logger, belief_scope
import yaml
# [/SECTION] # [/SECTION]
# [DEF:MigrationEngine:Class] # [DEF:MigrationEngine:Class]
@@ -26,28 +28,42 @@ class MigrationEngine:
# @PARAM: zip_path (str) - Path to the source ZIP file. # @PARAM: zip_path (str) - Path to the source ZIP file.
# @PARAM: output_path (str) - Path where the transformed ZIP will be saved. # @PARAM: output_path (str) - Path where the transformed ZIP will be saved.
# @PARAM: db_mapping (Dict[str, str]) - Mapping of source UUID to target UUID. # @PARAM: db_mapping (Dict[str, str]) - Mapping of source UUID to target UUID.
# @PARAM: strip_databases (bool) - Whether to remove the databases directory from the archive.
# @RETURN: bool - True if successful. # @RETURN: bool - True if successful.
def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str]) -> bool: def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str], strip_databases: bool = True) -> bool:
""" """
Transform a Superset export ZIP by replacing database UUIDs. Transform a Superset export ZIP by replacing database UUIDs.
""" """
with belief_scope("MigrationEngine.transform_zip"):
with tempfile.TemporaryDirectory() as temp_dir_str: with tempfile.TemporaryDirectory() as temp_dir_str:
temp_dir = Path(temp_dir_str) temp_dir = Path(temp_dir_str)
try: try:
# 1. Extract # 1. Extract
logger.info(f"[MigrationEngine.transform_zip][Action] Extracting ZIP: {zip_path}")
with zipfile.ZipFile(zip_path, 'r') as zf: with zipfile.ZipFile(zip_path, 'r') as zf:
zf.extractall(temp_dir) zf.extractall(temp_dir)
# 2. Transform YAMLs # 2. Transform YAMLs
# Datasets are usually in datasets/*.yaml # Datasets are usually in datasets/*.yaml
dataset_files = list(temp_dir.glob("**/datasets/*.yaml")) dataset_files = list(temp_dir.glob("**/datasets/**/*.yaml")) + list(temp_dir.glob("**/datasets/*.yaml"))
dataset_files = list(set(dataset_files))
logger.info(f"[MigrationEngine.transform_zip][State] Found {len(dataset_files)} dataset files.")
for ds_file in dataset_files: for ds_file in dataset_files:
logger.info(f"[MigrationEngine.transform_zip][Action] Transforming dataset: {ds_file}")
self._transform_yaml(ds_file, db_mapping) self._transform_yaml(ds_file, db_mapping)
# 3. Re-package # 3. Re-package
logger.info(f"[MigrationEngine.transform_zip][Action] Re-packaging ZIP to: {output_path} (strip_databases={strip_databases})")
with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zf: with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zf:
for root, dirs, files in os.walk(temp_dir): for root, dirs, files in os.walk(temp_dir):
rel_root = Path(root).relative_to(temp_dir)
if strip_databases and "databases" in rel_root.parts:
logger.info(f"[MigrationEngine.transform_zip][Action] Skipping file in databases directory: {rel_root}")
continue
for file in files: for file in files:
file_path = Path(root) / file file_path = Path(root) / file
arcname = file_path.relative_to(temp_dir) arcname = file_path.relative_to(temp_dir)
@@ -55,7 +71,7 @@ class MigrationEngine:
return True return True
except Exception as e: except Exception as e:
print(f"Error transforming ZIP: {e}") logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
return False return False
# [DEF:MigrationEngine._transform_yaml:Function] # [DEF:MigrationEngine._transform_yaml:Function]

View File

@@ -47,12 +47,17 @@ class PluginLoader:
Loads a single Python module and extracts PluginBase subclasses. Loads a single Python module and extracts PluginBase subclasses.
""" """
# Try to determine the correct package prefix based on how the app is running # Try to determine the correct package prefix based on how the app is running
if "backend.src" in __name__: # For standalone execution, we need to handle the import differently
if __name__ == "__main__" or "test" in __name__:
# When running as standalone or in tests, use relative import
package_name = f"plugins.{module_name}"
elif "backend.src" in __name__:
package_prefix = "backend.src.plugins" package_prefix = "backend.src.plugins"
package_name = f"{package_prefix}.{module_name}"
else: else:
package_prefix = "src.plugins" package_prefix = "src.plugins"
package_name = f"{package_prefix}.{module_name}" package_name = f"{package_prefix}.{module_name}"
# print(f"DEBUG: Loading plugin {module_name} as {package_name}") # print(f"DEBUG: Loading plugin {module_name} as {package_name}")
spec = importlib.util.spec_from_file_location(package_name, file_path) spec = importlib.util.spec_from_file_location(package_name, file_path)
if spec is None or spec.loader is None: if spec is None or spec.loader is None:
@@ -106,9 +111,11 @@ class PluginLoader:
# validate(instance={}, schema=schema) # validate(instance={}, schema=schema)
self._plugins[plugin_id] = plugin_instance self._plugins[plugin_id] = plugin_instance
self._plugin_configs[plugin_id] = plugin_config self._plugin_configs[plugin_id] = plugin_config
print(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.") # Replace with proper logging from ..core.logger import logger
logger.info(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.")
except Exception as e: except Exception as e:
print(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}") # Replace with proper logging from ..core.logger import logger
logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
def get_plugin(self, plugin_id: str) -> Optional[PluginBase]: def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:

View File

@@ -52,6 +52,32 @@ class SupersetClient(BaseSupersetClient):
return databases[0] if databases else None return databases[0] if databases else None
# [/DEF:SupersetClient.get_database_by_uuid] # [/DEF:SupersetClient.get_database_by_uuid]
# [DEF:SupersetClient.get_dashboards_summary:Function]
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
# @POST: Returns a list of dashboard dictionaries.
# @RETURN: List[Dict]
def get_dashboards_summary(self) -> List[Dict]:
"""
Fetches dashboard metadata optimized for the grid.
Returns a list of dictionaries mapped to DashboardMetadata fields.
"""
query = {
"columns": ["id", "dashboard_title", "changed_on_utc", "published"]
}
_, dashboards = self.get_dashboards(query=query)
# Map fields to DashboardMetadata schema
result = []
for dash in dashboards:
result.append({
"id": dash.get("id"),
"title": dash.get("dashboard_title"),
"last_modified": dash.get("changed_on_utc"),
"status": "published" if dash.get("published") else "draft"
})
return result
# [/DEF:SupersetClient.get_dashboards_summary]
# [/DEF:SupersetClient] # [/DEF:SupersetClient]
# [/DEF:backend.src.core.superset_client] # [/DEF:backend.src.core.superset_client]

View File

@@ -64,6 +64,9 @@ class TaskManager:
self.tasks: Dict[str, Task] = {} self.tasks: Dict[str, Task] = {}
self.subscribers: Dict[str, List[asyncio.Queue]] = {} self.subscribers: Dict[str, List[asyncio.Queue]] = {}
self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution
try:
self.loop = asyncio.get_running_loop()
except RuntimeError:
self.loop = asyncio.get_event_loop() self.loop = asyncio.get_event_loop()
self.task_futures: Dict[str, asyncio.Future] = {} self.task_futures: Dict[str, asyncio.Future] = {}
# [/DEF] # [/DEF]
@@ -72,17 +75,25 @@ class TaskManager:
""" """
Creates and queues a new task for execution. Creates and queues a new task for execution.
""" """
from ..core.logger import logger
logger.info(f"TaskManager: Creating task for plugin '{plugin_id}' with params: {params}")
if not self.plugin_loader.has_plugin(plugin_id): if not self.plugin_loader.has_plugin(plugin_id):
logger.error(f"TaskManager: Plugin with ID '{plugin_id}' not found.")
raise ValueError(f"Plugin with ID '{plugin_id}' not found.") raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
plugin = self.plugin_loader.get_plugin(plugin_id) plugin = self.plugin_loader.get_plugin(plugin_id)
logger.info(f"TaskManager: Found plugin '{plugin.name}' for task creation")
# Validate params against plugin schema (this will be done at a higher level, e.g., API route) # Validate params against plugin schema (this will be done at a higher level, e.g., API route)
# For now, a basic check # For now, a basic check
if not isinstance(params, dict): if not isinstance(params, dict):
logger.error("TaskManager: Task parameters must be a dictionary.")
raise ValueError("Task parameters must be a dictionary.") raise ValueError("Task parameters must be a dictionary.")
task = Task(plugin_id=plugin_id, params=params, user_id=user_id) task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
self.tasks[task.id] = task self.tasks[task.id] = task
logger.info(f"TaskManager: Task {task.id} created and scheduled for execution")
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
return task return task
@@ -90,9 +101,11 @@ class TaskManager:
""" """
Internal method to execute a task. Internal method to execute a task.
""" """
from ..core.logger import logger
task = self.tasks[task_id] task = self.tasks[task_id]
plugin = self.plugin_loader.get_plugin(task.plugin_id) plugin = self.plugin_loader.get_plugin(task.plugin_id)
logger.info(f"TaskManager: Starting execution of task {task_id} for plugin '{plugin.name}'")
task.status = TaskStatus.RUNNING task.status = TaskStatus.RUNNING
task.started_at = datetime.utcnow() task.started_at = datetime.utcnow()
self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'") self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'")
@@ -103,17 +116,27 @@ class TaskManager:
# If the plugin's execute method is already async, this can be simplified. # If the plugin's execute method is already async, this can be simplified.
# Pass task_id to plugin so it can signal pause # Pass task_id to plugin so it can signal pause
params = {**task.params, "_task_id": task_id} params = {**task.params, "_task_id": task_id}
logger.info(f"TaskManager: Executing plugin '{plugin.name}' with params: {params}")
if asyncio.iscoroutinefunction(plugin.execute):
logger.info(f"TaskManager: Executing async plugin '{plugin.name}'")
await plugin.execute(params)
else:
logger.info(f"TaskManager: Executing sync plugin '{plugin.name}' in executor")
await self.loop.run_in_executor( await self.loop.run_in_executor(
self.executor, self.executor,
lambda: asyncio.run(plugin.execute(params)) if asyncio.iscoroutinefunction(plugin.execute) else plugin.execute(params) plugin.execute,
params
) )
logger.info(f"TaskManager: Task {task_id} completed successfully for plugin '{plugin.name}'")
task.status = TaskStatus.SUCCESS task.status = TaskStatus.SUCCESS
self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'") self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'")
except Exception as e: except Exception as e:
logger.error(f"TaskManager: Task {task_id} failed for plugin '{plugin.name}': {e}")
task.status = TaskStatus.FAILED task.status = TaskStatus.FAILED
self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__}) self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__})
finally: finally:
task.finished_at = datetime.utcnow() task.finished_at = datetime.utcnow()
logger.info(f"TaskManager: Task {task_id} execution finished with status: {task.status}")
# In a real system, you might notify clients via WebSocket here # In a real system, you might notify clients via WebSocket here
async def resolve_task(self, task_id: str, resolution_params: Dict[str, Any]): async def resolve_task(self, task_id: str, resolution_params: Dict[str, Any]):

View File

@@ -21,7 +21,12 @@ def get_config_manager() -> ConfigManager:
plugin_dir = Path(__file__).parent / "plugins" plugin_dir = Path(__file__).parent / "plugins"
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir)) plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
from .core.logger import logger
logger.info(f"PluginLoader initialized with directory: {plugin_dir}")
logger.info(f"Available plugins: {[config.name for config in plugin_loader.get_all_plugin_configs()]}")
task_manager = TaskManager(plugin_loader) task_manager = TaskManager(plugin_loader)
logger.info("TaskManager initialized")
def get_plugin_loader() -> PluginLoader: def get_plugin_loader() -> PluginLoader:
"""Dependency injector for the PluginLoader.""" """Dependency injector for the PluginLoader."""

View File

@@ -0,0 +1,27 @@
# [DEF:backend.src.models.dashboard:Module]
# @SEMANTICS: dashboard, model, metadata, migration
# @PURPOSE: Defines data models for dashboard metadata and selection.
# @LAYER: Model
# @RELATION: USED_BY -> backend.src.api.routes.migration
from pydantic import BaseModel
from typing import List
# [DEF:DashboardMetadata:Class]
# @PURPOSE: Represents a dashboard available for migration.
class DashboardMetadata(BaseModel):
    """Metadata for one dashboard as displayed in the selection grid."""
    id: int  # Superset dashboard id
    title: str  # mapped from the Superset API's "dashboard_title" field
    last_modified: str  # "changed_on_utc" timestamp, kept as the raw string
    status: str  # "published" or "draft"
# [/DEF:DashboardMetadata]
# [DEF:DashboardSelection:Class]
# @PURPOSE: Represents the user's selection of dashboards to migrate.
class DashboardSelection(BaseModel):
    """Request payload for /migration/execute."""
    selected_ids: List[int]  # dashboard ids chosen in the grid
    source_env_id: str  # id of the environment to export from
    target_env_id: str  # id of the environment to import into
# [/DEF:DashboardSelection]
# [/DEF:backend.src.models.dashboard]

View File

@@ -87,34 +87,72 @@ class MigrationPlugin(PluginBase):
} }
async def execute(self, params: Dict[str, Any]): async def execute(self, params: Dict[str, Any]):
from_env = params["from_env"] source_env_id = params.get("source_env_id")
to_env = params["to_env"] target_env_id = params.get("target_env_id")
dashboard_regex = params["dashboard_regex"] selected_ids = params.get("selected_ids")
# Legacy support or alternative params
from_env_name = params.get("from_env")
to_env_name = params.get("to_env")
dashboard_regex = params.get("dashboard_regex")
replace_db_config = params.get("replace_db_config", False) replace_db_config = params.get("replace_db_config", False)
from_db_id = params.get("from_db_id") from_db_id = params.get("from_db_id")
to_db_id = params.get("to_db_id") to_db_id = params.get("to_db_id")
logger = SupersetLogger(log_dir=Path.cwd() / "logs", console=True) logger = SupersetLogger(log_dir=Path.cwd() / "logs", console=True)
logger.info(f"[MigrationPlugin][Entry] Starting migration from {from_env} to {to_env}.") logger.info(f"[MigrationPlugin][Entry] Starting migration task.")
logger.info(f"[MigrationPlugin][Action] Params: {params}")
try: try:
config_manager = get_config_manager() config_manager = get_config_manager()
all_clients = setup_clients(logger, custom_envs=config_manager.get_environments()) environments = config_manager.get_environments()
from_c = all_clients.get(from_env)
to_c = all_clients.get(to_env) # Resolve environments
src_env = None
tgt_env = None
if source_env_id:
src_env = next((e for e in environments if e.id == source_env_id), None)
elif from_env_name:
src_env = next((e for e in environments if e.name == from_env_name), None)
if target_env_id:
tgt_env = next((e for e in environments if e.id == target_env_id), None)
elif to_env_name:
tgt_env = next((e for e in environments if e.name == to_env_name), None)
if not src_env or not tgt_env:
raise ValueError(f"Could not resolve source or target environment. Source: {source_env_id or from_env_name}, Target: {target_env_id or to_env_name}")
from_env_name = src_env.name
to_env_name = tgt_env.name
logger.info(f"[MigrationPlugin][State] Resolved environments: {from_env_name} -> {to_env_name}")
all_clients = setup_clients(logger, custom_envs=environments)
from_c = all_clients.get(from_env_name)
to_c = all_clients.get(to_env_name)
if not from_c or not to_c: if not from_c or not to_c:
raise ValueError(f"One or both environments ('{from_env}', '{to_env}') not found in configuration.") raise ValueError(f"Clients not initialized for environments: {from_env_name}, {to_env_name}")
_, all_dashboards = from_c.get_dashboards() _, all_dashboards = from_c.get_dashboards()
dashboards_to_migrate = []
if selected_ids:
dashboards_to_migrate = [d for d in all_dashboards if d["id"] in selected_ids]
elif dashboard_regex:
regex_str = str(dashboard_regex) regex_str = str(dashboard_regex)
dashboards_to_migrate = [ dashboards_to_migrate = [
d for d in all_dashboards if re.search(regex_str, d["dashboard_title"], re.IGNORECASE) d for d in all_dashboards if re.search(regex_str, d["dashboard_title"], re.IGNORECASE)
] ]
else:
logger.warning("[MigrationPlugin][State] No selection criteria provided (selected_ids or dashboard_regex).")
return
if not dashboards_to_migrate: if not dashboards_to_migrate:
logger.warning("[MigrationPlugin][State] No dashboards found matching the regex.") logger.warning("[MigrationPlugin][State] No dashboards found matching criteria.")
return return
# Fetch mappings from database # Fetch mappings from database
@@ -123,8 +161,8 @@ class MigrationPlugin(PluginBase):
db = SessionLocal() db = SessionLocal()
try: try:
# Find environment IDs by name # Find environment IDs by name
src_env = db.query(Environment).filter(Environment.name == from_env).first() src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
tgt_env = db.query(Environment).filter(Environment.name == to_env).first() tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
if src_env and tgt_env: if src_env and tgt_env:
mappings = db.query(DatabaseMapping).filter( mappings = db.query(DatabaseMapping).filter(
@@ -144,19 +182,12 @@ class MigrationPlugin(PluginBase):
try: try:
exported_content, _ = from_c.export_dashboard(dash_id) exported_content, _ = from_c.export_dashboard(dash_id)
with create_temp_file(content=exported_content, dry_run=True, suffix=".zip", logger=logger) as tmp_zip_path: with create_temp_file(content=exported_content, dry_run=True, suffix=".zip", logger=logger) as tmp_zip_path:
if not replace_db_config: # Always transform to strip databases to avoid password errors
to_c.import_dashboard(file_name=tmp_zip_path, dash_id=dash_id, dash_slug=dash_slug)
else:
# Check for missing mappings before transformation
# This is a simplified check, in reality we'd check all YAMLs
# For US3, we'll just use the engine and handle missing ones there
with create_temp_file(suffix=".zip", dry_run=True, logger=logger) as tmp_new_zip: with create_temp_file(suffix=".zip", dry_run=True, logger=logger) as tmp_new_zip:
# If we have missing mappings, we might need to pause success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
# For now, let's assume the engine can tell us what's missing
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping)
if not success: if not success and replace_db_config:
# Signal missing mapping and wait # Signal missing mapping and wait (only if we care about mappings)
task_id = params.get("_task_id") task_id = params.get("_task_id")
if task_id: if task_id:
from ..dependencies import get_task_manager from ..dependencies import get_task_manager
@@ -169,8 +200,8 @@ class MigrationPlugin(PluginBase):
# (Mappings would be updated in task.params by resolve_task) # (Mappings would be updated in task.params by resolve_task)
db = SessionLocal() db = SessionLocal()
try: try:
src_env = db.query(Environment).filter(Environment.name == from_env).first() src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
tgt_env = db.query(Environment).filter(Environment.name == to_env).first() tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
mappings = db.query(DatabaseMapping).filter( mappings = db.query(DatabaseMapping).filter(
DatabaseMapping.source_env_id == src_env.id, DatabaseMapping.source_env_id == src_env.id,
DatabaseMapping.target_env_id == tgt_env.id DatabaseMapping.target_env_id == tgt_env.id
@@ -178,7 +209,7 @@ class MigrationPlugin(PluginBase):
db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings} db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
finally: finally:
db.close() db.close()
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping) success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
if success: if success:
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug) to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)

View File

@@ -24,7 +24,7 @@ export const options = {
app: ({ head, body, assets, nonce, env }) => "<!DOCTYPE html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<link rel=\"icon\" href=\"" + assets + "/favicon.png\" />\n\t\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\t\t" + head + "\n\t</head>\n\t<body data-sveltekit-preload-data=\"hover\">\n\t\t<div style=\"display: contents\">" + body + "</div>\n\t</body>\n</html>\n", app: ({ head, body, assets, nonce, env }) => "<!DOCTYPE html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<link rel=\"icon\" href=\"" + assets + "/favicon.png\" />\n\t\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\t\t" + head + "\n\t</head>\n\t<body data-sveltekit-preload-data=\"hover\">\n\t\t<div style=\"display: contents\">" + body + "</div>\n\t</body>\n</html>\n",
error: ({ status, message }) => "<!doctype html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<title>" + message + "</title>\n\n\t\t<style>\n\t\t\tbody {\n\t\t\t\t--bg: white;\n\t\t\t\t--fg: #222;\n\t\t\t\t--divider: #ccc;\n\t\t\t\tbackground: var(--bg);\n\t\t\t\tcolor: var(--fg);\n\t\t\t\tfont-family:\n\t\t\t\t\tsystem-ui,\n\t\t\t\t\t-apple-system,\n\t\t\t\t\tBlinkMacSystemFont,\n\t\t\t\t\t'Segoe UI',\n\t\t\t\t\tRoboto,\n\t\t\t\t\tOxygen,\n\t\t\t\t\tUbuntu,\n\t\t\t\t\tCantarell,\n\t\t\t\t\t'Open Sans',\n\t\t\t\t\t'Helvetica Neue',\n\t\t\t\t\tsans-serif;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tjustify-content: center;\n\t\t\t\theight: 100vh;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t.error {\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tmax-width: 32rem;\n\t\t\t\tmargin: 0 1rem;\n\t\t\t}\n\n\t\t\t.status {\n\t\t\t\tfont-weight: 200;\n\t\t\t\tfont-size: 3rem;\n\t\t\t\tline-height: 1;\n\t\t\t\tposition: relative;\n\t\t\t\ttop: -0.05rem;\n\t\t\t}\n\n\t\t\t.message {\n\t\t\t\tborder-left: 1px solid var(--divider);\n\t\t\t\tpadding: 0 0 0 1rem;\n\t\t\t\tmargin: 0 0 0 1rem;\n\t\t\t\tmin-height: 2.5rem;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t}\n\n\t\t\t.message h1 {\n\t\t\t\tfont-weight: 400;\n\t\t\t\tfont-size: 1em;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t@media (prefers-color-scheme: dark) {\n\t\t\t\tbody {\n\t\t\t\t\t--bg: #222;\n\t\t\t\t\t--fg: #ddd;\n\t\t\t\t\t--divider: #666;\n\t\t\t\t}\n\t\t\t}\n\t\t</style>\n\t</head>\n\t<body>\n\t\t<div class=\"error\">\n\t\t\t<span class=\"status\">" + status + "</span>\n\t\t\t<div class=\"message\">\n\t\t\t\t<h1>" + message + "</h1>\n\t\t\t</div>\n\t\t</div>\n\t</body>\n</html>\n" error: ({ status, message }) => "<!doctype html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<title>" + message + "</title>\n\n\t\t<style>\n\t\t\tbody {\n\t\t\t\t--bg: white;\n\t\t\t\t--fg: #222;\n\t\t\t\t--divider: #ccc;\n\t\t\t\tbackground: 
var(--bg);\n\t\t\t\tcolor: var(--fg);\n\t\t\t\tfont-family:\n\t\t\t\t\tsystem-ui,\n\t\t\t\t\t-apple-system,\n\t\t\t\t\tBlinkMacSystemFont,\n\t\t\t\t\t'Segoe UI',\n\t\t\t\t\tRoboto,\n\t\t\t\t\tOxygen,\n\t\t\t\t\tUbuntu,\n\t\t\t\t\tCantarell,\n\t\t\t\t\t'Open Sans',\n\t\t\t\t\t'Helvetica Neue',\n\t\t\t\t\tsans-serif;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tjustify-content: center;\n\t\t\t\theight: 100vh;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t.error {\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tmax-width: 32rem;\n\t\t\t\tmargin: 0 1rem;\n\t\t\t}\n\n\t\t\t.status {\n\t\t\t\tfont-weight: 200;\n\t\t\t\tfont-size: 3rem;\n\t\t\t\tline-height: 1;\n\t\t\t\tposition: relative;\n\t\t\t\ttop: -0.05rem;\n\t\t\t}\n\n\t\t\t.message {\n\t\t\t\tborder-left: 1px solid var(--divider);\n\t\t\t\tpadding: 0 0 0 1rem;\n\t\t\t\tmargin: 0 0 0 1rem;\n\t\t\t\tmin-height: 2.5rem;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t}\n\n\t\t\t.message h1 {\n\t\t\t\tfont-weight: 400;\n\t\t\t\tfont-size: 1em;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t@media (prefers-color-scheme: dark) {\n\t\t\t\tbody {\n\t\t\t\t\t--bg: #222;\n\t\t\t\t\t--fg: #ddd;\n\t\t\t\t\t--divider: #666;\n\t\t\t\t}\n\t\t\t}\n\t\t</style>\n\t</head>\n\t<body>\n\t\t<div class=\"error\">\n\t\t\t<span class=\"status\">" + status + "</span>\n\t\t\t<div class=\"message\">\n\t\t\t\t<h1>" + message + "</h1>\n\t\t\t</div>\n\t\t</div>\n\t</body>\n</html>\n"
}, },
version_hash: "n7gbte" version_hash: "oj9twc"
}; };
export async function get_hooks() { export async function get_hooks() {

View File

@@ -0,0 +1,205 @@
<!-- [DEF:DashboardGrid:Component] -->
<!--
@SEMANTICS: dashboard, grid, selection, pagination
@PURPOSE: Displays a grid of dashboards with selection and pagination.
@LAYER: Component
@RELATION: USED_BY -> frontend/src/routes/migration/+page.svelte
@INVARIANT: Selected IDs must be a subset of available dashboards.
-->
<script lang="ts">
// [SECTION: IMPORTS]
import { createEventDispatcher } from 'svelte';
import type { DashboardMetadata } from '../types/dashboard';
// [/SECTION]
// [SECTION: PROPS]
export let dashboards: DashboardMetadata[] = [];
export let selectedIds: number[] = [];
// [/SECTION]
// [SECTION: STATE]
let filterText = "";
let currentPage = 0;
let pageSize = 20;
let sortColumn: keyof DashboardMetadata = "title";
let sortDirection: "asc" | "desc" = "asc";
// [/SECTION]
// [SECTION: DERIVED]
$: filteredDashboards = dashboards.filter(d =>
d.title.toLowerCase().includes(filterText.toLowerCase())
);
$: sortedDashboards = [...filteredDashboards].sort((a, b) => {
let aVal = a[sortColumn];
let bVal = b[sortColumn];
if (sortColumn === "id") {
aVal = Number(aVal);
bVal = Number(bVal);
}
if (aVal < bVal) return sortDirection === "asc" ? -1 : 1;
if (aVal > bVal) return sortDirection === "asc" ? 1 : -1;
return 0;
});
$: paginatedDashboards = sortedDashboards.slice(
currentPage * pageSize,
(currentPage + 1) * pageSize
);
$: totalPages = Math.ceil(sortedDashboards.length / pageSize);
$: allSelected = paginatedDashboards.length > 0 && paginatedDashboards.every(d => selectedIds.includes(d.id));
$: someSelected = paginatedDashboards.some(d => selectedIds.includes(d.id));
// [/SECTION]
// [SECTION: EVENTS]
const dispatch = createEventDispatcher<{ selectionChanged: number[] }>();
// [/SECTION]
// [DEF:handleSort:Function]
// @PURPOSE: Toggles sort direction or changes sort column.
function handleSort(column: keyof DashboardMetadata) {
	const sameColumn = sortColumn === column;
	// Clicking the active column flips direction; a new column resets to ascending.
	sortDirection = sameColumn && sortDirection === "asc" ? "desc" : "asc";
	if (!sameColumn) {
		sortColumn = column;
	}
}
// [/DEF:handleSort]
// [DEF:handleSelectionChange:Function]
// @PURPOSE: Handles individual checkbox changes.
function handleSelectionChange(id: number, checked: boolean) {
	// Rebuild the selection immutably so Svelte's reactivity picks up the change.
	const updated = checked
		? (selectedIds.includes(id) ? [...selectedIds] : [...selectedIds, id])
		: selectedIds.filter(sid => sid !== id);
	selectedIds = updated;
	dispatch('selectionChanged', updated);
}
// [/DEF:handleSelectionChange]
// [DEF:handleSelectAll:Function]
// @PURPOSE: Handles select all checkbox.
function handleSelectAll(checked: boolean) {
let newSelected = [...selectedIds];
if (checked) {
paginatedDashboards.forEach(d => {
if (!newSelected.includes(d.id)) newSelected.push(d.id);
});
} else {
paginatedDashboards.forEach(d => {
newSelected = newSelected.filter(sid => sid !== d.id);
});
}
selectedIds = newSelected;
dispatch('selectionChanged', newSelected);
}
// [/DEF:handleSelectAll]
// [DEF:goToPage:Function]
// @PURPOSE: Navigates to the given zero-based page, ignoring out-of-range
// requests so Prev/Next can never leave the valid page window.
function goToPage(page: number) {
const withinBounds = page >= 0 && page < totalPages;
if (!withinBounds) return;
currentPage = page;
}
// [/DEF:goToPage]
</script>
<!-- [SECTION: TEMPLATE] -->
<div class="dashboard-grid">
<!-- Filter Input -->
<div class="mb-4">
<input
type="text"
bind:value={filterText}
placeholder="Search dashboards..."
class="w-full px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
/>
</div>
<!-- Grid/Table -->
<div class="overflow-x-auto">
<table class="min-w-full bg-white border border-gray-300">
<thead class="bg-gray-50">
<tr>
<th class="px-4 py-2 border-b">
<input
type="checkbox"
checked={allSelected}
indeterminate={someSelected && !allSelected}
on:change={(e) => handleSelectAll((e.target as HTMLInputElement).checked)}
/>
</th>
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('title')}>
Title {sortColumn === 'title' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
</th>
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('last_modified')}>
Last Modified {sortColumn === 'last_modified' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
</th>
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('status')}>
Status {sortColumn === 'status' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
</th>
</tr>
</thead>
<tbody>
{#each paginatedDashboards as dashboard (dashboard.id)}
<tr class="hover:bg-gray-50">
<td class="px-4 py-2 border-b">
<input
type="checkbox"
checked={selectedIds.includes(dashboard.id)}
on:change={(e) => handleSelectionChange(dashboard.id, (e.target as HTMLInputElement).checked)}
/>
</td>
<td class="px-4 py-2 border-b">{dashboard.title}</td>
<td class="px-4 py-2 border-b">{new Date(dashboard.last_modified).toLocaleDateString()}</td>
<td class="px-4 py-2 border-b">
<span class="px-2 py-1 text-xs font-medium rounded-full {dashboard.status === 'published' ? 'bg-green-100 text-green-800' : 'bg-gray-100 text-gray-800'}">
{dashboard.status}
</span>
</td>
</tr>
{/each}
</tbody>
</table>
</div>
<!-- Pagination Controls -->
<div class="flex items-center justify-between mt-4">
<div class="text-sm text-gray-700">
Showing {currentPage * pageSize + 1} to {Math.min((currentPage + 1) * pageSize, sortedDashboards.length)} of {sortedDashboards.length} dashboards
</div>
<div class="flex space-x-2">
<button
class="px-3 py-1 text-sm border border-gray-300 rounded-md hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
disabled={currentPage === 0}
on:click={() => goToPage(currentPage - 1)}
>
Previous
</button>
<button
class="px-3 py-1 text-sm border border-gray-300 rounded-md hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
disabled={currentPage >= totalPages - 1}
on:click={() => goToPage(currentPage + 1)}
>
Next
</button>
</div>
</div>
</div>
<!-- [/SECTION] -->
<style>
/* Component styles */
</style>
<!-- [/DEF:DashboardGrid] -->

View File

@@ -36,8 +36,9 @@
<!-- [SECTION: TEMPLATE] --> <!-- [SECTION: TEMPLATE] -->
<div class="flex flex-col space-y-1"> <div class="flex flex-col space-y-1">
<label class="text-sm font-medium text-gray-700">{label}</label> <label for="env-select" class="text-sm font-medium text-gray-700">{label}</label>
<select <select
id="env-select"
class="block w-full pl-3 pr-10 py-2 text-base border-gray-300 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm rounded-md" class="block w-full pl-3 pr-10 py-2 text-base border-gray-300 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm rounded-md"
value={selectedId} value={selectedId}
on:change={handleSelect} on:change={handleSelect}

View File

@@ -12,16 +12,26 @@
// [SECTION: IMPORTS] // [SECTION: IMPORTS]
import { onMount } from 'svelte'; import { onMount } from 'svelte';
import EnvSelector from '../../components/EnvSelector.svelte'; import EnvSelector from '../../components/EnvSelector.svelte';
import DashboardGrid from '../../components/DashboardGrid.svelte';
import MappingTable from '../../components/MappingTable.svelte';
import MissingMappingModal from '../../components/MissingMappingModal.svelte';
import type { DashboardMetadata, DashboardSelection } from '../../types/dashboard';
// [/SECTION] // [/SECTION]
// [SECTION: STATE] // [SECTION: STATE]
let environments = []; let environments: any[] = [];
let sourceEnvId = ""; let sourceEnvId = "";
let targetEnvId = ""; let targetEnvId = "";
let dashboardRegex = ".*";
let replaceDb = false; let replaceDb = false;
let loading = true; let loading = true;
let error = ""; let error = "";
let dashboards: DashboardMetadata[] = [];
let selectedDashboardIds: number[] = [];
let sourceDatabases: any[] = [];
let targetDatabases: any[] = [];
let mappings: any[] = [];
let suggestions: any[] = [];
let fetchingDbs = false;
// [/SECTION] // [/SECTION]
// [DEF:fetchEnvironments:Function] // [DEF:fetchEnvironments:Function]
@@ -42,8 +52,100 @@
} }
// [/DEF:fetchEnvironments] // [/DEF:fetchEnvironments]
// [DEF:fetchDashboards:Function]
/**
* @purpose Fetches dashboards for the selected source environment.
* @param envId The environment ID.
* @post dashboards state is updated.
*/
async function fetchDashboards(envId: string) {
try {
const response = await fetch(`/api/environments/${envId}/dashboards`);
if (!response.ok) throw new Error('Failed to fetch dashboards');
dashboards = await response.json();
selectedDashboardIds = []; // Reset selection when env changes
} catch (e) {
error = e.message;
dashboards = [];
}
}
// [/DEF:fetchDashboards]
onMount(fetchEnvironments); onMount(fetchEnvironments);
// Reactive: fetch dashboards when source env changes
$: if (sourceEnvId) fetchDashboards(sourceEnvId);
// [DEF:fetchDatabases:Function]
/**
* @purpose Fetches databases from both environments and gets suggestions.
*/
async function fetchDatabases() {
if (!sourceEnvId || !targetEnvId) return;
fetchingDbs = true;
error = "";
try {
const [srcRes, tgtRes, mapRes, sugRes] = await Promise.all([
fetch(`/api/environments/${sourceEnvId}/databases`),
fetch(`/api/environments/${targetEnvId}/databases`),
fetch(`/api/mappings?source_env_id=${sourceEnvId}&target_env_id=${targetEnvId}`),
fetch(`/api/mappings/suggest`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ source_env_id: sourceEnvId, target_env_id: targetEnvId })
})
]);
if (!srcRes.ok || !tgtRes.ok) throw new Error('Failed to fetch databases from environments');
sourceDatabases = await srcRes.json();
targetDatabases = await tgtRes.json();
mappings = await mapRes.json();
suggestions = await sugRes.json();
} catch (e) {
error = e.message;
} finally {
fetchingDbs = false;
}
}
// [/DEF:fetchDatabases]
// [DEF:handleMappingUpdate:Function]
/**
* @purpose Saves a mapping to the backend.
*/
async function handleMappingUpdate(event: CustomEvent) {
const { sourceUuid, targetUuid } = event.detail;
const sDb = sourceDatabases.find(d => d.uuid === sourceUuid);
const tDb = targetDatabases.find(d => d.uuid === targetUuid);
if (!sDb || !tDb) return;
try {
const response = await fetch('/api/mappings', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
source_env_id: sourceEnvId,
target_env_id: targetEnvId,
source_db_uuid: sourceUuid,
target_db_uuid: targetUuid,
source_db_name: sDb.database_name,
target_db_name: tDb.database_name
})
});
if (!response.ok) throw new Error('Failed to save mapping');
const savedMapping = await response.json();
mappings = [...mappings.filter(m => m.source_db_uuid !== sourceUuid), savedMapping];
} catch (e) {
error = e.message;
}
}
// [/DEF:handleMappingUpdate]
// [DEF:startMigration:Function] // [DEF:startMigration:Function]
/** /**
* @purpose Starts the migration process. * @purpose Starts the migration process.
@@ -58,10 +160,33 @@
error = "Source and target environments must be different."; error = "Source and target environments must be different.";
return; return;
} }
if (selectedDashboardIds.length === 0) {
error = "Please select at least one dashboard to migrate.";
return;
}
error = ""; error = "";
console.log(`[MigrationDashboard][Action] Starting migration from ${sourceEnvId} to ${targetEnvId} (Replace DB: ${replaceDb})`); try {
// TODO: Implement actual migration trigger in US3 const selection: DashboardSelection = {
selected_ids: selectedDashboardIds,
source_env_id: sourceEnvId,
target_env_id: targetEnvId
};
console.log(`[MigrationDashboard][Action] Starting migration with selection:`, selection);
const response = await fetch('/api/migration/execute', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(selection)
});
console.log(`[MigrationDashboard][Action] API response status: ${response.status}`);
if (!response.ok) throw new Error(`Failed to start migration: ${response.status} ${response.statusText}`);
const result = await response.json();
console.log(`[MigrationDashboard][Action] Migration started: ${result.task_id} - ${result.message}`);
// TODO: Show success message or redirect to task status
} catch (e) {
console.error(`[MigrationDashboard][Failure] Migration failed:`, e);
error = e.message;
}
} }
// [/DEF:startMigration] // [/DEF:startMigration]
</script> </script>
@@ -91,23 +216,28 @@
/> />
</div> </div>
<!-- [DEF:DashboardSelectionSection] -->
<div class="mb-8"> <div class="mb-8">
<label for="dashboard-regex" class="block text-sm font-medium text-gray-700 mb-1">Dashboard Regex</label> <h2 class="text-lg font-medium mb-4">Select Dashboards</h2>
<input
id="dashboard-regex"
type="text"
bind:value={dashboardRegex}
placeholder="e.g. ^Finance Dashboard$"
class="shadow-sm focus:ring-indigo-500 focus:border-indigo-500 block w-full sm:text-sm border-gray-300 rounded-md"
/>
<p class="mt-1 text-sm text-gray-500">Regular expression to filter dashboards to migrate.</p>
</div>
<div class="flex items-center mb-8"> {#if sourceEnvId}
<DashboardGrid
{dashboards}
bind:selectedIds={selectedDashboardIds}
/>
{:else}
<p class="text-gray-500 italic">Select a source environment to view dashboards.</p>
{/if}
</div>
<!-- [/DEF:DashboardSelectionSection] -->
<div class="flex items-center mb-4">
<input <input
id="replace-db" id="replace-db"
type="checkbox" type="checkbox"
bind:checked={replaceDb} bind:checked={replaceDb}
on:change={() => { if (replaceDb && sourceDatabases.length === 0) fetchDatabases(); }}
class="h-4 w-4 text-indigo-600 focus:ring-indigo-500 border-gray-300 rounded" class="h-4 w-4 text-indigo-600 focus:ring-indigo-500 border-gray-300 rounded"
/> />
<label for="replace-db" class="ml-2 block text-sm text-gray-900"> <label for="replace-db" class="ml-2 block text-sm text-gray-900">
@@ -115,9 +245,33 @@
</label> </label>
</div> </div>
{#if replaceDb}
<div class="mb-8 p-4 border rounded-md bg-gray-50">
<h3 class="text-md font-medium mb-4">Database Mappings</h3>
{#if fetchingDbs}
<p>Loading databases and suggestions...</p>
{:else if sourceDatabases.length > 0}
<MappingTable
{sourceDatabases}
{targetDatabases}
{mappings}
{suggestions}
on:update={handleMappingUpdate}
/>
{:else if sourceEnvId && targetEnvId}
<button
on:click={fetchDatabases}
class="text-indigo-600 hover:text-indigo-500 text-sm font-medium"
>
Refresh Databases & Suggestions
</button>
{/if}
</div>
{/if}
<button <button
on:click={startMigration} on:click={startMigration}
disabled={!sourceEnvId || !targetEnvId || sourceEnvId === targetEnvId} disabled={!sourceEnvId || !targetEnvId || sourceEnvId === targetEnvId || selectedDashboardIds.length === 0}
class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md shadow-sm text-white bg-indigo-600 hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 disabled:bg-gray-400" class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md shadow-sm text-white bg-indigo-600 hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 disabled:bg-gray-400"
> >
Start Migration Start Migration

View File

@@ -0,0 +1,12 @@
export interface DashboardMetadata {
id: number;
title: string;
last_modified: string;
status: string;
}
export interface DashboardSelection {
selected_ids: number[];
source_env_id: string;
target_env_id: string;
}

BIN
mappings.db Normal file

Binary file not shown.

View File

@@ -10,20 +10,20 @@ description: "Architecture tasks for Migration Plugin Dashboard Grid"
## Phase 1: Setup & Models ## Phase 1: Setup & Models
- [ ] A001 Define contracts/scaffolding for migration route in backend/src/api/routes/migration.py - [x] A001 Define contracts/scaffolding for migration route in backend/src/api/routes/migration.py
- [ ] A002 Define contracts/scaffolding for Dashboard model in backend/src/models/dashboard.py - [x] A002 Define contracts/scaffolding for Dashboard model in backend/src/models/dashboard.py
## Phase 2: User Story 1 - Advanced Dashboard Selection ## Phase 2: User Story 1 - Advanced Dashboard Selection
- [ ] A003 [US1] Define contracts/scaffolding for SupersetClient extensions in backend/src/core/superset_client.py - [x] A003 [US1] Define contracts/scaffolding for SupersetClient extensions in backend/src/core/superset_client.py
- [ ] A004 [US1] Define contracts/scaffolding for GET /api/migration/dashboards endpoint in backend/src/api/routes/migration.py - [x] A004 [US1] Define contracts/scaffolding for GET /api/migration/dashboards endpoint in backend/src/api/routes/migration.py
- [ ] A005 [US1] Define contracts/scaffolding for DashboardGrid component in frontend/src/components/DashboardGrid.svelte - [x] A005 [US1] Define contracts/scaffolding for DashboardGrid component in frontend/src/components/DashboardGrid.svelte
- [ ] A006 [US1] Define contracts/scaffolding for migration page integration in frontend/src/routes/migration/+page.svelte - [x] A006 [US1] Define contracts/scaffolding for migration page integration in frontend/src/routes/migration/+page.svelte
- [ ] A007 [US1] Define contracts/scaffolding for POST /api/migration/execute endpoint in backend/src/api/routes/migration.py - [x] A007 [US1] Define contracts/scaffolding for POST /api/migration/execute endpoint in backend/src/api/routes/migration.py
## Handover Checklist ## Handover Checklist
- [ ] All new files created with `[DEF]` anchors - [x] All new files created with `[DEF]` anchors
- [ ] All functions/classes have `@PURPOSE`, `@PRE`, `@POST` tags - [x] All functions/classes have `@PURPOSE`, `@PRE`, `@POST` tags
- [ ] No "naked code" (logic outside of anchors) - [x] No "naked code" (logic outside of anchors)
- [ ] `tasks-dev.md` is ready for the Developer Agent - [x] `tasks-dev.md` is ready for the Developer Agent

View File

@@ -1,34 +1,49 @@
--- ---
description: "Development tasks for Migration Plugin Dashboard Grid"
---
description: "Developer tasks for Migration Plugin Dashboard Grid" # Development Tasks: Migration Plugin Dashboard Grid
---
# Developer Tasks: Migration Plugin Dashboard Grid
**Role**: Developer Agent **Role**: Developer Agent
**Goal**: Implement the "How" (Logic, State, Error Handling) inside the defined contracts. **Goal**: Implement the logic defined in the architecture contracts.
## Phase 1: Setup & Models ## Phase 1: Backend Implementation
- [ ] D001 Implement logic for migration route in backend/src/api/routes/migration.py - [x] D001 [US1] Implement `SupersetClient.get_dashboards_summary` in `backend/src/core/superset_client.py`
- [ ] D002 Register migration router in backend/src/app.py - **Context**: Fetch dashboards from Superset API with specific columns (`id`, `dashboard_title`, `changed_on_utc`, `published`).
- [ ] D003 Export migration router in backend/src/api/routes/__init__.py - **Input**: None (uses instance config).
- [ ] D004 Implement logic for Dashboard model in backend/src/models/dashboard.py - **Output**: List of dictionaries mapped to `DashboardMetadata` fields.
## Phase 2: User Story 1 - Advanced Dashboard Selection - [x] D002 [US1] Implement `get_dashboards` endpoint in `backend/src/api/routes/migration.py`
- **Context**: Initialize `SupersetClient` with environment config and call `get_dashboards_summary`.
- **Input**: `env_id` (path param).
- **Output**: JSON list of `DashboardMetadata`.
- [ ] D005 [P] [US1] Implement logic for SupersetClient extensions in backend/src/core/superset_client.py - [x] D003 [US1] Implement `execute_migration` endpoint in `backend/src/api/routes/migration.py`
- [ ] D006 [US1] Implement logic for GET /api/migration/dashboards endpoint in backend/src/api/routes/migration.py - **Context**: Validate selection and initiate migration task (placeholder or TaskManager integration).
- [ ] D007 [US1] Implement structure and styles for DashboardGrid component in frontend/src/components/DashboardGrid.svelte - **Input**: `DashboardSelection` body.
- [ ] D008 [US1] Implement data fetching and state management in frontend/src/components/DashboardGrid.svelte - **Output**: Task ID and status message.
- [ ] D009 [US1] Implement client-side filtering logic in frontend/src/components/DashboardGrid.svelte
- [ ] D010 [US1] Implement pagination logic in frontend/src/components/DashboardGrid.svelte
- [ ] D011 [US1] Implement selection logic (single and Select All) in frontend/src/components/DashboardGrid.svelte
- [ ] D012 [US1] Integrate DashboardGrid and connect selection to submission in frontend/src/routes/migration/+page.svelte
- [ ] D013 [US1] Implement logic for POST /api/migration/execute endpoint in backend/src/api/routes/migration.py
- [ ] D014 [US1] Verify semantic compliance and belief state logging
## Polish & Quality Assurance ## Phase 2: Frontend Implementation
- [ ] D015 Verify error handling and empty states in frontend/src/components/DashboardGrid.svelte - [x] D004 [US1] Implement `DashboardGrid.svelte` logic
- [ ] D016 Ensure consistent styling with Tailwind CSS in frontend/src/components/DashboardGrid.svelte - **Context**: `frontend/src/components/DashboardGrid.svelte`
- **Requirements**:
- Client-side pagination (default 20 items).
- Sorting by Title, Last Modified, Status.
- Text filtering (search by title).
- Multi-selection with "Select All" capability.
- Emit selection events.
- [x] D005 [US1] Integrate `DashboardGrid` into Migration Page
- **Context**: `frontend/src/routes/migration/+page.svelte`
- **Requirements**:
- Fetch dashboards when `sourceEnvId` changes.
- Bind `dashboards` data to `DashboardGrid`.
- Bind `selectedDashboardIds`.
- Update `startMigration` to send `selectedDashboardIds` to backend.
## Phase 3: Verification
- [ ] D006 Verify Dashboard Grid functionality (Sort, Filter, Page).
- [ ] D007 Verify API integration (Fetch dashboards, Start migration).

View File

@@ -20,7 +20,7 @@ from .utils.logger import SupersetLogger
class SupersetConfig(BaseModel): class SupersetConfig(BaseModel):
env: str = Field(..., description="Название окружения (например, dev, prod).") env: str = Field(..., description="Название окружения (например, dev, prod).")
base_url: str = Field(..., description="Базовый URL Superset API, включая /api/v1.") base_url: str = Field(..., description="Базовый URL Superset API, включая /api/v1.")
auth: Dict[str, str] = Field(..., description="Словарь с данными для аутентификации (provider, username, password, refresh).") auth: Dict[str, Any] = Field(..., description="Словарь с данными для аутентификации (provider, username, password, refresh).")
verify_ssl: bool = Field(True, description="Флаг для проверки SSL-сертификатов.") verify_ssl: bool = Field(True, description="Флаг для проверки SSL-сертификатов.")
timeout: int = Field(30, description="Таймаут в секундах для HTTP-запросов.") timeout: int = Field(30, description="Таймаут в секундах для HTTP-запросов.")
logger: Optional[SupersetLogger] = Field(None, description="Экземпляр логгера для логирования.") logger: Optional[SupersetLogger] = Field(None, description="Экземпляр логгера для логирования.")
@@ -32,7 +32,7 @@ class SupersetConfig(BaseModel):
# @THROW: ValueError - Если отсутствуют обязательные поля. # @THROW: ValueError - Если отсутствуют обязательные поля.
# @PARAM: v (Dict[str, str]) - Значение поля auth. # @PARAM: v (Dict[str, str]) - Значение поля auth.
@validator('auth') @validator('auth')
def validate_auth(cls, v: Dict[str, str]) -> Dict[str, str]: def validate_auth(cls, v: Dict[str, Any]) -> Dict[str, Any]:
required = {'provider', 'username', 'password', 'refresh'} required = {'provider', 'username', 'password', 'refresh'}
if not required.issubset(v.keys()): if not required.issubset(v.keys()):
raise ValueError(f"Словарь 'auth' должен содержать поля: {required}. Отсутствующие: {required - v.keys()}") raise ValueError(f"Словарь 'auth' должен содержать поля: {required}. Отсутствующие: {required - v.keys()}")

188
test_migration_debug.py Normal file
View File

@@ -0,0 +1,188 @@
#!/usr/bin/env python3
"""
Debug script to test the migration flow and identify where the issue occurs.
"""
import sys
import os
from pathlib import Path
# Add project root to sys.path
project_root = Path(__file__).parent
sys.path.insert(0, str(project_root))
def test_plugin_loader():
    """Check that the plugin loader discovers the superset-migration plugin.

    Prints every step for debugging purposes.

    Returns:
        bool: True if the migration plugin was found, False on any failure.
    """
    print("=== Testing Plugin Loader ===")
    try:
        from backend.src.core.plugin_loader import PluginLoader

        # Resolve the plugin directory relative to this script; Path comes
        # from the module-level import (the redundant local import was removed).
        plugin_dir = Path(__file__).parent / "backend" / "src" / "plugins"
        print(f"Plugin directory: {plugin_dir}")
        print(f"Directory exists: {plugin_dir.exists()}")
        if plugin_dir.exists():
            print(f"Files in plugin directory: {list(plugin_dir.iterdir())}")
        plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
        configs = plugin_loader.get_all_plugin_configs()
        print(f"Found {len(configs)} plugins:")
        for config in configs:
            print(f" - {config.name} (ID: {config.id})")
        migration_plugin = plugin_loader.get_plugin("superset-migration")
        if migration_plugin:
            print("✓ Migration plugin found")
            print(f" Name: {migration_plugin.name}")
            print(f" Description: {migration_plugin.description}")
            print(f" Schema: {migration_plugin.get_schema()}")
        else:
            print("✗ Migration plugin NOT found")
        return migration_plugin is not None
    except Exception as e:
        # Broad catch is deliberate: this debug script must keep running
        # the remaining checks even if an import or step blows up.
        print(f"✗ Plugin loader test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
def test_task_manager():
    """Check that the TaskManager can create a migration task.

    Returns:
        bool: True if a task was created successfully, False otherwise.
    """
    print("\n=== Testing Task Manager ===")
    try:
        import asyncio

        from backend.src.core.plugin_loader import PluginLoader
        from backend.src.core.task_manager import TaskManager

        # Path comes from the module-level import; the redundant
        # function-local `from pathlib import Path` was removed.
        plugin_dir = Path(__file__).parent / "backend" / "src" / "plugins"
        plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
        task_manager = TaskManager(plugin_loader)
        # Minimal, plausible parameters for the migration plugin.
        test_params = {
            "from_env": "dev",
            "to_env": "prod",
            "dashboard_regex": ".*",
            "replace_db_config": False
        }
        print(f"Creating test task with params: {test_params}")
        task = asyncio.run(task_manager.create_task("superset-migration", test_params))
        print(f"✓ Task created successfully: {task.id}")
        print(f" Status: {task.status}")
        print(f" Plugin ID: {task.plugin_id}")
        return True
    except Exception as e:
        # Keep the debug run alive; report the failure and move on.
        print(f"✗ Task manager test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
def test_migration_endpoint():
    """Test the migration endpoint directly, bypassing FastAPI.

    Resolves the endpoint's dependencies by hand, calls
    ``execute_migration``, then polls the created task for up to ~20
    seconds until it reaches a terminal status.

    Returns:
        bool: True when the task ends in SUCCESS, False otherwise.
    """
    print("\n=== Testing Migration Endpoint ===")
    try:
        from backend.src.api.routes.migration import execute_migration
        from backend.src.models.dashboard import DashboardSelection
        from backend.src.dependencies import get_config_manager, get_task_manager
        # Create a test selection
        selection = DashboardSelection(
            selected_ids=[1, 2, 3],
            source_env_id="ss2",
            target_env_id="ss1"
        )
        print(f"Testing migration with selection: {selection.dict()}")
        # This would normally be called by FastAPI with dependencies
        # For testing, we'll call it directly
        config_manager = get_config_manager()
        task_manager = get_task_manager()
        import asyncio
        # We need to ensure TaskManager uses the SAME loop as run_and_wait
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        # Re-initialize TaskManager with the new loop
        # NOTE(review): reaching into `task_manager.loop` assumes TaskManager
        # stores and uses a public `loop` attribute — confirm against its
        # implementation.
        task_manager.loop = loop

        async def run_and_wait():
            # Fire the endpoint, then poll the task once per second.
            result = await execute_migration(selection, config_manager, task_manager)
            print(f"✓ Migration endpoint test successful: {result}")
            task_id = result["task_id"]
            print(f"Waiting for task {task_id} to complete...")
            for i in range(20):
                await asyncio.sleep(1)
                task = task_manager.get_task(task_id)
                print(f" [{i}] Task status: {task.status}")
                if task.status in ["SUCCESS", "FAILED"]:
                    print(f"Task finished with status: {task.status}")
                    for log in task.logs:
                        print(f" [{log.level}] {log.message}")
                    return task.status == "SUCCESS"
            print("Task timed out. Current status: " + task_manager.get_task(task_id).status)
            return False
        try:
            return loop.run_until_complete(run_and_wait())
        finally:
            # Always close the loop we created, even on failure.
            loop.close()
    except Exception as e:
        print(f"✗ Migration endpoint test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
def main():
    """Run every debug check in sequence and print a summary.

    Returns:
        bool: True when all checks passed.
    """
    print("Migration Debug Test Script")
    print("=" * 50)
    # Run the checks in order; each returns a boolean verdict.
    checks = [
        ("Plugin Loader", test_plugin_loader),
        ("Task Manager", test_task_manager),
        ("Migration Endpoint", test_migration_endpoint),
    ]
    results = [(name, check()) for name, check in checks]
    print("\n" + "=" * 50)
    print("TEST RESULTS SUMMARY:")
    print("=" * 50)
    passed = sum(1 for _, ok in results if ok)
    for test_name, ok in results:
        print(f"{test_name}: {'PASS' if ok else 'FAIL'}")
    print(f"\nTotal: {passed}/{len(results)} tests passed")
    if passed == len(results):
        print("✓ All tests passed! The migration system appears to be working correctly.")
    else:
        print("✗ Some tests failed. Check the logs above for details.")
    return passed == len(results)


if __name__ == "__main__":
    success = main()
    sys.exit(0 if success else 1)