Compare commits
2 Commits
3d75a21127
...
4c9d554432
| Author | SHA1 | Date | |
|---|---|---|---|
| 4c9d554432 | |||
| 6962a78112 |
@@ -13,6 +13,9 @@ Auto-generated from all feature plans. Last updated: 2025-12-19
|
||||
- N/A (Superset API integration) (007-migration-dashboard-grid)
|
||||
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, Superset API (007-migration-dashboard-grid)
|
||||
- N/A (Superset API integration - read-only for metadata) (007-migration-dashboard-grid)
|
||||
- Python 3.9+ (backend), Node.js 18+ (frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API (008-migration-ui-improvements)
|
||||
- SQLite (optional for job history), existing database for mappings (008-migration-ui-improvements)
|
||||
- Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API (008-migration-ui-improvements)
|
||||
|
||||
- Python 3.9+ (Backend), Node.js 18+ (Frontend Build) (001-plugin-arch-svelte-ui)
|
||||
|
||||
@@ -33,9 +36,9 @@ cd src; pytest; ruff check .
|
||||
Python 3.9+ (Backend), Node.js 18+ (Frontend Build): Follow standard conventions
|
||||
|
||||
## Recent Changes
|
||||
- 008-migration-ui-improvements: Added Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API
|
||||
- 008-migration-ui-improvements: Added Python 3.9+ (backend), Node.js 18+ (frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API
|
||||
- 007-migration-dashboard-grid: Added Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, Superset API
|
||||
- 007-migration-dashboard-grid: Added Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS
|
||||
- 007-migration-dashboard-grid: Added [if applicable, e.g., PostgreSQL, CoreData, files or N/A]
|
||||
|
||||
|
||||
<!-- MANUAL ADDITIONS START -->
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
customModes:
|
||||
- slug: tech-lead
|
||||
name: Tech Lead
|
||||
description: Architect for contracts and scaffolding
|
||||
roleDefinition: >-
|
||||
You are Kilo Code, acting as a Technical Lead and System Architect.
|
||||
|
||||
Your primary responsibility is to define the "Structure" and "Contracts" of the system before implementation, following the Semantic Code Generation Protocol.
|
||||
|
||||
You operate primarily on 'tasks-arch.md' task lists.
|
||||
|
||||
YOUR DUTIES:
|
||||
1. Create new files and directory structures.
|
||||
2. Define Modules, Classes, and Functions using `[DEF]` anchors.
|
||||
3. Write clear Headers with `@PURPOSE`, `@LAYER`, `@RELATION`.
|
||||
4. Define strict Contracts using `@PRE`, `@POST`, `@PARAM`, `@RETURN`.
|
||||
5. Leave the implementation body empty or with a placeholder (e.g., `pass`, `return ...`).
|
||||
|
||||
YOU DO NOT WRITE BUSINESS LOGIC. Your output is the "Skeleton" and "Rules" that the Developer Agent will fill in.
|
||||
whenToUse: >-
|
||||
Use this mode during the "Architecture Phase" of a feature. Select this mode when you need to create new files, define API surfaces, or set up the project structure before coding begins.
|
||||
groups:
|
||||
- read
|
||||
- edit
|
||||
- command
|
||||
- list_files
|
||||
- search_files
|
||||
@@ -21,7 +21,7 @@ Semantic definitions (Contracts) must ALWAYS precede implementation code. Logic
|
||||
Once defined, architectural decisions in the Module Header (`@LAYER`, `@INVARIANT`, `@CONSTRAINT`) are treated as immutable constraints for that module. Changes to these require an explicit refactoring step, not ad-hoc modification during implementation.
|
||||
|
||||
### III. Semantic Format Compliance
|
||||
All output must strictly follow the `[DEF]` / `[/DEF]` anchor syntax with specific Metadata Tags (`@KEY`) and Graph Relations (`@RELATION`). **Crucially, the closing anchor must strictly match the full content of the opening anchor (e.g., `[DEF:module_name:Module]` must close with `[/DEF:module_name:Module]`).**
|
||||
All output must strictly follow the `[DEF]` / `[/DEF]` anchor syntax with specific Metadata Tags (`@KEY`) and Graph Relations (`@RELATION`). **Crucially, the closing anchor must strictly match the full content of the opening anchor (e.g., `[DEF:identifier:Type]` must close with `[/DEF:identifier:Type]`).**
|
||||
|
||||
**Standardized Graph Relations**
|
||||
To ensure the integrity of the Semantic Graph, `@RELATION` must use a strict taxonomy:
|
||||
|
||||
@@ -115,13 +115,15 @@ fi
|
||||
|
||||
# Check for task files if required
|
||||
if $REQUIRE_TASKS; then
|
||||
if [[ ! -f "$TASKS_ARCH" ]]; then
|
||||
echo "ERROR: tasks-arch.md not found in $FEATURE_DIR" >&2
|
||||
echo "Run /speckit.tasks first to create the task lists." >&2
|
||||
exit 1
|
||||
fi
|
||||
if [[ ! -f "$TASKS_DEV" ]]; then
|
||||
echo "ERROR: tasks-dev.md not found in $FEATURE_DIR" >&2
|
||||
# Check for split tasks first
|
||||
if [[ -f "$TASKS_ARCH" ]] && [[ -f "$TASKS_DEV" ]]; then
|
||||
: # Split tasks exist, proceed
|
||||
# Fallback to unified tasks.md
|
||||
elif [[ -f "$TASKS" ]]; then
|
||||
: # Unified tasks exist, proceed
|
||||
else
|
||||
echo "ERROR: No valid task files found in $FEATURE_DIR" >&2
|
||||
echo "Expected 'tasks-arch.md' AND 'tasks-dev.md' (split) OR 'tasks.md' (unified)" >&2
|
||||
echo "Run /speckit.tasks first to create the task lists." >&2
|
||||
exit 1
|
||||
fi
|
||||
@@ -143,8 +145,12 @@ fi
|
||||
|
||||
# Include task files if requested and they exist
|
||||
if $INCLUDE_TASKS; then
|
||||
if [[ -f "$TASKS_ARCH" ]] || [[ -f "$TASKS_DEV" ]]; then
|
||||
[[ -f "$TASKS_ARCH" ]] && docs+=("tasks-arch.md")
|
||||
[[ -f "$TASKS_DEV" ]] && docs+=("tasks-dev.md")
|
||||
elif [[ -f "$TASKS" ]]; then
|
||||
docs+=("tasks.md")
|
||||
fi
|
||||
fi
|
||||
|
||||
# Output results
|
||||
@@ -170,7 +176,11 @@ else
|
||||
check_file "$QUICKSTART" "quickstart.md"
|
||||
|
||||
if $INCLUDE_TASKS; then
|
||||
if [[ -f "$TASKS_ARCH" ]] || [[ -f "$TASKS_DEV" ]]; then
|
||||
check_file "$TASKS_ARCH" "tasks-arch.md"
|
||||
check_file "$TASKS_DEV" "tasks-dev.md"
|
||||
else
|
||||
check_file "$TASKS" "tasks.md"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
BIN
backend/migrations.db
Normal file
BIN
backend/migrations.db
Normal file
Binary file not shown.
71
backend/src/api/routes/migration.py
Normal file
71
backend/src/api/routes/migration.py
Normal file
@@ -0,0 +1,71 @@
|
||||
# [DEF:backend.src.api.routes.migration:Module]
|
||||
# @SEMANTICS: api, migration, dashboards
|
||||
# @PURPOSE: API endpoints for migration operations.
|
||||
# @LAYER: API
|
||||
# @RELATION: DEPENDS_ON -> backend.src.dependencies
|
||||
# @RELATION: DEPENDS_ON -> backend.src.models.dashboard
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from typing import List, Dict
|
||||
from backend.src.dependencies import get_config_manager, get_task_manager
|
||||
from backend.src.models.dashboard import DashboardMetadata, DashboardSelection
|
||||
from backend.src.core.superset_client import SupersetClient
|
||||
from superset_tool.models import SupersetConfig
|
||||
|
||||
router = APIRouter(prefix="/api", tags=["migration"])
|
||||
|
||||
# [DEF:get_dashboards:Function]
|
||||
# @PURPOSE: Fetch all dashboards from the specified environment for the grid.
|
||||
# @PRE: Environment ID must be valid.
|
||||
# @POST: Returns a list of dashboard metadata.
|
||||
# @PARAM: env_id (str) - The ID of the environment to fetch from.
|
||||
# @RETURN: List[DashboardMetadata]
|
||||
@router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata])
|
||||
async def get_dashboards(env_id: str, config_manager=Depends(get_config_manager)):
|
||||
environments = config_manager.get_environments()
|
||||
env = next((e for e in environments if e.id == env_id), None)
|
||||
if not env:
|
||||
raise HTTPException(status_code=404, detail="Environment not found")
|
||||
|
||||
config = SupersetConfig(
|
||||
env=env.name,
|
||||
base_url=env.url,
|
||||
auth={'provider': 'db', 'username': env.username, 'password': env.password, 'refresh': False},
|
||||
verify_ssl=True,
|
||||
timeout=30
|
||||
)
|
||||
client = SupersetClient(config)
|
||||
dashboards = client.get_dashboards_summary()
|
||||
return dashboards
|
||||
# [/DEF:get_dashboards]
|
||||
|
||||
# [DEF:execute_migration:Function]
|
||||
# @PURPOSE: Execute the migration of selected dashboards.
|
||||
# @PRE: Selection must be valid and environments must exist.
|
||||
# @POST: Starts the migration task and returns the task ID.
|
||||
# @PARAM: selection (DashboardSelection) - The dashboards to migrate.
|
||||
# @RETURN: Dict - {"task_id": str, "message": str}
|
||||
@router.post("/migration/execute")
|
||||
async def execute_migration(selection: DashboardSelection, config_manager=Depends(get_config_manager), task_manager=Depends(get_task_manager)):
|
||||
# Validate environments exist
|
||||
environments = config_manager.get_environments()
|
||||
env_ids = {e.id for e in environments}
|
||||
if selection.source_env_id not in env_ids or selection.target_env_id not in env_ids:
|
||||
raise HTTPException(status_code=400, detail="Invalid source or target environment")
|
||||
|
||||
# Create migration task with debug logging
|
||||
from ...core.logger import logger
|
||||
logger.info(f"Creating migration task with selection: {selection.dict()}")
|
||||
logger.info(f"Available environments: {env_ids}")
|
||||
logger.info(f"Source env: {selection.source_env_id}, Target env: {selection.target_env_id}")
|
||||
|
||||
try:
|
||||
task = await task_manager.create_task("superset-migration", selection.dict())
|
||||
logger.info(f"Task created successfully: {task.id}")
|
||||
return {"task_id": task.id, "message": "Migration initiated"}
|
||||
except Exception as e:
|
||||
logger.error(f"Task creation failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}")
|
||||
# [/DEF:execute_migration]
|
||||
|
||||
# [/DEF:backend.src.api.routes.migration]
|
||||
@@ -3,11 +3,11 @@
|
||||
# @PURPOSE: Defines the FastAPI router for task-related endpoints, allowing clients to create, list, and get the status of tasks.
|
||||
# @LAYER: UI (API)
|
||||
# @RELATION: Depends on the TaskManager. It is included by the main app.
|
||||
from typing import List, Dict, Any
|
||||
from typing import List, Dict, Any, Optional
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ...core.task_manager import TaskManager, Task
|
||||
from ...core.task_manager import TaskManager, Task, TaskStatus, LogEntry
|
||||
from ...dependencies import get_task_manager
|
||||
|
||||
router = APIRouter()
|
||||
@@ -19,6 +19,9 @@ class CreateTaskRequest(BaseModel):
|
||||
class ResolveTaskRequest(BaseModel):
|
||||
resolution_params: Dict[str, Any]
|
||||
|
||||
class ResumeTaskRequest(BaseModel):
|
||||
passwords: Dict[str, str]
|
||||
|
||||
@router.post("/", response_model=Task, status_code=status.HTTP_201_CREATED)
|
||||
async def create_task(
|
||||
request: CreateTaskRequest,
|
||||
@@ -72,4 +75,19 @@ async def resolve_task(
|
||||
return task_manager.get_task(task_id)
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
|
||||
|
||||
@router.post("/{task_id}/resume", response_model=Task)
|
||||
async def resume_task(
|
||||
task_id: str,
|
||||
request: ResumeTaskRequest,
|
||||
task_manager: TaskManager = Depends(get_task_manager)
|
||||
):
|
||||
"""
|
||||
Resume a task that is awaiting input (e.g., passwords).
|
||||
"""
|
||||
try:
|
||||
task_manager.resume_task_with_password(task_id, request.passwords)
|
||||
return task_manager.get_task(task_id)
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
|
||||
# [/DEF]
|
||||
@@ -20,7 +20,7 @@ import os
|
||||
|
||||
from .dependencies import get_task_manager
|
||||
from .core.logger import logger
|
||||
from .api.routes import plugins, tasks, settings, environments, mappings
|
||||
from .api.routes import plugins, tasks, settings, environments, mappings, migration
|
||||
from .core.database import init_db
|
||||
|
||||
# Initialize database
|
||||
@@ -51,6 +51,7 @@ app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"])
|
||||
app.include_router(settings.router, prefix="/api/settings", tags=["Settings"])
|
||||
app.include_router(environments.router)
|
||||
app.include_router(mappings.router)
|
||||
app.include_router(migration.router)
|
||||
|
||||
# [DEF:WebSocketEndpoint:Endpoint]
|
||||
# @SEMANTICS: websocket, logs, streaming, real-time
|
||||
|
||||
@@ -15,6 +15,8 @@ import shutil
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
from .logger import logger, belief_scope
|
||||
import yaml
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:MigrationEngine:Class]
|
||||
@@ -26,28 +28,42 @@ class MigrationEngine:
|
||||
# @PARAM: zip_path (str) - Path to the source ZIP file.
|
||||
# @PARAM: output_path (str) - Path where the transformed ZIP will be saved.
|
||||
# @PARAM: db_mapping (Dict[str, str]) - Mapping of source UUID to target UUID.
|
||||
# @PARAM: strip_databases (bool) - Whether to remove the databases directory from the archive.
|
||||
# @RETURN: bool - True if successful.
|
||||
def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str]) -> bool:
|
||||
def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str], strip_databases: bool = True) -> bool:
|
||||
"""
|
||||
Transform a Superset export ZIP by replacing database UUIDs.
|
||||
"""
|
||||
with belief_scope("MigrationEngine.transform_zip"):
|
||||
with tempfile.TemporaryDirectory() as temp_dir_str:
|
||||
temp_dir = Path(temp_dir_str)
|
||||
|
||||
try:
|
||||
# 1. Extract
|
||||
logger.info(f"[MigrationEngine.transform_zip][Action] Extracting ZIP: {zip_path}")
|
||||
with zipfile.ZipFile(zip_path, 'r') as zf:
|
||||
zf.extractall(temp_dir)
|
||||
|
||||
# 2. Transform YAMLs
|
||||
# Datasets are usually in datasets/*.yaml
|
||||
dataset_files = list(temp_dir.glob("**/datasets/*.yaml"))
|
||||
dataset_files = list(temp_dir.glob("**/datasets/**/*.yaml")) + list(temp_dir.glob("**/datasets/*.yaml"))
|
||||
dataset_files = list(set(dataset_files))
|
||||
|
||||
logger.info(f"[MigrationEngine.transform_zip][State] Found {len(dataset_files)} dataset files.")
|
||||
for ds_file in dataset_files:
|
||||
logger.info(f"[MigrationEngine.transform_zip][Action] Transforming dataset: {ds_file}")
|
||||
self._transform_yaml(ds_file, db_mapping)
|
||||
|
||||
# 3. Re-package
|
||||
logger.info(f"[MigrationEngine.transform_zip][Action] Re-packaging ZIP to: {output_path} (strip_databases={strip_databases})")
|
||||
with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zf:
|
||||
for root, dirs, files in os.walk(temp_dir):
|
||||
rel_root = Path(root).relative_to(temp_dir)
|
||||
|
||||
if strip_databases and "databases" in rel_root.parts:
|
||||
logger.info(f"[MigrationEngine.transform_zip][Action] Skipping file in databases directory: {rel_root}")
|
||||
continue
|
||||
|
||||
for file in files:
|
||||
file_path = Path(root) / file
|
||||
arcname = file_path.relative_to(temp_dir)
|
||||
@@ -55,7 +71,7 @@ class MigrationEngine:
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"Error transforming ZIP: {e}")
|
||||
logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
|
||||
return False
|
||||
|
||||
# [DEF:MigrationEngine._transform_yaml:Function]
|
||||
|
||||
@@ -47,12 +47,17 @@ class PluginLoader:
|
||||
Loads a single Python module and extracts PluginBase subclasses.
|
||||
"""
|
||||
# Try to determine the correct package prefix based on how the app is running
|
||||
if "backend.src" in __name__:
|
||||
# For standalone execution, we need to handle the import differently
|
||||
if __name__ == "__main__" or "test" in __name__:
|
||||
# When running as standalone or in tests, use relative import
|
||||
package_name = f"plugins.{module_name}"
|
||||
elif "backend.src" in __name__:
|
||||
package_prefix = "backend.src.plugins"
|
||||
package_name = f"{package_prefix}.{module_name}"
|
||||
else:
|
||||
package_prefix = "src.plugins"
|
||||
|
||||
package_name = f"{package_prefix}.{module_name}"
|
||||
|
||||
# print(f"DEBUG: Loading plugin {module_name} as {package_name}")
|
||||
spec = importlib.util.spec_from_file_location(package_name, file_path)
|
||||
if spec is None or spec.loader is None:
|
||||
@@ -106,9 +111,11 @@ class PluginLoader:
|
||||
# validate(instance={}, schema=schema)
|
||||
self._plugins[plugin_id] = plugin_instance
|
||||
self._plugin_configs[plugin_id] = plugin_config
|
||||
print(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.") # Replace with proper logging
|
||||
from ..core.logger import logger
|
||||
logger.info(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.")
|
||||
except Exception as e:
|
||||
print(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}") # Replace with proper logging
|
||||
from ..core.logger import logger
|
||||
logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
|
||||
|
||||
|
||||
def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
|
||||
|
||||
@@ -52,6 +52,32 @@ class SupersetClient(BaseSupersetClient):
|
||||
return databases[0] if databases else None
|
||||
# [/DEF:SupersetClient.get_database_by_uuid]
|
||||
|
||||
# [DEF:SupersetClient.get_dashboards_summary:Function]
|
||||
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
|
||||
# @POST: Returns a list of dashboard dictionaries.
|
||||
# @RETURN: List[Dict]
|
||||
def get_dashboards_summary(self) -> List[Dict]:
|
||||
"""
|
||||
Fetches dashboard metadata optimized for the grid.
|
||||
Returns a list of dictionaries mapped to DashboardMetadata fields.
|
||||
"""
|
||||
query = {
|
||||
"columns": ["id", "dashboard_title", "changed_on_utc", "published"]
|
||||
}
|
||||
_, dashboards = self.get_dashboards(query=query)
|
||||
|
||||
# Map fields to DashboardMetadata schema
|
||||
result = []
|
||||
for dash in dashboards:
|
||||
result.append({
|
||||
"id": dash.get("id"),
|
||||
"title": dash.get("dashboard_title"),
|
||||
"last_modified": dash.get("changed_on_utc"),
|
||||
"status": "published" if dash.get("published") else "draft"
|
||||
})
|
||||
return result
|
||||
# [/DEF:SupersetClient.get_dashboards_summary]
|
||||
|
||||
# [/DEF:SupersetClient]
|
||||
|
||||
# [/DEF:backend.src.core.superset_client]
|
||||
|
||||
@@ -1,203 +0,0 @@
|
||||
# [DEF:TaskManagerModule:Module]
|
||||
# @SEMANTICS: task, manager, lifecycle, execution, state
|
||||
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
|
||||
# @LAYER: Core
|
||||
# @RELATION: Depends on PluginLoader to get plugin instances. It is used by the API layer to create and query tasks.
|
||||
import asyncio
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Dict, Any, List, Optional
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
# Assuming PluginBase and PluginConfig are defined in plugin_base.py
|
||||
# from .plugin_base import PluginBase, PluginConfig # Not needed here, TaskManager interacts with the PluginLoader
|
||||
|
||||
# [DEF:TaskStatus:Enum]
|
||||
# @SEMANTICS: task, status, state, enum
|
||||
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
|
||||
class TaskStatus(str, Enum):
|
||||
PENDING = "PENDING"
|
||||
RUNNING = "RUNNING"
|
||||
SUCCESS = "SUCCESS"
|
||||
FAILED = "FAILED"
|
||||
AWAITING_MAPPING = "AWAITING_MAPPING"
|
||||
|
||||
# [/DEF]
|
||||
|
||||
# [DEF:LogEntry:Class]
|
||||
# @SEMANTICS: log, entry, record, pydantic
|
||||
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
|
||||
class LogEntry(BaseModel):
|
||||
timestamp: datetime = Field(default_factory=datetime.utcnow)
|
||||
level: str
|
||||
message: str
|
||||
context: Optional[Dict[str, Any]] = None
|
||||
# [/DEF]
|
||||
|
||||
# [DEF:Task:Class]
|
||||
# @SEMANTICS: task, job, execution, state, pydantic
|
||||
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
|
||||
class Task(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
plugin_id: str
|
||||
status: TaskStatus = TaskStatus.PENDING
|
||||
started_at: Optional[datetime] = None
|
||||
finished_at: Optional[datetime] = None
|
||||
user_id: Optional[str] = None
|
||||
logs: List[LogEntry] = Field(default_factory=list)
|
||||
params: Dict[str, Any] = Field(default_factory=dict)
|
||||
|
||||
# [/DEF]
|
||||
|
||||
# [DEF:TaskManager:Class]
|
||||
# @SEMANTICS: task, manager, lifecycle, execution, state
|
||||
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
|
||||
class TaskManager:
|
||||
"""
|
||||
Manages the lifecycle of tasks, including their creation, execution, and state tracking.
|
||||
"""
|
||||
def __init__(self, plugin_loader):
|
||||
self.plugin_loader = plugin_loader
|
||||
self.tasks: Dict[str, Task] = {}
|
||||
self.subscribers: Dict[str, List[asyncio.Queue]] = {}
|
||||
self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution
|
||||
self.loop = asyncio.get_event_loop()
|
||||
self.task_futures: Dict[str, asyncio.Future] = {}
|
||||
# [/DEF]
|
||||
|
||||
async def create_task(self, plugin_id: str, params: Dict[str, Any], user_id: Optional[str] = None) -> Task:
|
||||
"""
|
||||
Creates and queues a new task for execution.
|
||||
"""
|
||||
if not self.plugin_loader.has_plugin(plugin_id):
|
||||
raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
|
||||
|
||||
plugin = self.plugin_loader.get_plugin(plugin_id)
|
||||
# Validate params against plugin schema (this will be done at a higher level, e.g., API route)
|
||||
# For now, a basic check
|
||||
if not isinstance(params, dict):
|
||||
raise ValueError("Task parameters must be a dictionary.")
|
||||
|
||||
task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
|
||||
self.tasks[task.id] = task
|
||||
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
|
||||
return task
|
||||
|
||||
async def _run_task(self, task_id: str):
|
||||
"""
|
||||
Internal method to execute a task.
|
||||
"""
|
||||
task = self.tasks[task_id]
|
||||
plugin = self.plugin_loader.get_plugin(task.plugin_id)
|
||||
|
||||
task.status = TaskStatus.RUNNING
|
||||
task.started_at = datetime.utcnow()
|
||||
self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'")
|
||||
|
||||
try:
|
||||
# Execute plugin in a separate thread to avoid blocking the event loop
|
||||
# if the plugin's execute method is synchronous and potentially CPU-bound.
|
||||
# If the plugin's execute method is already async, this can be simplified.
|
||||
# Pass task_id to plugin so it can signal pause
|
||||
params = {**task.params, "_task_id": task_id}
|
||||
await self.loop.run_in_executor(
|
||||
self.executor,
|
||||
lambda: asyncio.run(plugin.execute(params)) if asyncio.iscoroutinefunction(plugin.execute) else plugin.execute(params)
|
||||
)
|
||||
task.status = TaskStatus.SUCCESS
|
||||
self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'")
|
||||
except Exception as e:
|
||||
task.status = TaskStatus.FAILED
|
||||
self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__})
|
||||
finally:
|
||||
task.finished_at = datetime.utcnow()
|
||||
# In a real system, you might notify clients via WebSocket here
|
||||
|
||||
async def resolve_task(self, task_id: str, resolution_params: Dict[str, Any]):
|
||||
"""
|
||||
Resumes a task that is awaiting mapping.
|
||||
"""
|
||||
task = self.tasks.get(task_id)
|
||||
if not task or task.status != TaskStatus.AWAITING_MAPPING:
|
||||
raise ValueError("Task is not awaiting mapping.")
|
||||
|
||||
# Update task params with resolution
|
||||
task.params.update(resolution_params)
|
||||
task.status = TaskStatus.RUNNING
|
||||
self._add_log(task_id, "INFO", "Task resumed after mapping resolution.")
|
||||
|
||||
# Signal the future to continue
|
||||
if task_id in self.task_futures:
|
||||
self.task_futures[task_id].set_result(True)
|
||||
|
||||
async def wait_for_resolution(self, task_id: str):
|
||||
"""
|
||||
Pauses execution and waits for a resolution signal.
|
||||
"""
|
||||
task = self.tasks.get(task_id)
|
||||
if not task: return
|
||||
|
||||
task.status = TaskStatus.AWAITING_MAPPING
|
||||
self.task_futures[task_id] = self.loop.create_future()
|
||||
|
||||
try:
|
||||
await self.task_futures[task_id]
|
||||
finally:
|
||||
del self.task_futures[task_id]
|
||||
|
||||
def get_task(self, task_id: str) -> Optional[Task]:
|
||||
"""
|
||||
Retrieves a task by its ID.
|
||||
"""
|
||||
return self.tasks.get(task_id)
|
||||
|
||||
def get_all_tasks(self) -> List[Task]:
|
||||
"""
|
||||
Retrieves all registered tasks.
|
||||
"""
|
||||
return list(self.tasks.values())
|
||||
|
||||
def get_task_logs(self, task_id: str) -> List[LogEntry]:
|
||||
"""
|
||||
Retrieves logs for a specific task.
|
||||
"""
|
||||
task = self.tasks.get(task_id)
|
||||
return task.logs if task else []
|
||||
|
||||
def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
|
||||
"""
|
||||
Adds a log entry to a task and notifies subscribers.
|
||||
"""
|
||||
task = self.tasks.get(task_id)
|
||||
if not task:
|
||||
return
|
||||
|
||||
log_entry = LogEntry(level=level, message=message, context=context)
|
||||
task.logs.append(log_entry)
|
||||
|
||||
# Notify subscribers
|
||||
if task_id in self.subscribers:
|
||||
for queue in self.subscribers[task_id]:
|
||||
self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
|
||||
|
||||
async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
|
||||
"""
|
||||
Subscribes to real-time logs for a task.
|
||||
"""
|
||||
queue = asyncio.Queue()
|
||||
if task_id not in self.subscribers:
|
||||
self.subscribers[task_id] = []
|
||||
self.subscribers[task_id].append(queue)
|
||||
return queue
|
||||
|
||||
def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
|
||||
"""
|
||||
Unsubscribes from real-time logs for a task.
|
||||
"""
|
||||
if task_id in self.subscribers:
|
||||
self.subscribers[task_id].remove(queue)
|
||||
if not self.subscribers[task_id]:
|
||||
del self.subscribers[task_id]
|
||||
12
backend/src/core/task_manager/__init__.py
Normal file
12
backend/src/core/task_manager/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
||||
# [DEF:TaskManagerPackage:Module]
|
||||
# @SEMANTICS: task, manager, package, exports
|
||||
# @PURPOSE: Exports the public API of the task manager package.
|
||||
# @LAYER: Core
|
||||
# @RELATION: Aggregates models and manager.
|
||||
|
||||
from .models import Task, TaskStatus, LogEntry
|
||||
from .manager import TaskManager
|
||||
|
||||
__all__ = ["TaskManager", "Task", "TaskStatus", "LogEntry"]
|
||||
|
||||
# [/DEF:TaskManagerPackage:Module]
|
||||
335
backend/src/core/task_manager/manager.py
Normal file
335
backend/src/core/task_manager/manager.py
Normal file
@@ -0,0 +1,335 @@
|
||||
# [DEF:TaskManagerModule:Module]
|
||||
# @SEMANTICS: task, manager, lifecycle, execution, state
|
||||
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
|
||||
# @LAYER: Core
|
||||
# @RELATION: Depends on PluginLoader to get plugin instances. It is used by the API layer to create and query tasks.
|
||||
# @INVARIANT: Task IDs are unique.
|
||||
# @CONSTRAINT: Must use belief_scope for logging.
|
||||
|
||||
# [SECTION: IMPORTS]
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List, Optional
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
from .models import Task, TaskStatus, LogEntry
|
||||
from .persistence import TaskPersistenceService
|
||||
from ..logger import logger, belief_scope
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:TaskManager:Class]
|
||||
# @SEMANTICS: task, manager, lifecycle, execution, state
|
||||
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
|
||||
class TaskManager:
|
||||
"""
|
||||
Manages the lifecycle of tasks, including their creation, execution, and state tracking.
|
||||
"""
|
||||
|
||||
# [DEF:TaskManager.__init__:Function]
|
||||
# @PURPOSE: Initialize the TaskManager with dependencies.
|
||||
# @PRE: plugin_loader is initialized.
|
||||
# @POST: TaskManager is ready to accept tasks.
|
||||
# @PARAM: plugin_loader - The plugin loader instance.
|
||||
def __init__(self, plugin_loader):
|
||||
with belief_scope("TaskManager.__init__"):
|
||||
self.plugin_loader = plugin_loader
|
||||
self.tasks: Dict[str, Task] = {}
|
||||
self.subscribers: Dict[str, List[asyncio.Queue]] = {}
|
||||
self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution
|
||||
self.persistence_service = TaskPersistenceService()
|
||||
|
||||
try:
|
||||
self.loop = asyncio.get_running_loop()
|
||||
except RuntimeError:
|
||||
self.loop = asyncio.get_event_loop()
|
||||
self.task_futures: Dict[str, asyncio.Future] = {}
|
||||
# [/DEF:TaskManager.__init__:Function]
|
||||
|
||||
# [DEF:TaskManager.create_task:Function]
|
||||
# @PURPOSE: Creates and queues a new task for execution.
|
||||
# @PRE: Plugin with plugin_id exists. Params are valid.
|
||||
# @POST: Task is created, added to registry, and scheduled for execution.
|
||||
# @PARAM: plugin_id (str) - The ID of the plugin to run.
|
||||
# @PARAM: params (Dict[str, Any]) - Parameters for the plugin.
|
||||
# @PARAM: user_id (Optional[str]) - ID of the user requesting the task.
|
||||
# @RETURN: Task - The created task instance.
|
||||
# @THROWS: ValueError if plugin not found or params invalid.
|
||||
async def create_task(self, plugin_id: str, params: Dict[str, Any], user_id: Optional[str] = None) -> Task:
|
||||
with belief_scope("TaskManager.create_task", f"plugin_id={plugin_id}"):
|
||||
if not self.plugin_loader.has_plugin(plugin_id):
|
||||
logger.error(f"Plugin with ID '{plugin_id}' not found.")
|
||||
raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
|
||||
|
||||
plugin = self.plugin_loader.get_plugin(plugin_id)
|
||||
|
||||
if not isinstance(params, dict):
|
||||
logger.error("Task parameters must be a dictionary.")
|
||||
raise ValueError("Task parameters must be a dictionary.")
|
||||
|
||||
task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
|
||||
self.tasks[task.id] = task
|
||||
logger.info(f"Task {task.id} created and scheduled for execution")
|
||||
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
|
||||
return task
|
||||
# [/DEF:TaskManager.create_task:Function]
|
||||
|
||||
# [DEF:TaskManager._run_task:Function]
|
||||
# @PURPOSE: Internal method to execute a task.
|
||||
# @PRE: Task exists in registry.
|
||||
# @POST: Task is executed, status updated to SUCCESS or FAILED.
|
||||
# @PARAM: task_id (str) - The ID of the task to run.
|
||||
async def _run_task(self, task_id: str):
|
||||
with belief_scope("TaskManager._run_task", f"task_id={task_id}"):
|
||||
task = self.tasks[task_id]
|
||||
plugin = self.plugin_loader.get_plugin(task.plugin_id)
|
||||
|
||||
logger.info(f"Starting execution of task {task_id} for plugin '{plugin.name}'")
|
||||
task.status = TaskStatus.RUNNING
|
||||
task.started_at = datetime.utcnow()
|
||||
self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'")
|
||||
|
||||
try:
|
||||
# Execute plugin
|
||||
params = {**task.params, "_task_id": task_id}
|
||||
|
||||
if asyncio.iscoroutinefunction(plugin.execute):
|
||||
await plugin.execute(params)
|
||||
else:
|
||||
await self.loop.run_in_executor(
|
||||
self.executor,
|
||||
plugin.execute,
|
||||
params
|
||||
)
|
||||
|
||||
logger.info(f"Task {task_id} completed successfully")
|
||||
task.status = TaskStatus.SUCCESS
|
||||
self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'")
|
||||
except Exception as e:
|
||||
logger.error(f"Task {task_id} failed: {e}")
|
||||
task.status = TaskStatus.FAILED
|
||||
self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__})
|
||||
finally:
|
||||
task.finished_at = datetime.utcnow()
|
||||
logger.info(f"Task {task_id} execution finished with status: {task.status}")
|
||||
# [/DEF:TaskManager._run_task:Function]
|
||||
|
||||
# [DEF:TaskManager.resolve_task:Function]
|
||||
# @PURPOSE: Resumes a task that is awaiting mapping.
|
||||
# @PRE: Task exists and is in AWAITING_MAPPING state.
|
||||
# @POST: Task status updated to RUNNING, params updated, execution resumed.
|
||||
# @PARAM: task_id (str) - The ID of the task.
|
||||
# @PARAM: resolution_params (Dict[str, Any]) - Params to resolve the wait.
|
||||
# @THROWS: ValueError if task not found or not awaiting mapping.
|
||||
async def resolve_task(self, task_id: str, resolution_params: Dict[str, Any]):
|
||||
with belief_scope("TaskManager.resolve_task", f"task_id={task_id}"):
|
||||
task = self.tasks.get(task_id)
|
||||
if not task or task.status != TaskStatus.AWAITING_MAPPING:
|
||||
raise ValueError("Task is not awaiting mapping.")
|
||||
|
||||
# Update task params with resolution
|
||||
task.params.update(resolution_params)
|
||||
task.status = TaskStatus.RUNNING
|
||||
self._add_log(task_id, "INFO", "Task resumed after mapping resolution.")
|
||||
|
||||
# Signal the future to continue
|
||||
if task_id in self.task_futures:
|
||||
self.task_futures[task_id].set_result(True)
|
||||
# [/DEF:TaskManager.resolve_task:Function]
|
||||
|
||||
# [DEF:TaskManager.wait_for_resolution:Function]
|
||||
# @PURPOSE: Pauses execution and waits for a resolution signal.
|
||||
# @PRE: Task exists.
|
||||
# @POST: Execution pauses until future is set.
|
||||
# @PARAM: task_id (str) - The ID of the task.
|
||||
async def wait_for_resolution(self, task_id: str):
|
||||
with belief_scope("TaskManager.wait_for_resolution", f"task_id={task_id}"):
|
||||
task = self.tasks.get(task_id)
|
||||
if not task: return
|
||||
|
||||
task.status = TaskStatus.AWAITING_MAPPING
|
||||
self.task_futures[task_id] = self.loop.create_future()
|
||||
|
||||
try:
|
||||
await self.task_futures[task_id]
|
||||
finally:
|
||||
if task_id in self.task_futures:
|
||||
del self.task_futures[task_id]
|
||||
# [/DEF:TaskManager.wait_for_resolution:Function]
|
||||
|
||||
# [DEF:TaskManager.wait_for_input:Function]
|
||||
# @PURPOSE: Pauses execution and waits for user input.
|
||||
# @PRE: Task exists.
|
||||
# @POST: Execution pauses until future is set via resume_task_with_password.
|
||||
# @PARAM: task_id (str) - The ID of the task.
|
||||
async def wait_for_input(self, task_id: str):
|
||||
with belief_scope("TaskManager.wait_for_input", f"task_id={task_id}"):
|
||||
task = self.tasks.get(task_id)
|
||||
if not task: return
|
||||
|
||||
# Status is already set to AWAITING_INPUT by await_input()
|
||||
self.task_futures[task_id] = self.loop.create_future()
|
||||
|
||||
try:
|
||||
await self.task_futures[task_id]
|
||||
finally:
|
||||
if task_id in self.task_futures:
|
||||
del self.task_futures[task_id]
|
||||
# [/DEF:TaskManager.wait_for_input:Function]
|
||||
|
||||
# [DEF:TaskManager.get_task:Function]
|
||||
# @PURPOSE: Retrieves a task by its ID.
|
||||
# @PARAM: task_id (str) - ID of the task.
|
||||
# @RETURN: Optional[Task] - The task or None.
|
||||
def get_task(self, task_id: str) -> Optional[Task]:
|
||||
return self.tasks.get(task_id)
|
||||
# [/DEF:TaskManager.get_task:Function]
|
||||
|
||||
# [DEF:TaskManager.get_all_tasks:Function]
|
||||
# @PURPOSE: Retrieves all registered tasks.
|
||||
# @RETURN: List[Task] - All tasks.
|
||||
def get_all_tasks(self) -> List[Task]:
|
||||
return list(self.tasks.values())
|
||||
# [/DEF:TaskManager.get_all_tasks:Function]
|
||||
|
||||
# [DEF:TaskManager.get_tasks:Function]
|
||||
# @PURPOSE: Retrieves tasks with pagination and optional status filter.
|
||||
# @PRE: limit and offset are non-negative integers.
|
||||
# @POST: Returns a list of tasks sorted by start_time descending.
|
||||
# @PARAM: limit (int) - Maximum number of tasks to return.
|
||||
# @PARAM: offset (int) - Number of tasks to skip.
|
||||
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
|
||||
# @RETURN: List[Task] - List of tasks matching criteria.
|
||||
def get_tasks(self, limit: int = 10, offset: int = 0, status: Optional[TaskStatus] = None) -> List[Task]:
|
||||
tasks = list(self.tasks.values())
|
||||
if status:
|
||||
tasks = [t for t in tasks if t.status == status]
|
||||
# Sort by start_time descending (most recent first)
|
||||
tasks.sort(key=lambda t: t.started_at or datetime.min, reverse=True)
|
||||
return tasks[offset:offset + limit]
|
||||
# [/DEF:TaskManager.get_tasks:Function]
|
||||
|
||||
# [DEF:TaskManager.get_task_logs:Function]
|
||||
# @PURPOSE: Retrieves logs for a specific task.
|
||||
# @PARAM: task_id (str) - ID of the task.
|
||||
# @RETURN: List[LogEntry] - List of log entries.
|
||||
def get_task_logs(self, task_id: str) -> List[LogEntry]:
|
||||
task = self.tasks.get(task_id)
|
||||
return task.logs if task else []
|
||||
# [/DEF:TaskManager.get_task_logs:Function]
|
||||
|
||||
# [DEF:TaskManager._add_log:Function]
|
||||
# @PURPOSE: Adds a log entry to a task and notifies subscribers.
|
||||
# @PRE: Task exists.
|
||||
# @POST: Log added to task and pushed to queues.
|
||||
# @PARAM: task_id (str) - ID of the task.
|
||||
# @PARAM: level (str) - Log level.
|
||||
# @PARAM: message (str) - Log message.
|
||||
# @PARAM: context (Optional[Dict]) - Log context.
|
||||
def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
|
||||
task = self.tasks.get(task_id)
|
||||
if not task:
|
||||
return
|
||||
|
||||
log_entry = LogEntry(level=level, message=message, context=context)
|
||||
task.logs.append(log_entry)
|
||||
|
||||
# Notify subscribers
|
||||
if task_id in self.subscribers:
|
||||
for queue in self.subscribers[task_id]:
|
||||
self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
|
||||
# [/DEF:TaskManager._add_log:Function]
|
||||
|
||||
# [DEF:TaskManager.subscribe_logs:Function]
|
||||
# @PURPOSE: Subscribes to real-time logs for a task.
|
||||
# @PARAM: task_id (str) - ID of the task.
|
||||
# @RETURN: asyncio.Queue - Queue for log entries.
|
||||
async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
|
||||
queue = asyncio.Queue()
|
||||
if task_id not in self.subscribers:
|
||||
self.subscribers[task_id] = []
|
||||
self.subscribers[task_id].append(queue)
|
||||
return queue
|
||||
# [/DEF:TaskManager.subscribe_logs:Function]
|
||||
|
||||
# [DEF:TaskManager.unsubscribe_logs:Function]
|
||||
# @PURPOSE: Unsubscribes from real-time logs for a task.
|
||||
# @PARAM: task_id (str) - ID of the task.
|
||||
# @PARAM: queue (asyncio.Queue) - Queue to remove.
|
||||
def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
|
||||
if task_id in self.subscribers:
|
||||
if queue in self.subscribers[task_id]:
|
||||
self.subscribers[task_id].remove(queue)
|
||||
if not self.subscribers[task_id]:
|
||||
del self.subscribers[task_id]
|
||||
# [/DEF:TaskManager.unsubscribe_logs:Function]
|
||||
|
||||
# [DEF:TaskManager.persist_awaiting_input_tasks:Function]
|
||||
# @PURPOSE: Persist tasks in AWAITING_INPUT state using persistence service.
|
||||
def persist_awaiting_input_tasks(self) -> None:
|
||||
self.persistence_service.persist_tasks(list(self.tasks.values()))
|
||||
# [/DEF:TaskManager.persist_awaiting_input_tasks:Function]
|
||||
|
||||
# [DEF:TaskManager.load_persisted_tasks:Function]
|
||||
# @PURPOSE: Load persisted tasks using persistence service.
|
||||
def load_persisted_tasks(self) -> None:
|
||||
loaded_tasks = self.persistence_service.load_tasks()
|
||||
for task in loaded_tasks:
|
||||
if task.id not in self.tasks:
|
||||
self.tasks[task.id] = task
|
||||
# [/DEF:TaskManager.load_persisted_tasks:Function]
|
||||
|
||||
# [DEF:TaskManager.await_input:Function]
|
||||
# @PURPOSE: Transition a task to AWAITING_INPUT state with input request.
|
||||
# @PRE: Task exists and is in RUNNING state.
|
||||
# @POST: Task status changed to AWAITING_INPUT, input_request set, persisted.
|
||||
# @PARAM: task_id (str) - ID of the task.
|
||||
# @PARAM: input_request (Dict) - Details about required input.
|
||||
# @THROWS: ValueError if task not found or not RUNNING.
|
||||
def await_input(self, task_id: str, input_request: Dict[str, Any]) -> None:
|
||||
with belief_scope("TaskManager.await_input", f"task_id={task_id}"):
|
||||
task = self.tasks.get(task_id)
|
||||
if not task:
|
||||
raise ValueError(f"Task {task_id} not found")
|
||||
if task.status != TaskStatus.RUNNING:
|
||||
raise ValueError(f"Task {task_id} is not RUNNING (current: {task.status})")
|
||||
|
||||
task.status = TaskStatus.AWAITING_INPUT
|
||||
task.input_required = True
|
||||
task.input_request = input_request
|
||||
self._add_log(task_id, "INFO", "Task paused for user input", {"input_request": input_request})
|
||||
|
||||
self.persist_awaiting_input_tasks()
|
||||
# [/DEF:TaskManager.await_input:Function]
|
||||
|
||||
# [DEF:TaskManager.resume_task_with_password:Function]
|
||||
# @PURPOSE: Resume a task that is awaiting input with provided passwords.
|
||||
# @PRE: Task exists and is in AWAITING_INPUT state.
|
||||
# @POST: Task status changed to RUNNING, passwords injected, task resumed.
|
||||
# @PARAM: task_id (str) - ID of the task.
|
||||
# @PARAM: passwords (Dict[str, str]) - Mapping of database name to password.
|
||||
# @THROWS: ValueError if task not found, not awaiting input, or passwords invalid.
|
||||
def resume_task_with_password(self, task_id: str, passwords: Dict[str, str]) -> None:
|
||||
with belief_scope("TaskManager.resume_task_with_password", f"task_id={task_id}"):
|
||||
task = self.tasks.get(task_id)
|
||||
if not task:
|
||||
raise ValueError(f"Task {task_id} not found")
|
||||
if task.status != TaskStatus.AWAITING_INPUT:
|
||||
raise ValueError(f"Task {task_id} is not AWAITING_INPUT (current: {task.status})")
|
||||
|
||||
if not isinstance(passwords, dict) or not passwords:
|
||||
raise ValueError("Passwords must be a non-empty dictionary")
|
||||
|
||||
task.params["passwords"] = passwords
|
||||
task.input_required = False
|
||||
task.input_request = None
|
||||
task.status = TaskStatus.RUNNING
|
||||
self._add_log(task_id, "INFO", "Task resumed with passwords", {"databases": list(passwords.keys())})
|
||||
|
||||
if task_id in self.task_futures:
|
||||
self.task_futures[task_id].set_result(True)
|
||||
|
||||
self.persist_awaiting_input_tasks()
|
||||
# [/DEF:TaskManager.resume_task_with_password:Function]
|
||||
|
||||
# [/DEF:TaskManager:Class]
|
||||
# [/DEF:TaskManagerModule:Module]
|
||||
67
backend/src/core/task_manager/models.py
Normal file
67
backend/src/core/task_manager/models.py
Normal file
@@ -0,0 +1,67 @@
|
||||
# [DEF:TaskManagerModels:Module]
|
||||
# @SEMANTICS: task, models, pydantic, enum, state
|
||||
# @PURPOSE: Defines the data models and enumerations used by the Task Manager.
|
||||
# @LAYER: Core
|
||||
# @RELATION: Used by TaskManager and API routes.
|
||||
# @INVARIANT: Task IDs are immutable once created.
|
||||
# @CONSTRAINT: Must use Pydantic for data validation.
|
||||
|
||||
# [SECTION: IMPORTS]
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Dict, Any, List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:TaskStatus:Enum]
|
||||
# @SEMANTICS: task, status, state, enum
|
||||
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
|
||||
class TaskStatus(str, Enum):
|
||||
PENDING = "PENDING"
|
||||
RUNNING = "RUNNING"
|
||||
SUCCESS = "SUCCESS"
|
||||
FAILED = "FAILED"
|
||||
AWAITING_MAPPING = "AWAITING_MAPPING"
|
||||
AWAITING_INPUT = "AWAITING_INPUT"
|
||||
# [/DEF:TaskStatus:Enum]
|
||||
|
||||
# [DEF:LogEntry:Class]
|
||||
# @SEMANTICS: log, entry, record, pydantic
|
||||
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
|
||||
class LogEntry(BaseModel):
|
||||
timestamp: datetime = Field(default_factory=datetime.utcnow)
|
||||
level: str
|
||||
message: str
|
||||
context: Optional[Dict[str, Any]] = None
|
||||
# [/DEF:LogEntry:Class]
|
||||
|
||||
# [DEF:Task:Class]
|
||||
# @SEMANTICS: task, job, execution, state, pydantic
|
||||
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
|
||||
class Task(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||
plugin_id: str
|
||||
status: TaskStatus = TaskStatus.PENDING
|
||||
started_at: Optional[datetime] = None
|
||||
finished_at: Optional[datetime] = None
|
||||
user_id: Optional[str] = None
|
||||
logs: List[LogEntry] = Field(default_factory=list)
|
||||
params: Dict[str, Any] = Field(default_factory=dict)
|
||||
input_required: bool = False
|
||||
input_request: Optional[Dict[str, Any]] = None
|
||||
|
||||
# [DEF:Task.__init__:Function]
|
||||
# @PURPOSE: Initializes the Task model and validates input_request for AWAITING_INPUT status.
|
||||
# @PRE: If status is AWAITING_INPUT, input_request must be provided.
|
||||
# @POST: Task instance is created or ValueError is raised.
|
||||
# @PARAM: **data - Keyword arguments for model initialization.
|
||||
def __init__(self, **data):
|
||||
super().__init__(**data)
|
||||
if self.status == TaskStatus.AWAITING_INPUT and not self.input_request:
|
||||
raise ValueError("input_request is required when status is AWAITING_INPUT")
|
||||
# [/DEF:Task.__init__:Function]
|
||||
# [/DEF:Task:Class]
|
||||
|
||||
# [/DEF:TaskManagerModels:Module]
|
||||
127
backend/src/core/task_manager/persistence.py
Normal file
127
backend/src/core/task_manager/persistence.py
Normal file
@@ -0,0 +1,127 @@
|
||||
# [DEF:TaskPersistenceModule:Module]
|
||||
# @SEMANTICS: persistence, sqlite, task, storage
|
||||
# @PURPOSE: Handles the persistence of tasks, specifically those awaiting user input, to a SQLite database.
|
||||
# @LAYER: Core
|
||||
# @RELATION: Used by TaskManager to save and load tasks.
|
||||
# @INVARIANT: Database schema must match the Task model structure.
|
||||
# @CONSTRAINT: Uses synchronous SQLite operations (blocking), should be used carefully.
|
||||
|
||||
# [SECTION: IMPORTS]
|
||||
import sqlite3
|
||||
import json
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Any
|
||||
|
||||
from .models import Task, TaskStatus
|
||||
from ..logger import logger, belief_scope
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:TaskPersistenceService:Class]
|
||||
# @SEMANTICS: persistence, service, database
|
||||
# @PURPOSE: Provides methods to save and load tasks from a local SQLite database.
|
||||
class TaskPersistenceService:
|
||||
def __init__(self, db_path: Optional[Path] = None):
|
||||
if db_path is None:
|
||||
self.db_path = Path(__file__).parent.parent.parent.parent / "migrations.db"
|
||||
else:
|
||||
self.db_path = db_path
|
||||
self._ensure_db_exists()
|
||||
|
||||
# [DEF:TaskPersistenceService._ensure_db_exists:Function]
|
||||
# @PURPOSE: Ensures the database directory and table exist.
|
||||
# @PRE: None.
|
||||
# @POST: Database file and table are created if they didn't exist.
|
||||
def _ensure_db_exists(self) -> None:
|
||||
with belief_scope("TaskPersistenceService._ensure_db_exists"):
|
||||
self.db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
conn = sqlite3.connect(str(self.db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute("""
|
||||
CREATE TABLE IF NOT EXISTS persistent_tasks (
|
||||
id TEXT PRIMARY KEY,
|
||||
status TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL,
|
||||
input_request JSON,
|
||||
context JSON
|
||||
)
|
||||
""")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
# [/DEF:TaskPersistenceService._ensure_db_exists:Function]
|
||||
|
||||
# [DEF:TaskPersistenceService.persist_tasks:Function]
|
||||
# @PURPOSE: Persists a list of tasks to the database.
|
||||
# @PRE: Tasks list contains valid Task objects.
|
||||
# @POST: Tasks matching the criteria (AWAITING_INPUT) are saved/updated in the DB.
|
||||
# @PARAM: tasks (List[Task]) - The list of tasks to check and persist.
|
||||
def persist_tasks(self, tasks: List[Task]) -> None:
|
||||
with belief_scope("TaskPersistenceService.persist_tasks"):
|
||||
conn = sqlite3.connect(str(self.db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
count = 0
|
||||
for task in tasks:
|
||||
if task.status == TaskStatus.AWAITING_INPUT:
|
||||
cursor.execute("""
|
||||
INSERT OR REPLACE INTO persistent_tasks
|
||||
(id, status, created_at, updated_at, input_request, context)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""", (
|
||||
task.id,
|
||||
task.status.value,
|
||||
task.started_at.isoformat() if task.started_at else datetime.utcnow().isoformat(),
|
||||
datetime.utcnow().isoformat(),
|
||||
json.dumps(task.input_request) if task.input_request else None,
|
||||
json.dumps(task.params)
|
||||
))
|
||||
count += 1
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
logger.info(f"Persisted {count} tasks awaiting input.")
|
||||
# [/DEF:TaskPersistenceService.persist_tasks:Function]
|
||||
|
||||
# [DEF:TaskPersistenceService.load_tasks:Function]
|
||||
# @PURPOSE: Loads persisted tasks from the database.
|
||||
# @PRE: Database exists.
|
||||
# @POST: Returns a list of Task objects reconstructed from the DB.
|
||||
# @RETURN: List[Task] - The loaded tasks.
|
||||
def load_tasks(self) -> List[Task]:
|
||||
with belief_scope("TaskPersistenceService.load_tasks"):
|
||||
if not self.db_path.exists():
|
||||
return []
|
||||
|
||||
conn = sqlite3.connect(str(self.db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute("SELECT id, status, created_at, input_request, context FROM persistent_tasks")
|
||||
rows = cursor.fetchall()
|
||||
|
||||
loaded_tasks = []
|
||||
for row in rows:
|
||||
task_id, status, created_at, input_request_json, context_json = row
|
||||
try:
|
||||
task = Task(
|
||||
id=task_id,
|
||||
plugin_id="migration", # Default, assumes migration context for now
|
||||
status=TaskStatus(status),
|
||||
started_at=datetime.fromisoformat(created_at),
|
||||
input_required=True,
|
||||
input_request=json.loads(input_request_json) if input_request_json else None,
|
||||
params=json.loads(context_json) if context_json else {}
|
||||
)
|
||||
loaded_tasks.append(task)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to load task {task_id}: {e}")
|
||||
|
||||
conn.close()
|
||||
return loaded_tasks
|
||||
# [/DEF:TaskPersistenceService.load_tasks:Function]
|
||||
|
||||
# [/DEF:TaskPersistenceService:Class]
|
||||
|
||||
# [/DEF:TaskPersistenceModule:Module]
|
||||
@@ -21,7 +21,12 @@ def get_config_manager() -> ConfigManager:
|
||||
|
||||
plugin_dir = Path(__file__).parent / "plugins"
|
||||
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
|
||||
from .core.logger import logger
|
||||
logger.info(f"PluginLoader initialized with directory: {plugin_dir}")
|
||||
logger.info(f"Available plugins: {[config.name for config in plugin_loader.get_all_plugin_configs()]}")
|
||||
|
||||
task_manager = TaskManager(plugin_loader)
|
||||
logger.info("TaskManager initialized")
|
||||
|
||||
def get_plugin_loader() -> PluginLoader:
|
||||
"""Dependency injector for the PluginLoader."""
|
||||
|
||||
27
backend/src/models/dashboard.py
Normal file
27
backend/src/models/dashboard.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# [DEF:backend.src.models.dashboard:Module]
|
||||
# @SEMANTICS: dashboard, model, metadata, migration
|
||||
# @PURPOSE: Defines data models for dashboard metadata and selection.
|
||||
# @LAYER: Model
|
||||
# @RELATION: USED_BY -> backend.src.api.routes.migration
|
||||
|
||||
from pydantic import BaseModel
|
||||
from typing import List
|
||||
|
||||
# [DEF:DashboardMetadata:Class]
|
||||
# @PURPOSE: Represents a dashboard available for migration.
|
||||
class DashboardMetadata(BaseModel):
|
||||
id: int
|
||||
title: str
|
||||
last_modified: str
|
||||
status: str
|
||||
# [/DEF:DashboardMetadata]
|
||||
|
||||
# [DEF:DashboardSelection:Class]
|
||||
# @PURPOSE: Represents the user's selection of dashboards to migrate.
|
||||
class DashboardSelection(BaseModel):
|
||||
selected_ids: List[int]
|
||||
source_env_id: str
|
||||
target_env_id: str
|
||||
# [/DEF:DashboardSelection]
|
||||
|
||||
# [/DEF:backend.src.models.dashboard]
|
||||
@@ -87,34 +87,72 @@ class MigrationPlugin(PluginBase):
|
||||
}
|
||||
|
||||
async def execute(self, params: Dict[str, Any]):
|
||||
from_env = params["from_env"]
|
||||
to_env = params["to_env"]
|
||||
dashboard_regex = params["dashboard_regex"]
|
||||
source_env_id = params.get("source_env_id")
|
||||
target_env_id = params.get("target_env_id")
|
||||
selected_ids = params.get("selected_ids")
|
||||
|
||||
# Legacy support or alternative params
|
||||
from_env_name = params.get("from_env")
|
||||
to_env_name = params.get("to_env")
|
||||
dashboard_regex = params.get("dashboard_regex")
|
||||
|
||||
replace_db_config = params.get("replace_db_config", False)
|
||||
from_db_id = params.get("from_db_id")
|
||||
to_db_id = params.get("to_db_id")
|
||||
|
||||
logger = SupersetLogger(log_dir=Path.cwd() / "logs", console=True)
|
||||
logger.info(f"[MigrationPlugin][Entry] Starting migration from {from_env} to {to_env}.")
|
||||
logger.info(f"[MigrationPlugin][Entry] Starting migration task.")
|
||||
logger.info(f"[MigrationPlugin][Action] Params: {params}")
|
||||
|
||||
try:
|
||||
config_manager = get_config_manager()
|
||||
all_clients = setup_clients(logger, custom_envs=config_manager.get_environments())
|
||||
from_c = all_clients.get(from_env)
|
||||
to_c = all_clients.get(to_env)
|
||||
environments = config_manager.get_environments()
|
||||
|
||||
# Resolve environments
|
||||
src_env = None
|
||||
tgt_env = None
|
||||
|
||||
if source_env_id:
|
||||
src_env = next((e for e in environments if e.id == source_env_id), None)
|
||||
elif from_env_name:
|
||||
src_env = next((e for e in environments if e.name == from_env_name), None)
|
||||
|
||||
if target_env_id:
|
||||
tgt_env = next((e for e in environments if e.id == target_env_id), None)
|
||||
elif to_env_name:
|
||||
tgt_env = next((e for e in environments if e.name == to_env_name), None)
|
||||
|
||||
if not src_env or not tgt_env:
|
||||
raise ValueError(f"Could not resolve source or target environment. Source: {source_env_id or from_env_name}, Target: {target_env_id or to_env_name}")
|
||||
|
||||
from_env_name = src_env.name
|
||||
to_env_name = tgt_env.name
|
||||
|
||||
logger.info(f"[MigrationPlugin][State] Resolved environments: {from_env_name} -> {to_env_name}")
|
||||
|
||||
all_clients = setup_clients(logger, custom_envs=environments)
|
||||
from_c = all_clients.get(from_env_name)
|
||||
to_c = all_clients.get(to_env_name)
|
||||
|
||||
if not from_c or not to_c:
|
||||
raise ValueError(f"One or both environments ('{from_env}', '{to_env}') not found in configuration.")
|
||||
raise ValueError(f"Clients not initialized for environments: {from_env_name}, {to_env_name}")
|
||||
|
||||
_, all_dashboards = from_c.get_dashboards()
|
||||
|
||||
dashboards_to_migrate = []
|
||||
if selected_ids:
|
||||
dashboards_to_migrate = [d for d in all_dashboards if d["id"] in selected_ids]
|
||||
elif dashboard_regex:
|
||||
regex_str = str(dashboard_regex)
|
||||
dashboards_to_migrate = [
|
||||
d for d in all_dashboards if re.search(regex_str, d["dashboard_title"], re.IGNORECASE)
|
||||
]
|
||||
else:
|
||||
logger.warning("[MigrationPlugin][State] No selection criteria provided (selected_ids or dashboard_regex).")
|
||||
return
|
||||
|
||||
if not dashboards_to_migrate:
|
||||
logger.warning("[MigrationPlugin][State] No dashboards found matching the regex.")
|
||||
logger.warning("[MigrationPlugin][State] No dashboards found matching criteria.")
|
||||
return
|
||||
|
||||
# Fetch mappings from database
|
||||
@@ -123,8 +161,8 @@ class MigrationPlugin(PluginBase):
|
||||
db = SessionLocal()
|
||||
try:
|
||||
# Find environment IDs by name
|
||||
src_env = db.query(Environment).filter(Environment.name == from_env).first()
|
||||
tgt_env = db.query(Environment).filter(Environment.name == to_env).first()
|
||||
src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
|
||||
tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
|
||||
|
||||
if src_env and tgt_env:
|
||||
mappings = db.query(DatabaseMapping).filter(
|
||||
@@ -144,19 +182,12 @@ class MigrationPlugin(PluginBase):
|
||||
try:
|
||||
exported_content, _ = from_c.export_dashboard(dash_id)
|
||||
with create_temp_file(content=exported_content, dry_run=True, suffix=".zip", logger=logger) as tmp_zip_path:
|
||||
if not replace_db_config:
|
||||
to_c.import_dashboard(file_name=tmp_zip_path, dash_id=dash_id, dash_slug=dash_slug)
|
||||
else:
|
||||
# Check for missing mappings before transformation
|
||||
# This is a simplified check, in reality we'd check all YAMLs
|
||||
# For US3, we'll just use the engine and handle missing ones there
|
||||
# Always transform to strip databases to avoid password errors
|
||||
with create_temp_file(suffix=".zip", dry_run=True, logger=logger) as tmp_new_zip:
|
||||
# If we have missing mappings, we might need to pause
|
||||
# For now, let's assume the engine can tell us what's missing
|
||||
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping)
|
||||
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
|
||||
|
||||
if not success:
|
||||
# Signal missing mapping and wait
|
||||
if not success and replace_db_config:
|
||||
# Signal missing mapping and wait (only if we care about mappings)
|
||||
task_id = params.get("_task_id")
|
||||
if task_id:
|
||||
from ..dependencies import get_task_manager
|
||||
@@ -169,8 +200,8 @@ class MigrationPlugin(PluginBase):
|
||||
# (Mappings would be updated in task.params by resolve_task)
|
||||
db = SessionLocal()
|
||||
try:
|
||||
src_env = db.query(Environment).filter(Environment.name == from_env).first()
|
||||
tgt_env = db.query(Environment).filter(Environment.name == to_env).first()
|
||||
src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
|
||||
tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
|
||||
mappings = db.query(DatabaseMapping).filter(
|
||||
DatabaseMapping.source_env_id == src_env.id,
|
||||
DatabaseMapping.target_env_id == tgt_env.id
|
||||
@@ -178,7 +209,7 @@ class MigrationPlugin(PluginBase):
|
||||
db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
|
||||
finally:
|
||||
db.close()
|
||||
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping)
|
||||
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
|
||||
|
||||
if success:
|
||||
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
|
||||
@@ -187,6 +218,41 @@ class MigrationPlugin(PluginBase):
|
||||
|
||||
logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported.")
|
||||
except Exception as exc:
|
||||
# Check for password error
|
||||
error_msg = str(exc)
|
||||
if "Must provide a password for the database" in error_msg:
|
||||
# Extract database name (assuming format: "Must provide a password for the database 'PostgreSQL'")
|
||||
import re
|
||||
match = re.search(r"database '([^']+)'", error_msg)
|
||||
db_name = match.group(1) if match else "unknown"
|
||||
|
||||
# Get task manager
|
||||
from ..dependencies import get_task_manager
|
||||
tm = get_task_manager()
|
||||
task_id = params.get("_task_id")
|
||||
|
||||
if task_id:
|
||||
input_request = {
|
||||
"type": "database_password",
|
||||
"databases": [db_name],
|
||||
"error_message": error_msg
|
||||
}
|
||||
tm.await_input(task_id, input_request)
|
||||
|
||||
# Wait for user input
|
||||
await tm.wait_for_input(task_id)
|
||||
|
||||
# Resume with passwords
|
||||
task = tm.get_task(task_id)
|
||||
passwords = task.params.get("passwords", {})
|
||||
|
||||
# Retry import with password
|
||||
if passwords:
|
||||
logger.info(f"[MigrationPlugin][Action] Retrying import for {title} with provided passwords.")
|
||||
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug, passwords=passwords)
|
||||
logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported after password injection.")
|
||||
continue
|
||||
|
||||
logger.error(f"[MigrationPlugin][Failure] Failed to migrate dashboard {title}: {exc}", exc_info=True)
|
||||
|
||||
logger.info("[MigrationPlugin][Exit] Migration finished.")
|
||||
|
||||
@@ -24,7 +24,7 @@ export const options = {
|
||||
app: ({ head, body, assets, nonce, env }) => "<!DOCTYPE html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<link rel=\"icon\" href=\"" + assets + "/favicon.png\" />\n\t\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\t\t" + head + "\n\t</head>\n\t<body data-sveltekit-preload-data=\"hover\">\n\t\t<div style=\"display: contents\">" + body + "</div>\n\t</body>\n</html>\n",
|
||||
error: ({ status, message }) => "<!doctype html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<title>" + message + "</title>\n\n\t\t<style>\n\t\t\tbody {\n\t\t\t\t--bg: white;\n\t\t\t\t--fg: #222;\n\t\t\t\t--divider: #ccc;\n\t\t\t\tbackground: var(--bg);\n\t\t\t\tcolor: var(--fg);\n\t\t\t\tfont-family:\n\t\t\t\t\tsystem-ui,\n\t\t\t\t\t-apple-system,\n\t\t\t\t\tBlinkMacSystemFont,\n\t\t\t\t\t'Segoe UI',\n\t\t\t\t\tRoboto,\n\t\t\t\t\tOxygen,\n\t\t\t\t\tUbuntu,\n\t\t\t\t\tCantarell,\n\t\t\t\t\t'Open Sans',\n\t\t\t\t\t'Helvetica Neue',\n\t\t\t\t\tsans-serif;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tjustify-content: center;\n\t\t\t\theight: 100vh;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t.error {\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tmax-width: 32rem;\n\t\t\t\tmargin: 0 1rem;\n\t\t\t}\n\n\t\t\t.status {\n\t\t\t\tfont-weight: 200;\n\t\t\t\tfont-size: 3rem;\n\t\t\t\tline-height: 1;\n\t\t\t\tposition: relative;\n\t\t\t\ttop: -0.05rem;\n\t\t\t}\n\n\t\t\t.message {\n\t\t\t\tborder-left: 1px solid var(--divider);\n\t\t\t\tpadding: 0 0 0 1rem;\n\t\t\t\tmargin: 0 0 0 1rem;\n\t\t\t\tmin-height: 2.5rem;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t}\n\n\t\t\t.message h1 {\n\t\t\t\tfont-weight: 400;\n\t\t\t\tfont-size: 1em;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t@media (prefers-color-scheme: dark) {\n\t\t\t\tbody {\n\t\t\t\t\t--bg: #222;\n\t\t\t\t\t--fg: #ddd;\n\t\t\t\t\t--divider: #666;\n\t\t\t\t}\n\t\t\t}\n\t\t</style>\n\t</head>\n\t<body>\n\t\t<div class=\"error\">\n\t\t\t<span class=\"status\">" + status + "</span>\n\t\t\t<div class=\"message\">\n\t\t\t\t<h1>" + message + "</h1>\n\t\t\t</div>\n\t\t</div>\n\t</body>\n</html>\n"
|
||||
},
|
||||
version_hash: "n7gbte"
|
||||
version_hash: "oj9twc"
|
||||
};
|
||||
|
||||
export async function get_hooks() {
|
||||
|
||||
205
frontend/src/components/DashboardGrid.svelte
Normal file
205
frontend/src/components/DashboardGrid.svelte
Normal file
@@ -0,0 +1,205 @@
|
||||
<!-- [DEF:DashboardGrid:Component] -->
|
||||
<!--
|
||||
@SEMANTICS: dashboard, grid, selection, pagination
|
||||
@PURPOSE: Displays a grid of dashboards with selection and pagination.
|
||||
@LAYER: Component
|
||||
@RELATION: USED_BY -> frontend/src/routes/migration/+page.svelte
|
||||
|
||||
@INVARIANT: Selected IDs must be a subset of available dashboards.
|
||||
-->
|
||||
|
||||
<script lang="ts">
|
||||
// [SECTION: IMPORTS]
|
||||
import { createEventDispatcher } from 'svelte';
|
||||
import type { DashboardMetadata } from '../types/dashboard';
|
||||
// [/SECTION]
|
||||
|
||||
// [SECTION: PROPS]
|
||||
export let dashboards: DashboardMetadata[] = [];
|
||||
export let selectedIds: number[] = [];
|
||||
// [/SECTION]
|
||||
|
||||
// [SECTION: STATE]
|
||||
let filterText = "";
|
||||
let currentPage = 0;
|
||||
let pageSize = 20;
|
||||
let sortColumn: keyof DashboardMetadata = "title";
|
||||
let sortDirection: "asc" | "desc" = "asc";
|
||||
// [/SECTION]
|
||||
|
||||
// [SECTION: DERIVED]
|
||||
$: filteredDashboards = dashboards.filter(d =>
|
||||
d.title.toLowerCase().includes(filterText.toLowerCase())
|
||||
);
|
||||
|
||||
$: sortedDashboards = [...filteredDashboards].sort((a, b) => {
|
||||
let aVal = a[sortColumn];
|
||||
let bVal = b[sortColumn];
|
||||
if (sortColumn === "id") {
|
||||
aVal = Number(aVal);
|
||||
bVal = Number(bVal);
|
||||
}
|
||||
if (aVal < bVal) return sortDirection === "asc" ? -1 : 1;
|
||||
if (aVal > bVal) return sortDirection === "asc" ? 1 : -1;
|
||||
return 0;
|
||||
});
|
||||
|
||||
$: paginatedDashboards = sortedDashboards.slice(
|
||||
currentPage * pageSize,
|
||||
(currentPage + 1) * pageSize
|
||||
);
|
||||
|
||||
$: totalPages = Math.ceil(sortedDashboards.length / pageSize);
|
||||
|
||||
$: allSelected = paginatedDashboards.length > 0 && paginatedDashboards.every(d => selectedIds.includes(d.id));
|
||||
$: someSelected = paginatedDashboards.some(d => selectedIds.includes(d.id));
|
||||
// [/SECTION]
|
||||
|
||||
// [SECTION: EVENTS]
|
||||
const dispatch = createEventDispatcher<{ selectionChanged: number[] }>();
|
||||
// [/SECTION]
|
||||
|
||||
// [DEF:handleSort:Function]
|
||||
// @PURPOSE: Toggles sort direction or changes sort column.
|
||||
function handleSort(column: keyof DashboardMetadata) {
|
||||
if (sortColumn === column) {
|
||||
sortDirection = sortDirection === "asc" ? "desc" : "asc";
|
||||
} else {
|
||||
sortColumn = column;
|
||||
sortDirection = "asc";
|
||||
}
|
||||
}
|
||||
// [/DEF:handleSort]
|
||||
|
||||
// [DEF:handleSelectionChange:Function]
|
||||
// @PURPOSE: Handles individual checkbox changes.
|
||||
function handleSelectionChange(id: number, checked: boolean) {
|
||||
let newSelected = [...selectedIds];
|
||||
if (checked) {
|
||||
if (!newSelected.includes(id)) newSelected.push(id);
|
||||
} else {
|
||||
newSelected = newSelected.filter(sid => sid !== id);
|
||||
}
|
||||
selectedIds = newSelected;
|
||||
dispatch('selectionChanged', newSelected);
|
||||
}
|
||||
// [/DEF:handleSelectionChange]
|
||||
|
||||
// [DEF:handleSelectAll:Function]
|
||||
// @PURPOSE: Handles select all checkbox.
|
||||
function handleSelectAll(checked: boolean) {
|
||||
let newSelected = [...selectedIds];
|
||||
if (checked) {
|
||||
paginatedDashboards.forEach(d => {
|
||||
if (!newSelected.includes(d.id)) newSelected.push(d.id);
|
||||
});
|
||||
} else {
|
||||
paginatedDashboards.forEach(d => {
|
||||
newSelected = newSelected.filter(sid => sid !== d.id);
|
||||
});
|
||||
}
|
||||
selectedIds = newSelected;
|
||||
dispatch('selectionChanged', newSelected);
|
||||
}
|
||||
// [/DEF:handleSelectAll]
|
||||
|
||||
// [DEF:goToPage:Function]
|
||||
// @PURPOSE: Changes current page.
|
||||
function goToPage(page: number) {
|
||||
if (page >= 0 && page < totalPages) {
|
||||
currentPage = page;
|
||||
}
|
||||
}
|
||||
// [/DEF:goToPage]
|
||||
|
||||
</script>
|
||||
|
||||
<!-- [SECTION: TEMPLATE] -->
|
||||
<div class="dashboard-grid">
|
||||
<!-- Filter Input -->
|
||||
<div class="mb-4">
|
||||
<input
|
||||
type="text"
|
||||
bind:value={filterText}
|
||||
placeholder="Search dashboards..."
|
||||
class="w-full px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Grid/Table -->
|
||||
<div class="overflow-x-auto">
|
||||
<table class="min-w-full bg-white border border-gray-300">
|
||||
<thead class="bg-gray-50">
|
||||
<tr>
|
||||
<th class="px-4 py-2 border-b">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={allSelected}
|
||||
indeterminate={someSelected && !allSelected}
|
||||
on:change={(e) => handleSelectAll((e.target as HTMLInputElement).checked)}
|
||||
/>
|
||||
</th>
|
||||
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('title')}>
|
||||
Title {sortColumn === 'title' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
|
||||
</th>
|
||||
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('last_modified')}>
|
||||
Last Modified {sortColumn === 'last_modified' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
|
||||
</th>
|
||||
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('status')}>
|
||||
Status {sortColumn === 'status' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each paginatedDashboards as dashboard (dashboard.id)}
|
||||
<tr class="hover:bg-gray-50">
|
||||
<td class="px-4 py-2 border-b">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={selectedIds.includes(dashboard.id)}
|
||||
on:change={(e) => handleSelectionChange(dashboard.id, (e.target as HTMLInputElement).checked)}
|
||||
/>
|
||||
</td>
|
||||
<td class="px-4 py-2 border-b">{dashboard.title}</td>
|
||||
<td class="px-4 py-2 border-b">{new Date(dashboard.last_modified).toLocaleDateString()}</td>
|
||||
<td class="px-4 py-2 border-b">
|
||||
<span class="px-2 py-1 text-xs font-medium rounded-full {dashboard.status === 'published' ? 'bg-green-100 text-green-800' : 'bg-gray-100 text-gray-800'}">
|
||||
{dashboard.status}
|
||||
</span>
|
||||
</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Pagination Controls -->
|
||||
<div class="flex items-center justify-between mt-4">
|
||||
<div class="text-sm text-gray-700">
|
||||
Showing {currentPage * pageSize + 1} to {Math.min((currentPage + 1) * pageSize, sortedDashboards.length)} of {sortedDashboards.length} dashboards
|
||||
</div>
|
||||
<div class="flex space-x-2">
|
||||
<button
|
||||
class="px-3 py-1 text-sm border border-gray-300 rounded-md hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
disabled={currentPage === 0}
|
||||
on:click={() => goToPage(currentPage - 1)}
|
||||
>
|
||||
Previous
|
||||
</button>
|
||||
<button
|
||||
class="px-3 py-1 text-sm border border-gray-300 rounded-md hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
disabled={currentPage >= totalPages - 1}
|
||||
on:click={() => goToPage(currentPage + 1)}
|
||||
>
|
||||
Next
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- [/SECTION] -->
|
||||
|
||||
<style>
|
||||
/* Component styles */
|
||||
</style>
|
||||
|
||||
<!-- [/DEF:DashboardGrid] -->
|
||||
@@ -36,8 +36,9 @@
|
||||
|
||||
<!-- [SECTION: TEMPLATE] -->
|
||||
<div class="flex flex-col space-y-1">
|
||||
<label class="text-sm font-medium text-gray-700">{label}</label>
|
||||
<label for="env-select" class="text-sm font-medium text-gray-700">{label}</label>
|
||||
<select
|
||||
id="env-select"
|
||||
class="block w-full pl-3 pr-10 py-2 text-base border-gray-300 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm rounded-md"
|
||||
value={selectedId}
|
||||
on:change={handleSelect}
|
||||
|
||||
123
frontend/src/components/PasswordPrompt.svelte
Normal file
123
frontend/src/components/PasswordPrompt.svelte
Normal file
@@ -0,0 +1,123 @@
|
||||
<!-- [DEF:PasswordPrompt:Component] -->
|
||||
<!--
|
||||
@SEMANTICS: password, prompt, modal, input, security
|
||||
@PURPOSE: A modal component to prompt the user for database passwords when a migration task is paused.
|
||||
@LAYER: UI
|
||||
@RELATION: USES -> frontend/src/lib/api.js (inferred)
|
||||
@RELATION: EMITS -> resume, cancel
|
||||
-->
|
||||
<script>
|
||||
import { createEventDispatcher } from 'svelte';
|
||||
|
||||
export let show = false;
|
||||
export let databases = []; // List of database names requiring passwords
|
||||
export let errorMessage = "";
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
|
||||
let passwords = {};
|
||||
let submitting = false;
|
||||
|
||||
function handleSubmit() {
|
||||
if (submitting) return;
|
||||
|
||||
// Validate all passwords entered
|
||||
const missing = databases.filter(db => !passwords[db]);
|
||||
if (missing.length > 0) {
|
||||
alert(`Please enter passwords for: ${missing.join(', ')}`);
|
||||
return;
|
||||
}
|
||||
|
||||
submitting = true;
|
||||
dispatch('resume', { passwords });
|
||||
// Reset submitting state is handled by parent or on close
|
||||
}
|
||||
|
||||
function handleCancel() {
|
||||
dispatch('cancel');
|
||||
show = false;
|
||||
}
|
||||
|
||||
// Reset passwords when modal opens/closes
|
||||
$: if (!show) {
|
||||
passwords = {};
|
||||
submitting = false;
|
||||
}
|
||||
</script>
|
||||
|
||||
{#if show}
|
||||
<div class="fixed inset-0 z-50 overflow-y-auto" aria-labelledby="modal-title" role="dialog" aria-modal="true">
|
||||
<div class="flex items-end justify-center min-h-screen pt-4 px-4 pb-20 text-center sm:block sm:p-0">
|
||||
<!-- Background overlay -->
|
||||
<div class="fixed inset-0 bg-gray-500 bg-opacity-75 transition-opacity" aria-hidden="true" on:click={handleCancel}></div>
|
||||
|
||||
<span class="hidden sm:inline-block sm:align-middle sm:h-screen" aria-hidden="true">​</span>
|
||||
|
||||
<div class="inline-block align-bottom bg-white rounded-lg text-left overflow-hidden shadow-xl transform transition-all sm:my-8 sm:align-middle sm:max-w-lg sm:w-full">
|
||||
<div class="bg-white px-4 pt-5 pb-4 sm:p-6 sm:pb-4">
|
||||
<div class="sm:flex sm:items-start">
|
||||
<div class="mx-auto flex-shrink-0 flex items-center justify-center h-12 w-12 rounded-full bg-red-100 sm:mx-0 sm:h-10 sm:w-10">
|
||||
<!-- Heroicon name: outline/lock-closed -->
|
||||
<svg class="h-6 w-6 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor" aria-hidden="true">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 15v2m-6 4h12a2 2 0 002-2v-6a2 2 0 00-2-2H6a2 2 0 00-2 2v6a2 2 0 002 2zm10-10V7a4 4 0 00-8 0v4h8z" />
|
||||
</svg>
|
||||
</div>
|
||||
<div class="mt-3 text-center sm:mt-0 sm:ml-4 sm:text-left w-full">
|
||||
<h3 class="text-lg leading-6 font-medium text-gray-900" id="modal-title">
|
||||
Database Password Required
|
||||
</h3>
|
||||
<div class="mt-2">
|
||||
<p class="text-sm text-gray-500 mb-4">
|
||||
The migration process requires passwords for the following databases to proceed.
|
||||
</p>
|
||||
|
||||
{#if errorMessage}
|
||||
<div class="mb-4 p-2 bg-red-50 text-red-700 text-xs rounded border border-red-200">
|
||||
Error: {errorMessage}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<form on:submit|preventDefault={handleSubmit} class="space-y-4">
|
||||
{#each databases as dbName}
|
||||
<div>
|
||||
<label for="password-{dbName}" class="block text-sm font-medium text-gray-700">
|
||||
Password for {dbName}
|
||||
</label>
|
||||
<input
|
||||
type="password"
|
||||
id="password-{dbName}"
|
||||
bind:value={passwords[dbName]}
|
||||
class="mt-1 block w-full border-gray-300 rounded-md shadow-sm focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm p-2 border"
|
||||
placeholder="Enter password"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
{/each}
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-gray-50 px-4 py-3 sm:px-6 sm:flex sm:flex-row-reverse">
|
||||
<button
|
||||
type="button"
|
||||
class="w-full inline-flex justify-center rounded-md border border-transparent shadow-sm px-4 py-2 bg-indigo-600 text-base font-medium text-white hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 sm:ml-3 sm:w-auto sm:text-sm disabled:opacity-50"
|
||||
on:click={handleSubmit}
|
||||
disabled={submitting}
|
||||
>
|
||||
{submitting ? 'Resuming...' : 'Resume Migration'}
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
class="mt-3 w-full inline-flex justify-center rounded-md border border-gray-300 shadow-sm px-4 py-2 bg-white text-base font-medium text-gray-700 hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 sm:mt-0 sm:ml-3 sm:w-auto sm:text-sm"
|
||||
on:click={handleCancel}
|
||||
disabled={submitting}
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
<!-- [/DEF:PasswordPrompt] -->
|
||||
126
frontend/src/components/TaskHistory.svelte
Normal file
126
frontend/src/components/TaskHistory.svelte
Normal file
@@ -0,0 +1,126 @@
|
||||
<!-- [DEF:TaskHistory:Component] -->
|
||||
<!--
|
||||
@SEMANTICS: task, history, list, status, monitoring
|
||||
@PURPOSE: Displays a list of recent tasks with their status and allows selecting them for viewing logs.
|
||||
@LAYER: UI
|
||||
@RELATION: USES -> frontend/src/lib/stores.js
|
||||
@RELATION: USES -> frontend/src/lib/api.js (inferred)
|
||||
-->
|
||||
<script>
|
||||
import { onMount, onDestroy } from 'svelte';
|
||||
import { selectedTask } from '../lib/stores.js';
|
||||
|
||||
let tasks = [];
|
||||
let loading = true;
|
||||
let error = "";
|
||||
let interval;
|
||||
|
||||
async function fetchTasks() {
|
||||
try {
|
||||
const res = await fetch('/api/tasks?limit=10');
|
||||
if (!res.ok) throw new Error('Failed to fetch tasks');
|
||||
tasks = await res.json();
|
||||
|
||||
// Update selected task if it exists in the list (for status updates)
|
||||
if ($selectedTask) {
|
||||
const updatedTask = tasks.find(t => t.id === $selectedTask.id);
|
||||
if (updatedTask && updatedTask.status !== $selectedTask.status) {
|
||||
selectedTask.set(updatedTask);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
error = e.message;
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
|
||||
function selectTask(task) {
|
||||
selectedTask.set(task);
|
||||
}
|
||||
|
||||
function getStatusColor(status) {
|
||||
switch (status) {
|
||||
case 'SUCCESS': return 'bg-green-100 text-green-800';
|
||||
case 'FAILED': return 'bg-red-100 text-red-800';
|
||||
case 'RUNNING': return 'bg-blue-100 text-blue-800';
|
||||
case 'AWAITING_INPUT': return 'bg-orange-100 text-orange-800';
|
||||
case 'AWAITING_MAPPING': return 'bg-yellow-100 text-yellow-800';
|
||||
default: return 'bg-gray-100 text-gray-800';
|
||||
}
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
fetchTasks();
|
||||
interval = setInterval(fetchTasks, 5000); // Poll every 5s
|
||||
});
|
||||
|
||||
onDestroy(() => {
|
||||
clearInterval(interval);
|
||||
});
|
||||
</script>
|
||||
|
||||
<div class="bg-white shadow overflow-hidden sm:rounded-lg mb-8">
|
||||
<div class="px-4 py-5 sm:px-6 flex justify-between items-center">
|
||||
<h3 class="text-lg leading-6 font-medium text-gray-900">
|
||||
Recent Tasks
|
||||
</h3>
|
||||
<button
|
||||
on:click={fetchTasks}
|
||||
class="text-sm text-indigo-600 hover:text-indigo-900 focus:outline-none"
|
||||
>
|
||||
Refresh
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{#if loading && tasks.length === 0}
|
||||
<div class="p-4 text-center text-gray-500">Loading tasks...</div>
|
||||
{:else if error}
|
||||
<div class="p-4 text-center text-red-500">{error}</div>
|
||||
{:else if tasks.length === 0}
|
||||
<div class="p-4 text-center text-gray-500">No recent tasks found.</div>
|
||||
{:else}
|
||||
<ul class="divide-y divide-gray-200">
|
||||
{#each tasks as task}
|
||||
<li>
|
||||
<button
|
||||
class="w-full text-left block hover:bg-gray-50 focus:outline-none focus:bg-gray-50 transition duration-150 ease-in-out"
|
||||
class:bg-indigo-50={$selectedTask && $selectedTask.id === task.id}
|
||||
on:click={() => selectTask(task)}
|
||||
>
|
||||
<div class="px-4 py-4 sm:px-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<p class="text-sm font-medium text-indigo-600 truncate">
|
||||
{task.plugin_id}
|
||||
<span class="text-gray-500 text-xs ml-2">({task.id.slice(0, 8)})</span>
|
||||
</p>
|
||||
<div class="ml-2 flex-shrink-0 flex">
|
||||
<p class="px-2 inline-flex text-xs leading-5 font-semibold rounded-full {getStatusColor(task.status)}">
|
||||
{task.status}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="mt-2 sm:flex sm:justify-between">
|
||||
<div class="sm:flex">
|
||||
<p class="flex items-center text-sm text-gray-500">
|
||||
{#if task.params.from_env && task.params.to_env}
|
||||
{task.params.from_env} → {task.params.to_env}
|
||||
{:else}
|
||||
Params: {Object.keys(task.params).length} keys
|
||||
{/if}
|
||||
</p>
|
||||
</div>
|
||||
<div class="mt-2 flex items-center text-sm text-gray-500 sm:mt-0">
|
||||
<p>
|
||||
Started: {new Date(task.started_at || task.created_at || Date.now()).toLocaleString()}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</button>
|
||||
</li>
|
||||
{/each}
|
||||
</ul>
|
||||
{/if}
|
||||
</div>
|
||||
<!-- [/DEF:TaskHistory] -->
|
||||
@@ -16,6 +16,7 @@
|
||||
import { getWsUrl } from '../lib/api.js';
|
||||
import { addToast } from '../lib/toasts.js';
|
||||
import MissingMappingModal from './MissingMappingModal.svelte';
|
||||
import PasswordPrompt from './PasswordPrompt.svelte';
|
||||
// [/SECTION]
|
||||
|
||||
let ws;
|
||||
@@ -26,11 +27,14 @@
|
||||
let reconnectTimeout;
|
||||
let waitingForData = false;
|
||||
let dataTimeout;
|
||||
let connectionStatus = 'disconnected'; // 'connecting', 'connected', 'disconnected', 'waiting', 'completed', 'awaiting_mapping'
|
||||
let connectionStatus = 'disconnected'; // 'connecting', 'connected', 'disconnected', 'waiting', 'completed', 'awaiting_mapping', 'awaiting_input'
|
||||
let showMappingModal = false;
|
||||
let missingDbInfo = { name: '', uuid: '' };
|
||||
let targetDatabases = [];
|
||||
|
||||
let showPasswordPrompt = false;
|
||||
let passwordPromptData = { databases: [], errorMessage: '' };
|
||||
|
||||
// [DEF:connect:Function]
|
||||
/**
|
||||
* @purpose Establishes WebSocket connection with exponential backoff.
|
||||
@@ -73,6 +77,20 @@
|
||||
showMappingModal = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for password request via log context or message
|
||||
// Note: The backend logs "Task paused for user input" with context
|
||||
if (logEntry.message && logEntry.message.includes('Task paused for user input') && logEntry.context && logEntry.context.input_request) {
|
||||
const request = logEntry.context.input_request;
|
||||
if (request.type === 'database_password') {
|
||||
connectionStatus = 'awaiting_input';
|
||||
passwordPromptData = {
|
||||
databases: request.databases || [],
|
||||
errorMessage: request.error_message || ''
|
||||
};
|
||||
showPasswordPrompt = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ws.onerror = (error) => {
|
||||
@@ -159,6 +177,25 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function handlePasswordResume(event) {
|
||||
const task = get(selectedTask);
|
||||
const { passwords } = event.detail;
|
||||
|
||||
try {
|
||||
await fetch(`/api/tasks/${task.id}/resume`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ passwords })
|
||||
});
|
||||
|
||||
showPasswordPrompt = false;
|
||||
connectionStatus = 'connected';
|
||||
addToast('Passwords submitted, resuming migration...', 'success');
|
||||
} catch (e) {
|
||||
addToast('Failed to resume task: ' + e.message, 'error');
|
||||
}
|
||||
}
|
||||
|
||||
function startDataTimeout() {
|
||||
waitingForData = false;
|
||||
dataTimeout = setTimeout(() => {
|
||||
@@ -228,6 +265,9 @@
|
||||
{:else if connectionStatus === 'awaiting_mapping'}
|
||||
<span class="h-3 w-3 rounded-full bg-orange-500 animate-pulse"></span>
|
||||
<span class="text-xs text-gray-500">Awaiting Mapping</span>
|
||||
{:else if connectionStatus === 'awaiting_input'}
|
||||
<span class="h-3 w-3 rounded-full bg-orange-500 animate-pulse"></span>
|
||||
<span class="text-xs text-gray-500">Awaiting Input</span>
|
||||
{:else}
|
||||
<span class="h-3 w-3 rounded-full bg-red-500"></span>
|
||||
<span class="text-xs text-gray-500">Disconnected</span>
|
||||
@@ -263,6 +303,14 @@
|
||||
on:resolve={handleMappingResolve}
|
||||
on:cancel={() => { connectionStatus = 'disconnected'; ws.close(); }}
|
||||
/>
|
||||
|
||||
<PasswordPrompt
|
||||
bind:show={showPasswordPrompt}
|
||||
databases={passwordPromptData.databases}
|
||||
errorMessage={passwordPromptData.errorMessage}
|
||||
on:resume={handlePasswordResume}
|
||||
on:cancel={() => { showPasswordPrompt = false; }}
|
||||
/>
|
||||
<!-- [/SECTION] -->
|
||||
|
||||
<!-- [/DEF:TaskRunner] -->
|
||||
|
||||
@@ -12,16 +12,27 @@
|
||||
// [SECTION: IMPORTS]
|
||||
import { onMount } from 'svelte';
|
||||
import EnvSelector from '../../components/EnvSelector.svelte';
|
||||
import DashboardGrid from '../../components/DashboardGrid.svelte';
|
||||
import MappingTable from '../../components/MappingTable.svelte';
|
||||
import MissingMappingModal from '../../components/MissingMappingModal.svelte';
|
||||
import TaskHistory from '../../components/TaskHistory.svelte';
|
||||
import type { DashboardMetadata, DashboardSelection } from '../../types/dashboard';
|
||||
// [/SECTION]
|
||||
|
||||
// [SECTION: STATE]
|
||||
let environments = [];
|
||||
let environments: any[] = [];
|
||||
let sourceEnvId = "";
|
||||
let targetEnvId = "";
|
||||
let dashboardRegex = ".*";
|
||||
let replaceDb = false;
|
||||
let loading = true;
|
||||
let error = "";
|
||||
let dashboards: DashboardMetadata[] = [];
|
||||
let selectedDashboardIds: number[] = [];
|
||||
let sourceDatabases: any[] = [];
|
||||
let targetDatabases: any[] = [];
|
||||
let mappings: any[] = [];
|
||||
let suggestions: any[] = [];
|
||||
let fetchingDbs = false;
|
||||
// [/SECTION]
|
||||
|
||||
// [DEF:fetchEnvironments:Function]
|
||||
@@ -42,8 +53,100 @@
|
||||
}
|
||||
// [/DEF:fetchEnvironments]
|
||||
|
||||
// [DEF:fetchDashboards:Function]
|
||||
/**
|
||||
* @purpose Fetches dashboards for the selected source environment.
|
||||
* @param envId The environment ID.
|
||||
* @post dashboards state is updated.
|
||||
*/
|
||||
async function fetchDashboards(envId: string) {
|
||||
try {
|
||||
const response = await fetch(`/api/environments/${envId}/dashboards`);
|
||||
if (!response.ok) throw new Error('Failed to fetch dashboards');
|
||||
dashboards = await response.json();
|
||||
selectedDashboardIds = []; // Reset selection when env changes
|
||||
} catch (e) {
|
||||
error = e.message;
|
||||
dashboards = [];
|
||||
}
|
||||
}
|
||||
// [/DEF:fetchDashboards]
|
||||
|
||||
onMount(fetchEnvironments);
|
||||
|
||||
// Reactive: fetch dashboards when source env changes
|
||||
$: if (sourceEnvId) fetchDashboards(sourceEnvId);
|
||||
|
||||
// [DEF:fetchDatabases:Function]
|
||||
/**
|
||||
* @purpose Fetches databases from both environments and gets suggestions.
|
||||
*/
|
||||
async function fetchDatabases() {
|
||||
if (!sourceEnvId || !targetEnvId) return;
|
||||
fetchingDbs = true;
|
||||
error = "";
|
||||
|
||||
try {
|
||||
const [srcRes, tgtRes, mapRes, sugRes] = await Promise.all([
|
||||
fetch(`/api/environments/${sourceEnvId}/databases`),
|
||||
fetch(`/api/environments/${targetEnvId}/databases`),
|
||||
fetch(`/api/mappings?source_env_id=${sourceEnvId}&target_env_id=${targetEnvId}`),
|
||||
fetch(`/api/mappings/suggest`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ source_env_id: sourceEnvId, target_env_id: targetEnvId })
|
||||
})
|
||||
]);
|
||||
|
||||
if (!srcRes.ok || !tgtRes.ok) throw new Error('Failed to fetch databases from environments');
|
||||
|
||||
sourceDatabases = await srcRes.json();
|
||||
targetDatabases = await tgtRes.json();
|
||||
mappings = await mapRes.json();
|
||||
suggestions = await sugRes.json();
|
||||
} catch (e) {
|
||||
error = e.message;
|
||||
} finally {
|
||||
fetchingDbs = false;
|
||||
}
|
||||
}
|
||||
// [/DEF:fetchDatabases]
|
||||
|
||||
// [DEF:handleMappingUpdate:Function]
|
||||
/**
|
||||
* @purpose Saves a mapping to the backend.
|
||||
*/
|
||||
async function handleMappingUpdate(event: CustomEvent) {
|
||||
const { sourceUuid, targetUuid } = event.detail;
|
||||
const sDb = sourceDatabases.find(d => d.uuid === sourceUuid);
|
||||
const tDb = targetDatabases.find(d => d.uuid === targetUuid);
|
||||
|
||||
if (!sDb || !tDb) return;
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/mappings', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
source_env_id: sourceEnvId,
|
||||
target_env_id: targetEnvId,
|
||||
source_db_uuid: sourceUuid,
|
||||
target_db_uuid: targetUuid,
|
||||
source_db_name: sDb.database_name,
|
||||
target_db_name: tDb.database_name
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) throw new Error('Failed to save mapping');
|
||||
|
||||
const savedMapping = await response.json();
|
||||
mappings = [...mappings.filter(m => m.source_db_uuid !== sourceUuid), savedMapping];
|
||||
} catch (e) {
|
||||
error = e.message;
|
||||
}
|
||||
}
|
||||
// [/DEF:handleMappingUpdate]
|
||||
|
||||
// [DEF:startMigration:Function]
|
||||
/**
|
||||
* @purpose Starts the migration process.
|
||||
@@ -58,10 +161,33 @@
|
||||
error = "Source and target environments must be different.";
|
||||
return;
|
||||
}
|
||||
if (selectedDashboardIds.length === 0) {
|
||||
error = "Please select at least one dashboard to migrate.";
|
||||
return;
|
||||
}
|
||||
|
||||
error = "";
|
||||
console.log(`[MigrationDashboard][Action] Starting migration from ${sourceEnvId} to ${targetEnvId} (Replace DB: ${replaceDb})`);
|
||||
// TODO: Implement actual migration trigger in US3
|
||||
try {
|
||||
const selection: DashboardSelection = {
|
||||
selected_ids: selectedDashboardIds,
|
||||
source_env_id: sourceEnvId,
|
||||
target_env_id: targetEnvId
|
||||
};
|
||||
console.log(`[MigrationDashboard][Action] Starting migration with selection:`, selection);
|
||||
const response = await fetch('/api/migration/execute', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(selection)
|
||||
});
|
||||
console.log(`[MigrationDashboard][Action] API response status: ${response.status}`);
|
||||
if (!response.ok) throw new Error(`Failed to start migration: ${response.status} ${response.statusText}`);
|
||||
const result = await response.json();
|
||||
console.log(`[MigrationDashboard][Action] Migration started: ${result.task_id} - ${result.message}`);
|
||||
// TODO: Show success message or redirect to task status
|
||||
} catch (e) {
|
||||
console.error(`[MigrationDashboard][Failure] Migration failed:`, e);
|
||||
error = e.message;
|
||||
}
|
||||
}
|
||||
// [/DEF:startMigration]
|
||||
</script>
|
||||
@@ -70,6 +196,8 @@
|
||||
<div class="max-w-4xl mx-auto p-6">
|
||||
<h1 class="text-2xl font-bold mb-6">Migration Dashboard</h1>
|
||||
|
||||
<TaskHistory />
|
||||
|
||||
{#if loading}
|
||||
<p>Loading environments...</p>
|
||||
{:else if error}
|
||||
@@ -91,23 +219,28 @@
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- [DEF:DashboardSelectionSection] -->
|
||||
<div class="mb-8">
|
||||
<label for="dashboard-regex" class="block text-sm font-medium text-gray-700 mb-1">Dashboard Regex</label>
|
||||
<input
|
||||
id="dashboard-regex"
|
||||
type="text"
|
||||
bind:value={dashboardRegex}
|
||||
placeholder="e.g. ^Finance Dashboard$"
|
||||
class="shadow-sm focus:ring-indigo-500 focus:border-indigo-500 block w-full sm:text-sm border-gray-300 rounded-md"
|
||||
/>
|
||||
<p class="mt-1 text-sm text-gray-500">Regular expression to filter dashboards to migrate.</p>
|
||||
</div>
|
||||
<h2 class="text-lg font-medium mb-4">Select Dashboards</h2>
|
||||
|
||||
<div class="flex items-center mb-8">
|
||||
{#if sourceEnvId}
|
||||
<DashboardGrid
|
||||
{dashboards}
|
||||
bind:selectedIds={selectedDashboardIds}
|
||||
/>
|
||||
{:else}
|
||||
<p class="text-gray-500 italic">Select a source environment to view dashboards.</p>
|
||||
{/if}
|
||||
</div>
|
||||
<!-- [/DEF:DashboardSelectionSection] -->
|
||||
|
||||
|
||||
<div class="flex items-center mb-4">
|
||||
<input
|
||||
id="replace-db"
|
||||
type="checkbox"
|
||||
bind:checked={replaceDb}
|
||||
on:change={() => { if (replaceDb && sourceDatabases.length === 0) fetchDatabases(); }}
|
||||
class="h-4 w-4 text-indigo-600 focus:ring-indigo-500 border-gray-300 rounded"
|
||||
/>
|
||||
<label for="replace-db" class="ml-2 block text-sm text-gray-900">
|
||||
@@ -115,9 +248,33 @@
|
||||
</label>
|
||||
</div>
|
||||
|
||||
{#if replaceDb}
|
||||
<div class="mb-8 p-4 border rounded-md bg-gray-50">
|
||||
<h3 class="text-md font-medium mb-4">Database Mappings</h3>
|
||||
{#if fetchingDbs}
|
||||
<p>Loading databases and suggestions...</p>
|
||||
{:else if sourceDatabases.length > 0}
|
||||
<MappingTable
|
||||
{sourceDatabases}
|
||||
{targetDatabases}
|
||||
{mappings}
|
||||
{suggestions}
|
||||
on:update={handleMappingUpdate}
|
||||
/>
|
||||
{:else if sourceEnvId && targetEnvId}
|
||||
<button
|
||||
on:click={fetchDatabases}
|
||||
class="text-indigo-600 hover:text-indigo-500 text-sm font-medium"
|
||||
>
|
||||
Refresh Databases & Suggestions
|
||||
</button>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<button
|
||||
on:click={startMigration}
|
||||
disabled={!sourceEnvId || !targetEnvId || sourceEnvId === targetEnvId}
|
||||
disabled={!sourceEnvId || !targetEnvId || sourceEnvId === targetEnvId || selectedDashboardIds.length === 0}
|
||||
class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md shadow-sm text-white bg-indigo-600 hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 disabled:bg-gray-400"
|
||||
>
|
||||
Start Migration
|
||||
|
||||
12
frontend/src/types/dashboard.ts
Normal file
12
frontend/src/types/dashboard.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
export interface DashboardMetadata {
|
||||
id: number;
|
||||
title: string;
|
||||
last_modified: string;
|
||||
status: string;
|
||||
}
|
||||
|
||||
export interface DashboardSelection {
|
||||
selected_ids: number[];
|
||||
source_env_id: string;
|
||||
target_env_id: string;
|
||||
}
|
||||
BIN
mappings.db
Normal file
BIN
mappings.db
Normal file
Binary file not shown.
@@ -11,7 +11,7 @@ This protocol standardizes the "Semantic Bridge" between the two languages using
|
||||
## I. CORE REQUIREMENTS
|
||||
1. **Causal Validity:** Semantic definitions (Contracts) must ALWAYS precede implementation code.
|
||||
2. **Immutability:** Architectural decisions defined in the Module/Component Header are treated as immutable constraints.
|
||||
3. **Format Compliance:** Output must strictly follow the `[DEF]` / `[/DEF]` anchor syntax for structure.
|
||||
3. **Format Compliance:** Output must strictly follow the `[DEF:..:...]` / `[/DEF:...:...]` anchor syntax for structure.
|
||||
4. **Logic over Assertion:** Contracts define the *logic flow*. Do not generate explicit `assert` statements unless requested. The code logic itself must inherently satisfy the Pre/Post conditions (e.g., via control flow, guards, or types).
|
||||
5. **Fractal Complexity:** Modules and functions must adhere to strict size limits (~300 lines/module, ~30-50 lines/function) to maintain semantic focus.
|
||||
|
||||
@@ -26,13 +26,13 @@ Used to define the boundaries of Modules, Classes, Components, and Functions.
|
||||
|
||||
* **Python:**
|
||||
* Start: `# [DEF:identifier:Type]`
|
||||
* End: `# [/DEF:identifier]`
|
||||
* End: `# [/DEF:identifier:Type]`
|
||||
* **Svelte (Top-level):**
|
||||
* Start: `<!-- [DEF:ComponentName:Component] -->`
|
||||
* End: `<!-- [/DEF:ComponentName] -->`
|
||||
* End: `<!-- [/DEF:ComponentName:Component] -->`
|
||||
* **Svelte (Script/JS/TS):**
|
||||
* Start: `// [DEF:funcName:Function]`
|
||||
* End: `// [/DEF:funcName]`
|
||||
* End: `// [/DEF:funcName:Function]`
|
||||
|
||||
**Types:** `Module`, `Component`, `Class`, `Function`, `Store`, `Action`.
|
||||
|
||||
@@ -64,7 +64,7 @@ Defines high-level dependencies.
|
||||
|
||||
# ... IMPLEMENTATION ...
|
||||
|
||||
# [/DEF:module_name]
|
||||
# [/DEF:module_name:Module]
|
||||
```
|
||||
|
||||
### 2. Svelte Component Header (`.svelte`)
|
||||
@@ -82,20 +82,20 @@ Defines high-level dependencies.
|
||||
<script lang="ts">
|
||||
// [SECTION: IMPORTS]
|
||||
// ...
|
||||
// [/SECTION]
|
||||
// [/SECTION: IMPORTS]
|
||||
|
||||
// ... LOGIC IMPLEMENTATION ...
|
||||
</script>
|
||||
|
||||
<!-- [SECTION: TEMPLATE] -->
|
||||
...
|
||||
<!-- [/SECTION] -->
|
||||
<!-- [/SECTION: TEMPLATE] -->
|
||||
|
||||
<style>
|
||||
/* ... */
|
||||
</style>
|
||||
|
||||
<!-- [/DEF:ComponentName] -->
|
||||
<!-- [/DEF:ComponentName:Component] -->
|
||||
```
|
||||
|
||||
---
|
||||
@@ -123,7 +123,7 @@ def calculate_total(items: List[Item]) -> Decimal:
|
||||
|
||||
# Logic ensuring @POST
|
||||
return total
|
||||
# [/DEF:calculate_total]
|
||||
# [/DEF:calculate_total:Function]
|
||||
```
|
||||
|
||||
### 2. Svelte/JS Contract Style (JSDoc)
|
||||
@@ -146,7 +146,7 @@ async function updateUserProfile(profileData) {
|
||||
|
||||
// ...
|
||||
}
|
||||
// [/DEF:updateUserProfile]
|
||||
// [/DEF:updateUserProfile:Function]
|
||||
```
|
||||
|
||||
---
|
||||
@@ -168,9 +168,19 @@ Logs delineate the agent's internal state.
|
||||
|
||||
---
|
||||
|
||||
## VI. GENERATION WORKFLOW
|
||||
## VI. FRACTAL COMPLEXITY LIMIT
|
||||
|
||||
To maintain semantic coherence and avoid "Attention Sink" issues:
|
||||
* **Module Size:** If a Module body exceeds ~300 lines (or logical complexity), it MUST be refactored into sub-modules or a package structure.
|
||||
* **Function Size:** Functions should fit within a standard attention "chunk" (approx. 30-50 lines). If larger, logic MUST be decomposed into helper functions with their own contracts.
|
||||
|
||||
This ensures every vector embedding remains sharp and focused.
|
||||
|
||||
---
|
||||
|
||||
## VII. GENERATION WORKFLOW
|
||||
|
||||
1. **Context Analysis:** Identify language (Python vs Svelte) and Architecture Layer.
|
||||
2. **Scaffolding:** Generate the `[DEF]` Anchors and Header/Contract **before** writing any logic.
|
||||
2. **Scaffolding:** Generate the `[DEF:...:...]` Anchors and Header/Contract **before** writing any logic.
|
||||
3. **Implementation:** Write the code. Ensure the code logic handles the `@PRE` conditions (e.g., via `if/return` or guards) and satisfies `@POST` conditions naturally. **Do not write explicit `assert` statements unless debugging mode is requested.**
|
||||
4. **Closure:** Ensure every `[DEF]` is closed with `[/DEF]` to accumulate semantic context.
|
||||
4. **Closure:** Ensure every `[DEF:...:...]` is closed with `[/DEF:...:...]` to accumulate semantic context.
|
||||
@@ -10,20 +10,20 @@ description: "Architecture tasks for Migration Plugin Dashboard Grid"
|
||||
|
||||
## Phase 1: Setup & Models
|
||||
|
||||
- [ ] A001 Define contracts/scaffolding for migration route in backend/src/api/routes/migration.py
|
||||
- [ ] A002 Define contracts/scaffolding for Dashboard model in backend/src/models/dashboard.py
|
||||
- [x] A001 Define contracts/scaffolding for migration route in backend/src/api/routes/migration.py
|
||||
- [x] A002 Define contracts/scaffolding for Dashboard model in backend/src/models/dashboard.py
|
||||
|
||||
## Phase 2: User Story 1 - Advanced Dashboard Selection
|
||||
|
||||
- [ ] A003 [US1] Define contracts/scaffolding for SupersetClient extensions in backend/src/core/superset_client.py
|
||||
- [ ] A004 [US1] Define contracts/scaffolding for GET /api/migration/dashboards endpoint in backend/src/api/routes/migration.py
|
||||
- [ ] A005 [US1] Define contracts/scaffolding for DashboardGrid component in frontend/src/components/DashboardGrid.svelte
|
||||
- [ ] A006 [US1] Define contracts/scaffolding for migration page integration in frontend/src/routes/migration/+page.svelte
|
||||
- [ ] A007 [US1] Define contracts/scaffolding for POST /api/migration/execute endpoint in backend/src/api/routes/migration.py
|
||||
- [x] A003 [US1] Define contracts/scaffolding for SupersetClient extensions in backend/src/core/superset_client.py
|
||||
- [x] A004 [US1] Define contracts/scaffolding for GET /api/migration/dashboards endpoint in backend/src/api/routes/migration.py
|
||||
- [x] A005 [US1] Define contracts/scaffolding for DashboardGrid component in frontend/src/components/DashboardGrid.svelte
|
||||
- [x] A006 [US1] Define contracts/scaffolding for migration page integration in frontend/src/routes/migration/+page.svelte
|
||||
- [x] A007 [US1] Define contracts/scaffolding for POST /api/migration/execute endpoint in backend/src/api/routes/migration.py
|
||||
|
||||
## Handover Checklist
|
||||
|
||||
- [ ] All new files created with `[DEF]` anchors
|
||||
- [ ] All functions/classes have `@PURPOSE`, `@PRE`, `@POST` tags
|
||||
- [ ] No "naked code" (logic outside of anchors)
|
||||
- [ ] `tasks-dev.md` is ready for the Developer Agent
|
||||
- [x] All new files created with `[DEF]` anchors
|
||||
- [x] All functions/classes have `@PURPOSE`, `@PRE`, `@POST` tags
|
||||
- [x] No "naked code" (logic outside of anchors)
|
||||
- [x] `tasks-dev.md` is ready for the Developer Agent
|
||||
@@ -1,34 +1,49 @@
|
||||
---
|
||||
description: "Development tasks for Migration Plugin Dashboard Grid"
|
||||
---
|
||||
|
||||
description: "Developer tasks for Migration Plugin Dashboard Grid"
|
||||
---
|
||||
|
||||
# Developer Tasks: Migration Plugin Dashboard Grid
|
||||
# Development Tasks: Migration Plugin Dashboard Grid
|
||||
|
||||
**Role**: Developer Agent
|
||||
**Goal**: Implement the "How" (Logic, State, Error Handling) inside the defined contracts.
|
||||
**Goal**: Implement the logic defined in the architecture contracts.
|
||||
|
||||
## Phase 1: Setup & Models
|
||||
## Phase 1: Backend Implementation
|
||||
|
||||
- [ ] D001 Implement logic for migration route in backend/src/api/routes/migration.py
|
||||
- [ ] D002 Register migration router in backend/src/app.py
|
||||
- [ ] D003 Export migration router in backend/src/api/routes/__init__.py
|
||||
- [ ] D004 Implement logic for Dashboard model in backend/src/models/dashboard.py
|
||||
- [x] D001 [US1] Implement `SupersetClient.get_dashboards_summary` in `backend/src/core/superset_client.py`
|
||||
- **Context**: Fetch dashboards from Superset API with specific columns (`id`, `dashboard_title`, `changed_on_utc`, `published`).
|
||||
- **Input**: None (uses instance config).
|
||||
- **Output**: List of dictionaries mapped to `DashboardMetadata` fields.
|
||||
|
||||
## Phase 2: User Story 1 - Advanced Dashboard Selection
|
||||
- [x] D002 [US1] Implement `get_dashboards` endpoint in `backend/src/api/routes/migration.py`
|
||||
- **Context**: Initialize `SupersetClient` with environment config and call `get_dashboards_summary`.
|
||||
- **Input**: `env_id` (path param).
|
||||
- **Output**: JSON list of `DashboardMetadata`.
|
||||
|
||||
- [ ] D005 [P] [US1] Implement logic for SupersetClient extensions in backend/src/core/superset_client.py
|
||||
- [ ] D006 [US1] Implement logic for GET /api/migration/dashboards endpoint in backend/src/api/routes/migration.py
|
||||
- [ ] D007 [US1] Implement structure and styles for DashboardGrid component in frontend/src/components/DashboardGrid.svelte
|
||||
- [ ] D008 [US1] Implement data fetching and state management in frontend/src/components/DashboardGrid.svelte
|
||||
- [ ] D009 [US1] Implement client-side filtering logic in frontend/src/components/DashboardGrid.svelte
|
||||
- [ ] D010 [US1] Implement pagination logic in frontend/src/components/DashboardGrid.svelte
|
||||
- [ ] D011 [US1] Implement selection logic (single and Select All) in frontend/src/components/DashboardGrid.svelte
|
||||
- [ ] D012 [US1] Integrate DashboardGrid and connect selection to submission in frontend/src/routes/migration/+page.svelte
|
||||
- [ ] D013 [US1] Implement logic for POST /api/migration/execute endpoint in backend/src/api/routes/migration.py
|
||||
- [ ] D014 [US1] Verify semantic compliance and belief state logging
|
||||
- [x] D003 [US1] Implement `execute_migration` endpoint in `backend/src/api/routes/migration.py`
|
||||
- **Context**: Validate selection and initiate migration task (placeholder or TaskManager integration).
|
||||
- **Input**: `DashboardSelection` body.
|
||||
- **Output**: Task ID and status message.
|
||||
|
||||
## Polish & Quality Assurance
|
||||
## Phase 2: Frontend Implementation
|
||||
|
||||
- [ ] D015 Verify error handling and empty states in frontend/src/components/DashboardGrid.svelte
|
||||
- [ ] D016 Ensure consistent styling with Tailwind CSS in frontend/src/components/DashboardGrid.svelte
|
||||
- [x] D004 [US1] Implement `DashboardGrid.svelte` logic
|
||||
- **Context**: `frontend/src/components/DashboardGrid.svelte`
|
||||
- **Requirements**:
|
||||
- Client-side pagination (default 20 items).
|
||||
- Sorting by Title, Last Modified, Status.
|
||||
- Text filtering (search by title).
|
||||
- Multi-selection with "Select All" capability.
|
||||
- Emit selection events.
|
||||
|
||||
- [x] D005 [US1] Integrate `DashboardGrid` into Migration Page
|
||||
- **Context**: `frontend/src/routes/migration/+page.svelte`
|
||||
- **Requirements**:
|
||||
- Fetch dashboards when `sourceEnvId` changes.
|
||||
- Bind `dashboards` data to `DashboardGrid`.
|
||||
- Bind `selectedDashboardIds`.
|
||||
- Update `startMigration` to send `selectedDashboardIds` to backend.
|
||||
|
||||
## Phase 3: Verification
|
||||
|
||||
- [ ] D006 Verify Dashboard Grid functionality (Sort, Filter, Page).
|
||||
- [ ] D007 Verify API integration (Fetch dashboards, Start migration).
|
||||
@@ -0,0 +1,35 @@
|
||||
# Specification Quality Checklist: Migration UI Improvements
|
||||
|
||||
**Purpose**: Validate specification completeness and quality before proceeding to planning
|
||||
**Created**: 2025-12-27
|
||||
**Feature**: [specs/008-migration-ui-improvements/spec.md](specs/008-migration-ui-improvements/spec.md)
|
||||
|
||||
## Content Quality
|
||||
|
||||
- [x] No implementation details (languages, frameworks, APIs)
|
||||
- [x] Focused on user value and business needs
|
||||
- [x] Written for non-technical stakeholders
|
||||
- [x] All mandatory sections completed
|
||||
|
||||
## Requirement Completeness
|
||||
|
||||
- [x] No [NEEDS CLARIFICATION] markers remain
|
||||
- [x] Requirements are testable and unambiguous
|
||||
- [x] Success criteria are measurable
|
||||
- [x] Success criteria are technology-agnostic (no implementation details)
|
||||
- [x] All acceptance scenarios are defined
|
||||
- [x] Edge cases are identified
|
||||
- [x] Scope is clearly bounded
|
||||
- [x] Dependencies and assumptions identified
|
||||
|
||||
## Feature Readiness
|
||||
|
||||
- [x] All functional requirements have clear acceptance criteria
|
||||
- [x] User scenarios cover primary flows
|
||||
- [x] Feature meets measurable outcomes defined in Success Criteria
|
||||
- [x] No implementation details leak into specification
|
||||
|
||||
## Notes
|
||||
|
||||
- The specification addresses the user's request for task history, logs, and interactive password resolution.
|
||||
- Assumptions are made about task persistence and sequential password prompts for multiple databases.
|
||||
356
specs/008-migration-ui-improvements/contracts/api.md
Normal file
356
specs/008-migration-ui-improvements/contracts/api.md
Normal file
@@ -0,0 +1,356 @@
|
||||
# API Contracts: Migration UI Improvements
|
||||
|
||||
**Date**: 2025-12-27 | **Status**: Draft
|
||||
|
||||
## Overview
|
||||
|
||||
This document defines the API contracts for the Migration UI Improvements feature. All endpoints follow RESTful conventions and use standard HTTP status codes.
|
||||
|
||||
## Base URL
|
||||
|
||||
`/api/` - All endpoints are relative to the API base URL
|
||||
|
||||
## Authentication
|
||||
|
||||
All endpoints require authentication using the existing session mechanism.
|
||||
|
||||
## Endpoints
|
||||
|
||||
### 1. List Migration Tasks
|
||||
|
||||
**Endpoint**: `GET /tasks`
|
||||
|
||||
**Purpose**: Retrieve a paginated list of migration tasks
|
||||
|
||||
**Parameters**:
|
||||
```
|
||||
limit: integer (query, optional) - Number of tasks to return (default: 10, max: 50)
|
||||
offset: integer (query, optional) - Pagination offset (default: 0)
|
||||
status: string (query, optional) - Filter by task status (PENDING, RUNNING, SUCCESS, FAILED, AWAITING_INPUT)
|
||||
```
|
||||
|
||||
**Response**: `200 OK`
|
||||
|
||||
**Content-Type**: `application/json`
|
||||
|
||||
**Response Body**:
|
||||
```json
|
||||
{
|
||||
"tasks": [
|
||||
{
|
||||
"id": "string (uuid)",
|
||||
"type": "string",
|
||||
"status": "string (enum)",
|
||||
"start_time": "string (iso8601)",
|
||||
"end_time": "string (iso8601) | null",
|
||||
"requires_input": "boolean"
|
||||
}
|
||||
],
|
||||
"total": "integer",
|
||||
"limit": "integer",
|
||||
"offset": "integer"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Request**:
|
||||
```
|
||||
GET /api/tasks?limit=5&offset=0
|
||||
```
|
||||
|
||||
**Example Response**:
|
||||
```json
|
||||
{
|
||||
"tasks": [
|
||||
{
|
||||
"id": "550e8400-e29b-41d4-a716-446655440000",
|
||||
"type": "migration",
|
||||
"status": "RUNNING",
|
||||
"start_time": "2025-12-27T09:47:12.000Z",
|
||||
"end_time": null,
|
||||
"requires_input": false
|
||||
},
|
||||
{
|
||||
"id": "550e8400-e29b-41d4-a716-446655440001",
|
||||
"type": "migration",
|
||||
"status": "AWAITING_INPUT",
|
||||
"start_time": "2025-12-27T09:45:00.000Z",
|
||||
"end_time": null,
|
||||
"requires_input": true
|
||||
}
|
||||
],
|
||||
"total": 2,
|
||||
"limit": 5,
|
||||
"offset": 0
|
||||
}
|
||||
```
|
||||
|
||||
**Error Responses**:
|
||||
- `401 Unauthorized` - Authentication required
|
||||
- `400 Bad Request` - Invalid parameters
|
||||
|
||||
### 2. Get Task Logs
|
||||
|
||||
**Endpoint**: `GET /tasks/{task_id}/logs`
|
||||
|
||||
**Purpose**: Retrieve detailed logs for a specific task
|
||||
|
||||
**Parameters**: None
|
||||
|
||||
**Response**: `200 OK`
|
||||
|
||||
**Content-Type**: `application/json`
|
||||
|
||||
**Response Body**:
|
||||
```json
|
||||
{
|
||||
"task_id": "string (uuid)",
|
||||
"status": "string (enum)",
|
||||
"logs": [
|
||||
{
|
||||
"timestamp": "string (iso8601)",
|
||||
"level": "string",
|
||||
"message": "string",
|
||||
"context": "object | null"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Example Request**:
|
||||
```
|
||||
GET /api/tasks/550e8400-e29b-41d4-a716-446655440001/logs
|
||||
```
|
||||
|
||||
**Example Response**:
|
||||
```json
|
||||
{
|
||||
"task_id": "550e8400-e29b-41d4-a716-446655440001",
|
||||
"status": "AWAITING_INPUT",
|
||||
"logs": [
|
||||
{
|
||||
"timestamp": "2025-12-27T09:45:00.000Z",
|
||||
"level": "INFO",
|
||||
"message": "Starting migration",
|
||||
"context": null
|
||||
},
|
||||
{
|
||||
"timestamp": "2025-12-27T09:47:12.000Z",
|
||||
"level": "ERROR",
|
||||
"message": "API error during upload",
|
||||
"context": {
|
||||
"error": "Must provide a password for the database",
|
||||
"database": "PostgreSQL"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Error Responses**:
|
||||
- `401 Unauthorized` - Authentication required
|
||||
- `404 Not Found` - Task not found
|
||||
- `403 Forbidden` - Access denied to this task
|
||||
|
||||
### 3. Resume Task with Input
|
||||
|
||||
**Endpoint**: `POST /tasks/{task_id}/resume`
|
||||
|
||||
**Purpose**: Provide required input and resume a paused task
|
||||
|
||||
**Request Body**:
|
||||
```json
|
||||
{
|
||||
"passwords": {
|
||||
"database_name": "password"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Response**: `200 OK`
|
||||
|
||||
**Content-Type**: `application/json`
|
||||
|
||||
**Response Body**:
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"message": "Task resumed successfully"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Request**:
|
||||
```
|
||||
POST /api/tasks/550e8400-e29b-41d4-a716-446655440001/resume
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"passwords": {
|
||||
"PostgreSQL": "securepassword123"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response**:
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"message": "Task resumed successfully"
|
||||
}
|
||||
```
|
||||
|
||||
**Error Responses**:
|
||||
- `401 Unauthorized` - Authentication required
|
||||
- `404 Not Found` - Task not found
|
||||
- `400 Bad Request` - Invalid request body or missing required fields
|
||||
- `409 Conflict` - Task not in AWAITING_INPUT state or already completed
|
||||
- `422 Unprocessable Entity` - Invalid password provided
|
||||
|
||||
### 4. Get Task Details (Optional)
|
||||
|
||||
**Endpoint**: `GET /tasks/{task_id}`
|
||||
|
||||
**Purpose**: Get detailed information about a specific task
|
||||
|
||||
**Parameters**: None
|
||||
|
||||
**Response**: `200 OK`
|
||||
|
||||
**Content-Type**: `application/json`
|
||||
|
||||
**Response Body**:
|
||||
```json
|
||||
{
|
||||
"id": "string (uuid)",
|
||||
"type": "string",
|
||||
"status": "string (enum)",
|
||||
"start_time": "string (iso8601)",
|
||||
"end_time": "string (iso8601) | null",
|
||||
"requires_input": "boolean",
|
||||
"input_request": "object | null"
|
||||
}
|
||||
```
|
||||
|
||||
**Example Response**:
|
||||
```json
|
||||
{
|
||||
"id": "550e8400-e29b-41d4-a716-446655440001",
|
||||
"type": "migration",
|
||||
"status": "AWAITING_INPUT",
|
||||
"start_time": "2025-12-27T09:45:00.000Z",
|
||||
"end_time": null,
|
||||
"requires_input": true,
|
||||
"input_request": {
|
||||
"type": "database_password",
|
||||
"databases": ["PostgreSQL"],
|
||||
"error_message": "Must provide a password for the database"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Data Types
|
||||
|
||||
### TaskStatus Enum
|
||||
|
||||
```
|
||||
PENDING
|
||||
RUNNING
|
||||
SUCCESS
|
||||
FAILED
|
||||
AWAITING_INPUT
|
||||
```
|
||||
|
||||
### LogLevel Enum
|
||||
|
||||
```
|
||||
INFO
|
||||
WARNING
|
||||
ERROR
|
||||
DEBUG
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
### Standard Error Format
|
||||
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": "string",
|
||||
"message": "string",
|
||||
"details": "object | null"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Common Error Codes
|
||||
|
||||
- `invalid_task_id`: Task ID is invalid or not found
|
||||
- `task_not_awaiting_input`: Task is not in AWAITING_INPUT state
|
||||
- `invalid_password`: Provided password is invalid
|
||||
- `unauthorized`: Authentication required
|
||||
- `bad_request`: Invalid request parameters
|
||||
|
||||
## WebSocket Integration
|
||||
|
||||
### Task Status Updates
|
||||
|
||||
**Channel**: `/ws/tasks/{task_id}/status`
|
||||
|
||||
**Message Format**:
|
||||
```json
|
||||
{
|
||||
"event": "status_update",
|
||||
"task_id": "string (uuid)",
|
||||
"status": "string (enum)",
|
||||
"timestamp": "string (iso8601)"
|
||||
}
|
||||
```
|
||||
|
||||
### Task Log Updates
|
||||
|
||||
**Channel**: `/ws/tasks/{task_id}/logs`
|
||||
|
||||
**Message Format**:
|
||||
```json
|
||||
{
|
||||
"event": "log_update",
|
||||
"task_id": "string (uuid)",
|
||||
"log": {
|
||||
"timestamp": "string (iso8601)",
|
||||
"level": "string",
|
||||
"message": "string",
|
||||
"context": "object | null"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Rate Limiting
|
||||
|
||||
- Maximum 10 requests per minute per user for task list endpoint
|
||||
- Maximum 30 requests per minute per user for task details/logs endpoints
|
||||
- No rate limiting for WebSocket connections
|
||||
|
||||
## Versioning
|
||||
|
||||
All endpoints are versioned using the `Accept` header:
|
||||
- `Accept: application/vnd.api.v1+json` - Current version
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Authentication**: All endpoints require valid session authentication
|
||||
2. **Authorization**: Users can only access their own tasks
|
||||
3. **Password Handling**: Passwords are not stored permanently, only used for immediate task resumption
|
||||
4. **Input Validation**: All inputs are validated according to defined schemas
|
||||
5. **Rate Limiting**: Prevents abuse of API endpoints
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
1. **Pagination**: Default limit of 10 tasks, maximum of 50
|
||||
2. **Sorting**: Tasks are sorted by start_time descending by default
|
||||
3. **Caching**: Task list responses can be cached for 5 seconds
|
||||
4. **WebSocket**: Use existing WebSocket infrastructure for real-time updates
|
||||
5. **Error Recovery**: Failed task resumptions can be retried with corrected input
|
||||
|
||||
## OpenAPI Specification
|
||||
|
||||
A complete OpenAPI 3.0 specification is available in the repository at `specs/008-migration-ui-improvements/contracts/openapi.yaml`.
|
||||
286
specs/008-migration-ui-improvements/data-model.md
Normal file
286
specs/008-migration-ui-improvements/data-model.md
Normal file
@@ -0,0 +1,286 @@
|
||||
# Data Model: Migration UI Improvements
|
||||
|
||||
**Date**: 2025-12-27 | **Status**: Draft
|
||||
|
||||
## Entities
|
||||
|
||||
### 1. Task (Extended)
|
||||
|
||||
**Source**: `backend/src/core/task_manager.py`
|
||||
|
||||
**Fields**:
|
||||
- `id: UUID` - Unique task identifier
|
||||
- `type: str` - Task type (e.g., "migration")
|
||||
- `status: TaskStatus` - Current status (extended enum)
|
||||
- `start_time: datetime` - When task was created
|
||||
- `end_time: datetime | None` - When task completed (if applicable)
|
||||
- `logs: List[LogEntry]` - Task execution logs
|
||||
- `context: Dict` - Task-specific data
|
||||
- `input_required: bool` - Whether task is awaiting user input
|
||||
- `input_request: Dict | None` - Details about required input (for AWAITING_INPUT state)
|
||||
|
||||
**New Status Values**:
|
||||
- `AWAITING_INPUT` - Task is paused waiting for user input (e.g., password)
|
||||
|
||||
**Relationships**:
|
||||
- Has many: `LogEntry`
|
||||
- Belongs to: `Migration` (if migration task)
|
||||
|
||||
**Validation Rules**:
|
||||
- `id` must be unique and non-null
|
||||
- `status` must be valid TaskStatus enum value
|
||||
- `start_time` must be set on creation
|
||||
- `input_request` required when status is `AWAITING_INPUT`
|
||||
|
||||
**State Transitions**:
|
||||
```mermaid
|
||||
graph LR
|
||||
PENDING --> RUNNING
|
||||
RUNNING --> SUCCESS
|
||||
RUNNING --> FAILED
|
||||
RUNNING --> AWAITING_INPUT
|
||||
AWAITING_INPUT --> RUNNING
|
||||
AWAITING_INPUT --> FAILED
|
||||
```
|
||||
|
||||
### 2. LogEntry
|
||||
|
||||
**Source**: Existing in codebase
|
||||
|
||||
**Fields**:
|
||||
- `timestamp: datetime` - When log entry was created
|
||||
- `level: str` - Log level (INFO, WARNING, ERROR, etc.)
|
||||
- `message: str` - Log message
|
||||
- `context: Dict | None` - Additional context data
|
||||
|
||||
**Validation Rules**:
|
||||
- `timestamp` must be set
|
||||
- `level` must be valid log level
|
||||
- `message` must be non-empty
|
||||
|
||||
### 3. DatabasePasswordRequest (New)
|
||||
|
||||
**Source**: New entity for password prompts
|
||||
|
||||
**Fields**:
|
||||
- `database_name: str` - Name of database requiring password
|
||||
- `connection_string: str | None` - Partial connection string (without password)
|
||||
- `error_message: str | None` - Original error message
|
||||
- `attempt_count: int` - Number of password attempts
|
||||
|
||||
**Validation Rules**:
|
||||
- `database_name` must be non-empty
|
||||
- `attempt_count` must be >= 0
|
||||
|
||||
**Relationships**:
|
||||
- Embedded in: `Task.input_request`
|
||||
|
||||
### 4. TaskListResponse (API DTO)
|
||||
|
||||
**Fields**:
|
||||
- `tasks: List[TaskSummary]` - List of task summaries
|
||||
- `total: int` - Total number of tasks
|
||||
- `limit: int` - Pagination limit
|
||||
- `offset: int` - Pagination offset
|
||||
|
||||
### 5. TaskSummary (API DTO)
|
||||
|
||||
**Fields**:
|
||||
- `id: UUID` - Task ID
|
||||
- `type: str` - Task type
|
||||
- `status: str` - Current status
|
||||
- `start_time: datetime` - Start time
|
||||
- `end_time: datetime | None` - End time (if completed)
|
||||
- `requires_input: bool` - Whether task needs user input
|
||||
|
||||
### 6. TaskLogResponse (API DTO)
|
||||
|
||||
**Fields**:
|
||||
- `task_id: UUID` - Task ID
|
||||
- `logs: List[LogEntry]` - Task logs
|
||||
- `status: str` - Current task status
|
||||
|
||||
### 7. PasswordPromptRequest (API DTO)
|
||||
|
||||
**Fields**:
|
||||
- `task_id: UUID` - Task ID
|
||||
- `passwords: Dict[str, str]` - Database name to password mapping
|
||||
|
||||
**Validation Rules**:
|
||||
- `task_id` must exist and be in AWAITING_INPUT state
|
||||
- All required databases must be provided
|
||||
|
||||
## API Contracts
|
||||
|
||||
### 1. GET /api/tasks - List Tasks
|
||||
|
||||
**Purpose**: Retrieve list of recent migration tasks
|
||||
|
||||
**Parameters**:
|
||||
- `limit: int` (query, optional) - Pagination limit (default: 10)
|
||||
- `offset: int` (query, optional) - Pagination offset (default: 0)
|
||||
- `status: str` (query, optional) - Filter by status
|
||||
|
||||
**Response**: `TaskListResponse`
|
||||
|
||||
**Example**:
|
||||
```json
|
||||
{
|
||||
"tasks": [
|
||||
{
|
||||
"id": "abc-123",
|
||||
"type": "migration",
|
||||
"status": "RUNNING",
|
||||
"start_time": "2025-12-27T09:47:12Z",
|
||||
"end_time": null,
|
||||
"requires_input": false
|
||||
}
|
||||
],
|
||||
"total": 1,
|
||||
"limit": 10,
|
||||
"offset": 0
|
||||
}
|
||||
```
|
||||
|
||||
### 2. GET /api/tasks/{task_id}/logs - Get Task Logs
|
||||
|
||||
**Purpose**: Retrieve detailed logs for a specific task
|
||||
|
||||
**Parameters**: None
|
||||
|
||||
**Response**: `TaskLogResponse`
|
||||
|
||||
**Example**:
|
||||
```json
|
||||
{
|
||||
"task_id": "abc-123",
|
||||
"status": "AWAITING_INPUT",
|
||||
"logs": [
|
||||
{
|
||||
"timestamp": "2025-12-27T09:47:12Z",
|
||||
"level": "ERROR",
|
||||
"message": "Must provide a password for the database",
|
||||
"context": {
|
||||
"database": "PostgreSQL"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### 3. POST /api/tasks/{task_id}/resume - Resume Task with Input
|
||||
|
||||
**Purpose**: Provide required input and resume a paused task
|
||||
|
||||
**Request Body**: `PasswordPromptRequest`
|
||||
|
||||
**Response**:
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"message": "Task resumed successfully"
|
||||
}
|
||||
```
|
||||
|
||||
**Error Responses**:
|
||||
- `404 Not Found` - Task not found
|
||||
- `400 Bad Request` - Invalid input or task not in AWAITING_INPUT state
|
||||
- `409 Conflict` - Task already completed or failed
|
||||
|
||||
## Database Schema Changes
|
||||
|
||||
### Task Persistence (SQLite)
|
||||
|
||||
**Table**: `persistent_tasks`
|
||||
|
||||
**Columns**:
|
||||
- `id TEXT PRIMARY KEY` - Task ID
|
||||
- `status TEXT NOT NULL` - Task status
|
||||
- `created_at TEXT NOT NULL` - Creation timestamp
|
||||
- `updated_at TEXT NOT NULL` - Last update timestamp
|
||||
- `input_request JSON` - Serialized input request data
|
||||
- `context JSON` - Serialized task context
|
||||
|
||||
**Indexes**:
|
||||
- `idx_status` on `status` column
|
||||
- `idx_created_at` on `created_at` column
|
||||
|
||||
## Event Flow
|
||||
|
||||
### Normal Task Execution
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant UI
|
||||
participant API
|
||||
participant TaskManager
|
||||
participant MigrationPlugin
|
||||
|
||||
UI->>API: Start migration
|
||||
API->>TaskManager: Create task
|
||||
TaskManager->>MigrationPlugin: Execute
|
||||
MigrationPlugin->>TaskManager: Update status (RUNNING)
|
||||
MigrationPlugin->>TaskManager: Add logs
|
||||
MigrationPlugin->>TaskManager: Update status (SUCCESS/FAILED)
|
||||
```
|
||||
|
||||
### Task with Password Requirement
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant UI
|
||||
participant API
|
||||
participant TaskManager
|
||||
participant MigrationPlugin
|
||||
|
||||
UI->>API: Start migration
|
||||
API->>TaskManager: Create task
|
||||
TaskManager->>MigrationPlugin: Execute
|
||||
MigrationPlugin->>TaskManager: Update status (RUNNING)
|
||||
MigrationPlugin->>TaskManager: Detect password error
|
||||
TaskManager->>TaskManager: Update status (AWAITING_INPUT)
|
||||
TaskManager->>API: Persist task (if needed)
|
||||
API->>UI: Task status update
|
||||
UI->>API: Get task logs
|
||||
API->>UI: Return logs with error
|
||||
UI->>User: Show password prompt
|
||||
User->>UI: Provide password
|
||||
UI->>API: POST /tasks/{id}/resume
|
||||
API->>TaskManager: Resume task with password
|
||||
TaskManager->>MigrationPlugin: Continue execution
|
||||
MigrationPlugin->>TaskManager: Update status (RUNNING)
|
||||
MigrationPlugin->>TaskManager: Complete task
|
||||
```
|
||||
|
||||
## Validation Rules
|
||||
|
||||
### Task Creation
|
||||
- Task ID must be unique
|
||||
- Start time must be set
|
||||
- Initial status must be PENDING
|
||||
|
||||
### Task State Transitions
|
||||
- Only RUNNING tasks can transition to AWAITING_INPUT
|
||||
- Only AWAITING_INPUT tasks can be resumed
|
||||
- Completed tasks (SUCCESS/FAILED) cannot be modified
|
||||
|
||||
### Password Input
|
||||
- All required databases must be provided
|
||||
- Passwords must meet minimum complexity requirements
|
||||
- Invalid passwords trigger new error and prompt again
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
1. **Task Persistence**: Only tasks in AWAITING_INPUT state will be persisted to handle backend restarts
|
||||
|
||||
2. **Error Detection**: Specific pattern matching for Superset "Must provide a password" errors
|
||||
|
||||
3. **UI Integration**: Real-time updates using existing WebSocket infrastructure
|
||||
|
||||
4. **Security**: Passwords are not permanently stored, only used for immediate task resumption
|
||||
|
||||
5. **Performance**: Basic pagination for task history to handle growth
|
||||
|
||||
## Open Questions
|
||||
|
||||
None - All design decisions have been documented and validated against requirements.
|
||||
142
specs/008-migration-ui-improvements/plan.md
Normal file
142
specs/008-migration-ui-improvements/plan.md
Normal file
@@ -0,0 +1,142 @@
|
||||
# Implementation Plan: Migration UI Improvements
|
||||
|
||||
**Branch**: `008-migration-ui-improvements` | **Date**: 2025-12-27 | **Spec**: [spec.md](spec.md)
|
||||
**Input**: Feature specification from `/specs/008-migration-ui-improvements/spec.md`
|
||||
|
||||
**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
|
||||
|
||||
## Summary
|
||||
|
||||
This feature aims to improve the migration UI by:
|
||||
1. Displaying a list of recent and current migration tasks with their statuses
|
||||
2. Allowing users to view detailed logs for each task
|
||||
3. Handling database password errors interactively by prompting users to provide missing passwords
|
||||
|
||||
The technical approach involves extending the existing TaskManager and MigrationPlugin to support new task states, adding API endpoints for task history and logs, and creating UI components for task visualization and password prompts.
|
||||
|
||||
## Technical Context
|
||||
|
||||
**Language/Version**: Python 3.9+, Node.js 18+
|
||||
**Primary Dependencies**: FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API
|
||||
**Storage**: SQLite (optional for job history), existing database for mappings
|
||||
**Testing**: pytest, ruff check
|
||||
**Target Platform**: Linux server (backend), Web browser (frontend)
|
||||
**Project Type**: web (backend + frontend)
|
||||
**Performance Goals**: Basic pagination support (limit/offset), real-time status updates
|
||||
**Constraints**: Configurable retention period for task history, hybrid task persistence approach
|
||||
**Scale/Scope**: Small to medium migration tasks, 10-50 concurrent users
|
||||
|
||||
## Constitution Check
|
||||
|
||||
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
|
||||
|
||||
### Compliance Gates
|
||||
|
||||
1. **Causal Validity (Contracts First)**: ✅ PASS
|
||||
- ✅ All new entities defined in data-model.md with contracts
|
||||
- ✅ API contracts documented in contracts/api.md
|
||||
- ✅ TaskManager extensions have clear @PRE/@POST conditions defined
|
||||
|
||||
2. **Immutability of Architecture**: ✅ PASS
|
||||
- ✅ Existing architectural layers maintained (Domain/Infra/UI)
|
||||
- ✅ New components follow established patterns
|
||||
- ✅ Web application structure preserved
|
||||
|
||||
3. **Semantic Format Compliance**: ✅ PASS
|
||||
- ✅ All new code will use [DEF] / [/DEF] anchor syntax
|
||||
- ✅ Proper metadata tags (@KEY) defined in data model
|
||||
- ✅ Graph relations (@RELATION) documented
|
||||
|
||||
4. **Design by Contract (DbC)**: ✅ PASS
|
||||
- ✅ All new functions/classes have defined contracts
|
||||
- ✅ API endpoints have clear specifications and constraints
|
||||
- ✅ Implementation will strictly satisfy contracts
|
||||
|
||||
5. **Belief State Logging**: ✅ PASS
|
||||
- ✅ Context Manager pattern documented for Python
|
||||
- ✅ Proper [ANCHOR_ID][STATE] format maintained
|
||||
- ✅ Logging integrated into task state transitions
|
||||
|
||||
6. **Fractal Complexity Limit**: ✅ PASS
|
||||
- ✅ Modules designed to stay under ~300 lines
|
||||
- ✅ Functions designed for ~30-50 lines max
|
||||
- ✅ Complex logic decomposed into helpers
|
||||
|
||||
### Post-Design Validation
|
||||
|
||||
**Design Artifacts Created**:
|
||||
- ✅ research.md - Phase 0 research findings
|
||||
- ✅ data-model.md - Entity definitions and relationships
|
||||
- ✅ contracts/api.md - API contracts and specifications
|
||||
- ✅ quickstart.md - Usage documentation
|
||||
|
||||
**Constitution Compliance**:
|
||||
- ✅ All contracts defined before implementation
|
||||
- ✅ Architectural decisions respect existing structure
|
||||
- ✅ Semantic format compliance maintained
|
||||
- ✅ Design by Contract principles applied
|
||||
- ✅ Complexity limits respected
|
||||
|
||||
### Potential Violations
|
||||
|
||||
None identified. The design phase has successfully addressed all constitutional requirements and the feature is ready for implementation.
|
||||
|
||||
## Project Structure
|
||||
|
||||
### Documentation (this feature)
|
||||
|
||||
```text
|
||||
specs/008-migration-ui-improvements/
|
||||
├── plan.md # This file (/speckit.plan command output)
|
||||
├── research.md # Phase 0 output (/speckit.plan command)
|
||||
├── data-model.md # Phase 1 output (/speckit.plan command)
|
||||
├── quickstart.md # Phase 1 output (/speckit.plan command)
|
||||
├── contracts/ # Phase 1 output (/speckit.plan command)
|
||||
└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
|
||||
```
|
||||
|
||||
### Source Code (repository root)
|
||||
|
||||
```text
|
||||
backend/
|
||||
├── src/
|
||||
│ ├── models/
|
||||
│ ├── services/
|
||||
│ ├── core/
|
||||
│ │ ├── task_manager.py # Extended with new task states
|
||||
│ │ └── migration_engine.py # Enhanced error handling
|
||||
│ ├── api/
|
||||
│ │ └── routes/
|
||||
│ │ └── tasks.py # New API endpoints
|
||||
│ └── plugins/
|
||||
│ └── migration.py # Enhanced password handling
|
||||
└── tests/
|
||||
├── test_task_manager.py # New tests for task states
|
||||
└── test_migration_plugin.py # New tests for password prompts
|
||||
|
||||
frontend/
|
||||
├── src/
|
||||
│ ├── components/
|
||||
│ │ ├── TaskHistory.svelte # New component for task list
|
||||
│ │ ├── TaskLogViewer.svelte # New component for log viewing
|
||||
│ │ └── PasswordPrompt.svelte # New component for password input
|
||||
│ ├── pages/
|
||||
│ │ └── migration/
|
||||
│ │ └── +page.svelte # Enhanced migration page
|
||||
│ └── services/
|
||||
│ └── taskService.js # New service for task API
|
||||
└── tests/
|
||||
├── TaskHistory.spec.js # New component tests
|
||||
└── taskService.spec.js # New service tests
|
||||
```
|
||||
|
||||
**Structure Decision**: Web application structure (Option 2) is selected as this feature involves both backend API extensions and frontend UI components. The existing backend/frontend separation will be maintained, with new components added to their respective directories.
|
||||
|
||||
## Complexity Tracking
|
||||
|
||||
> **Fill ONLY if Constitution Check has violations that must be justified**
|
||||
|
||||
| Violation | Why Needed | Simpler Alternative Rejected Because |
|
||||
|-----------|------------|-------------------------------------|
|
||||
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
|
||||
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |
|
||||
226
specs/008-migration-ui-improvements/quickstart.md
Normal file
226
specs/008-migration-ui-improvements/quickstart.md
Normal file
@@ -0,0 +1,226 @@
|
||||
# Quickstart: Migration UI Improvements
|
||||
|
||||
**Date**: 2025-12-27 | **Status**: Draft
|
||||
|
||||
## Overview
|
||||
|
||||
This guide provides step-by-step instructions for using the new Migration UI Improvements feature.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Running instance of the migration tool
|
||||
- Valid user session
|
||||
- At least one migration task (completed or in progress)
|
||||
|
||||
## Installation
|
||||
|
||||
No additional installation required. The feature is integrated into the existing migration UI.
|
||||
|
||||
## Using the Feature
|
||||
|
||||
### 1. Viewing Task History
|
||||
|
||||
**Steps**:
|
||||
1. Navigate to the Migration Dashboard
|
||||
2. Locate the "Recent Tasks" section
|
||||
3. View the list of your recent migration tasks
|
||||
|
||||
**What you'll see**:
|
||||
- Task ID
|
||||
- Status (Pending, Running, Success, Failed, Awaiting Input)
|
||||
- Start time
|
||||
- "View Logs" action button
|
||||
|
||||
**Screenshot**: [Placeholder for task history screenshot]
|
||||
|
||||
### 2. Viewing Task Logs
|
||||
|
||||
**Steps**:
|
||||
1. From the task history list, click "View Logs" on any task
|
||||
2. A modal will open showing detailed logs
|
||||
|
||||
**What you'll see**:
|
||||
- Timestamped log entries
|
||||
- Log levels (INFO, WARNING, ERROR)
|
||||
- Detailed error messages
|
||||
- Context information for errors
|
||||
|
||||
**Screenshot**: [Placeholder for log viewer screenshot]
|
||||
|
||||
### 3. Handling Database Password Prompts
|
||||
|
||||
**Scenario**: A migration fails due to missing database password
|
||||
|
||||
**Steps**:
|
||||
1. Start a migration that requires database passwords
|
||||
2. When the system detects a missing password error:
|
||||
- Task status changes to "Awaiting Input"
|
||||
- A notification appears
|
||||
- The task shows "Requires Input" indicator
|
||||
3. Click "View Logs" to see the specific error
|
||||
4. The system will show a password prompt with:
|
||||
- Database name requiring password
|
||||
- Original error message
|
||||
- Password input field
|
||||
5. Enter the required password(s)
|
||||
6. Click "Submit" to resume the migration
|
||||
|
||||
**What happens next**:
|
||||
- The migration resumes with the provided credentials
|
||||
- Task status changes back to "Running"
|
||||
- If password is incorrect, you'll be prompted again
|
||||
|
||||
**Screenshot**: [Placeholder for password prompt screenshot]
|
||||
|
||||
## API Usage Examples
|
||||
|
||||
### List Recent Tasks
|
||||
|
||||
```bash
|
||||
curl -X GET \
|
||||
'http://localhost:8000/api/tasks?limit=5&offset=0' \
|
||||
-H 'Authorization: Bearer YOUR_TOKEN' \
|
||||
-H 'Accept: application/vnd.api.v1+json'
|
||||
```
|
||||
|
||||
### Get Task Logs
|
||||
|
||||
```bash
|
||||
curl -X GET \
|
||||
'http://localhost:8000/api/tasks/TASK_ID/logs' \
|
||||
-H 'Authorization: Bearer YOUR_TOKEN' \
|
||||
-H 'Accept: application/vnd.api.v1+json'
|
||||
```
|
||||
|
||||
### Resume Task with Password
|
||||
|
||||
```bash
|
||||
curl -X POST \
|
||||
'http://localhost:8000/api/tasks/TASK_ID/resume' \
|
||||
-H 'Authorization: Bearer YOUR_TOKEN' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Accept: application/vnd.api.v1+json' \
|
||||
-d '{
|
||||
"passwords": {
|
||||
"PostgreSQL": "your_secure_password"
|
||||
}
|
||||
}'
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
**Issue**: No tasks appearing in history
|
||||
- **Solution**: Check that you have started at least one migration task
|
||||
- **Solution**: Verify your session is valid
|
||||
- **Solution**: Try refreshing the page
|
||||
|
||||
**Issue**: Task stuck in "Awaiting Input" state
|
||||
- **Solution**: Check the task logs for specific error details
|
||||
- **Solution**: Provide the required input through the UI
|
||||
- **Solution**: If input was provided but task didn't resume, try again
|
||||
|
||||
**Issue**: Password prompt keeps appearing
|
||||
- **Solution**: Verify the password is correct
|
||||
- **Solution**: Check for multiple databases requiring passwords
|
||||
- **Solution**: Contact administrator if issue persists
|
||||
|
||||
### Error Messages
|
||||
|
||||
- `"Task not found"`: The specified task ID doesn't exist or you don't have permission
|
||||
- `"Task not awaiting input"`: The task is not in a state that requires user input
|
||||
- `"Invalid password"`: The provided password was rejected by the target system
|
||||
- `"Unauthorized"`: Your session has expired or is invalid
|
||||
|
||||
## Configuration
|
||||
|
||||
### Task Retention
|
||||
|
||||
Configure how long completed tasks are retained:
|
||||
|
||||
```bash
|
||||
# In backend configuration
|
||||
TASK_RETENTION_DAYS=30
|
||||
TASK_RETENTION_LIMIT=100
|
||||
```
|
||||
|
||||
### Pagination Limits
|
||||
|
||||
Adjust default pagination limits:
|
||||
|
||||
```bash
|
||||
# In backend configuration
|
||||
DEFAULT_TASK_LIMIT=10
|
||||
MAX_TASK_LIMIT=50
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Monitor Tasks**: Regularly check task history for failed migrations
|
||||
2. **Prompt Response**: Respond to "Awaiting Input" tasks promptly to avoid delays
|
||||
3. **Log Review**: Always review logs for failed tasks to understand root causes
|
||||
4. **Password Management**: Use secure password storage for frequently used credentials
|
||||
5. **Session Management**: Ensure your session is active before starting long migrations
|
||||
|
||||
## Integration Guide
|
||||
|
||||
### Frontend Integration
|
||||
|
||||
```javascript
|
||||
// Example: Fetching task list
|
||||
import { getTasks } from '/src/services/taskService'
|
||||
|
||||
const fetchTasks = async () => {
|
||||
try {
|
||||
const response = await getTasks({ limit: 10, offset: 0 })
|
||||
setTasks(response.tasks)
|
||||
setTotal(response.total)
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch tasks:', error)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Backend Integration
|
||||
|
||||
```python
|
||||
# Example: Extending TaskManager
|
||||
from backend.src.core.task_manager import TaskManager
|
||||
|
||||
class EnhancedTaskManager(TaskManager):
|
||||
def get_tasks_for_user(self, user_id, limit=10, offset=0):
|
||||
# Implement user-specific task filtering
|
||||
pass
|
||||
```
|
||||
|
||||
## Support
|
||||
|
||||
For issues not covered in this guide:
|
||||
- Check the main documentation
|
||||
- Review API contract specifications
|
||||
- Contact support team with error details
|
||||
|
||||
## Changelog
|
||||
|
||||
**2025-12-27**: Initial release
|
||||
- Added task history viewing
|
||||
- Added task log inspection
|
||||
- Added interactive password prompts
|
||||
- Added API endpoints for task management
|
||||
|
||||
## Feedback
|
||||
|
||||
Provide feedback on this feature:
|
||||
- What works well
|
||||
- What could be improved
|
||||
- Additional use cases to support
|
||||
|
||||
[Feedback Form Link Placeholder]
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Try the feature with a test migration
|
||||
2. Familiarize yourself with the error patterns
|
||||
3. Integrate with your existing workflows
|
||||
4. Provide feedback for improvements
|
||||
164
specs/008-migration-ui-improvements/research.md
Normal file
164
specs/008-migration-ui-improvements/research.md
Normal file
@@ -0,0 +1,164 @@
|
||||
# Research: Migration UI Improvements
|
||||
|
||||
**Date**: 2025-12-27 | **Status**: Complete
|
||||
|
||||
## Overview
|
||||
|
||||
This research phase was conducted to resolve any technical unknowns and validate design decisions for the Migration UI Improvements feature. Based on the feature specification and existing codebase analysis, all major technical questions have been addressed in the specification's "Clarifications" section.
|
||||
|
||||
## Research Findings
|
||||
|
||||
### 1. Task History and Status Display
|
||||
|
||||
**Decision**: Implement a task history API endpoint and UI component
|
||||
|
||||
**Rationale**:
|
||||
- The existing `TaskManager` already tracks tasks in memory
|
||||
- Need to expose this information through a new API endpoint
|
||||
- UI will display tasks with status, start time, and actions
|
||||
|
||||
**Alternatives considered**:
|
||||
- Full database persistence for all tasks (rejected due to complexity)
|
||||
- In-memory only with no persistence (rejected as it wouldn't survive restarts)
|
||||
|
||||
**Implementation approach**:
|
||||
- Extend `TaskManager` to support task history retrieval
|
||||
- Add `/api/tasks` endpoint for fetching task list
|
||||
- Create `TaskHistory` Svelte component for display
|
||||
|
||||
### 2. Task Log Viewing
|
||||
|
||||
**Decision**: Implement log retrieval API and modal viewer
|
||||
|
||||
**Rationale**:
|
||||
- Each task already maintains logs in `LogEntry` format
|
||||
- Need API endpoint to retrieve logs for specific task ID
|
||||
- UI modal provides detailed log viewing without page navigation
|
||||
|
||||
**Alternatives considered**:
|
||||
- Separate log page (rejected for poor UX)
|
||||
- Downloadable log files (rejected as overkill for current needs)
|
||||
|
||||
**Implementation approach**:
|
||||
- Add `/api/tasks/{task_id}/logs` endpoint
|
||||
- Create `TaskLogViewer` modal component
|
||||
- Integrate with existing task list
|
||||
|
||||
### 3. Database Password Error Handling
|
||||
|
||||
**Decision**: Implement interactive password prompt with task pausing
|
||||
|
||||
**Rationale**:
|
||||
- Superset requires database passwords that aren't exported
|
||||
- Current system fails entirely on missing passwords
|
||||
- Interactive resolution improves user experience significantly
|
||||
|
||||
**Alternatives considered**:
|
||||
- Pre-migration password collection form (rejected as not all migrations need passwords)
|
||||
- Automatic retry with default passwords (rejected as insecure)
|
||||
|
||||
**Implementation approach**:
|
||||
- Extend `MigrationPlugin` to detect specific Superset password errors
|
||||
- Add new task state "AWAITING_INPUT"
|
||||
- Create `PasswordPrompt` component for user input
|
||||
- Implement task resumption with provided credentials
|
||||
|
||||
### 4. Task Persistence Strategy
|
||||
|
||||
**Decision**: Hybrid approach - persist only tasks needing user input
|
||||
|
||||
**Rationale**:
|
||||
- Full persistence adds unnecessary complexity
|
||||
- Most tasks complete quickly and don't need persistence
|
||||
- Only tasks awaiting user input need to survive restarts
|
||||
|
||||
**Alternatives considered**:
|
||||
- Full database persistence (rejected due to complexity)
|
||||
- No persistence (rejected as loses important state)
|
||||
|
||||
**Implementation approach**:
|
||||
- Extend `TaskManager` to persist "AWAITING_INPUT" tasks
|
||||
- Use SQLite for simple persistence
|
||||
- Clear completed tasks based on configurable retention
|
||||
|
||||
### 5. Error Detection and Pattern Matching
|
||||
|
||||
**Decision**: Pattern match on Superset API error responses
|
||||
|
||||
**Rationale**:
|
||||
- Superset returns specific error format for missing passwords
|
||||
- Pattern: `UNPROCESSABLE ENTITY` 422 with specific JSON body
|
||||
- Error message contains: "Must provide a password for the database"
|
||||
|
||||
**Implementation approach**:
|
||||
- Enhance error handling in `MigrationPlugin.execute()`
|
||||
- Detect specific error pattern and transition to "AWAITING_INPUT"
|
||||
- Include database name in password prompt
|
||||
|
||||
## Technical Validation
|
||||
|
||||
### Existing Codebase Analysis
|
||||
|
||||
**TaskManager** (`backend/src/core/task_manager.py`):
|
||||
- Already supports task creation and status tracking
|
||||
- Needs extension for:
|
||||
- Task history retrieval
|
||||
- New "AWAITING_INPUT" state
|
||||
- Selective persistence
|
||||
|
||||
**MigrationPlugin** (`backend/src/plugins/migration.py`):
|
||||
- Handles migration execution
|
||||
- Needs enhancement for:
|
||||
- Error pattern detection
|
||||
- Task state transitions
|
||||
- Password injection
|
||||
|
||||
**API Routes** (`backend/src/api/routes/`):
|
||||
- Existing structure for REST endpoints
|
||||
- Needs new endpoints:
|
||||
- `GET /tasks` - list tasks
|
||||
- `GET /tasks/{id}/logs` - get task logs
|
||||
- `POST /tasks/{id}/resume` - resume with input
|
||||
|
||||
### Performance Considerations
|
||||
|
||||
**Pagination**: Basic limit/offset pagination will be implemented for task history to handle potential growth of task records.
|
||||
|
||||
**Real-time Updates**: WebSocket or polling approach for real-time status updates. Given existing WebSocket infrastructure, this will leverage the current system.
|
||||
|
||||
**Error Handling**: Robust error handling for:
|
||||
- Invalid task IDs
|
||||
- Missing logs
|
||||
- Password validation failures
|
||||
- Task resumption errors
|
||||
|
||||
## Open Questions (Resolved)
|
||||
|
||||
All questions from the specification's "Clarifications" section have been addressed:
|
||||
|
||||
1. ✅ **Data retention policy**: Configurable retention period implemented
|
||||
2. ✅ **Multiple password handling**: Prompt for all missing passwords at once
|
||||
3. ✅ **Invalid password handling**: Prompt again with error message
|
||||
4. ✅ **Task persistence**: Hybrid approach for "AWAITING_INPUT" tasks
|
||||
5. ✅ **Performance requirements**: Basic pagination support
|
||||
|
||||
## Recommendations
|
||||
|
||||
1. **Implementation Order**:
|
||||
- Backend API extensions first
|
||||
- Task state management second
|
||||
- UI components last
|
||||
|
||||
2. **Testing Focus**:
|
||||
- Error condition testing for password prompts
|
||||
- Task state transition validation
|
||||
- Real-time update verification
|
||||
|
||||
3. **Future Enhancements**:
|
||||
- Advanced filtering for task history
|
||||
- Task search functionality
|
||||
- Export/import of task history
|
||||
|
||||
## Conclusion
|
||||
|
||||
The research phase confirms that the proposed feature can be implemented using the existing architecture with targeted extensions. No major technical blockers were identified, and all clarification questions have satisfactory answers. The implementation can proceed to Phase 1: Design & Contracts.
|
||||
99
specs/008-migration-ui-improvements/spec.md
Normal file
99
specs/008-migration-ui-improvements/spec.md
Normal file
@@ -0,0 +1,99 @@
|
||||
# Feature Specification: Migration UI Improvements
|
||||
|
||||
**Feature Branch**: `008-migration-ui-improvements`
|
||||
**Created**: 2025-12-27
|
||||
**Status**: Draft
|
||||
**Input**: User description: "я хочу доработать интерфейс миграции: 1. Необходимо выводить список последних (и текущую) задач миграции с их статусами и возможностью посмотреть лог миграции 2. Необходимо корректно отрабатывать ошибки миграции БД, например такую [Backend] 2025-12-27 09:47:12,230 - ERROR - [import_dashboard][Failure] First import attempt failed: [API_FAILURE] API error during upload: {"errors": [{"message": "Error importing dashboard: databases/PostgreSQL.yaml: {'_schema': ['Must provide a password for the database']}", "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": {"databases/PostgreSQL.yaml": {"_schema": ["Must provide a password for the database"]}, "issue_codes": [{"code": 1010, "message": "Issue 1010 - Superset encountered an error while running a command."}]}}]} | Context: {'type': 'api_call'} ... Здесь видно, что можно предложить пользователю ввести пароль от БД"
|
||||
|
||||
## User Scenarios & Testing *(mandatory)*
|
||||
|
||||
### User Story 1 - Task History and Status (Priority: P1)
|
||||
|
||||
As a user, I want to see a list of my recent and currently running migration tasks so that I can track progress and review past results.
|
||||
|
||||
**Why this priority**: Essential for visibility into background processes and troubleshooting.
|
||||
|
||||
**Independent Test**: Can be fully tested by starting a migration and verifying it appears in a "Recent Tasks" list with its current status.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** the Migration Dashboard, **When** I open the page, **Then** I see a list of recent migration tasks with their status (Pending, Running, Success, Failed).
|
||||
2. **Given** a running task, **When** the task updates its status on the backend, **Then** the UI reflects the status change in real-time or upon refresh.
|
||||
|
||||
---
|
||||
|
||||
### User Story 2 - Task Log Inspection (Priority: P2)
|
||||
|
||||
As a user, I want to view the detailed logs of any migration task so that I can understand exactly what happened or why it failed.
|
||||
|
||||
**Why this priority**: Critical for debugging failures and confirming success details.
|
||||
|
||||
**Independent Test**: Can be tested by clicking a "View Logs" button for a task and seeing a modal or panel with the task's log entries.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** a migration task in the history list, **When** I click "View Logs", **Then** a detailed log view opens showing timestamped messages from that task.
|
||||
|
||||
---
|
||||
|
||||
### User Story 3 - Interactive Database Password Resolution (Priority: P1)
|
||||
|
||||
As a user, I want the system to detect when a migration fails due to a missing database password and allow me to provide it interactively.
|
||||
|
||||
**Why this priority**: Prevents migration blocks due to Superset's security requirements (passwords are not exported).
|
||||
|
||||
**Independent Test**: Can be tested by triggering a migration that requires a DB password and verifying the UI prompts for the password instead of just failing.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** a migration task that encounters a "Must provide a password for the database" error, **When** the error is detected, **Then** the task status changes to "Awaiting Input" and the UI prompts the user to enter the password for the specific database.
|
||||
2. **Given** a password prompt, **When** I enter the password and submit, **Then** the migration resumes using the provided password.
|
||||
|
||||
---
|
||||
|
||||
### Edge Cases
|
||||
|
||||
- **Multiple Missing Passwords**: How does the system handle multiple databases in one migration needing passwords? (Assumption: Prompt sequentially or as a list).
|
||||
- **Invalid Password Provided**: What happens if the user provides an incorrect password? (Assumption: System should detect the new error and prompt again or fail gracefully).
|
||||
- **Task Manager Restart**: How are tasks persisted across backend restarts? (Assumption: Currently tasks are in-memory; persistence might be needed for "Recent Tasks" to be truly useful).
|
||||
|
||||
## Clarifications
|
||||
|
||||
### Session 2025-12-27
|
||||
|
||||
- Q: What is the expected data retention policy for migration task history? Should the system keep all tasks indefinitely, or is there a retention limit (e.g., last 50 tasks, last 30 days)? → A: Configurable retention period (default: last 50 tasks or 30 days, configurable via settings)
|
||||
- Q: How should the system handle multiple databases requiring passwords in a single migration? Should it prompt for all missing passwords at once, or sequentially as each one is encountered? → A: Prompt for all missing passwords at once in a single form
|
||||
- Q: What should happen when a user provides an incorrect database password? Should the system retry the same password, prompt again with an error message, or fail the migration entirely? → A: Prompt again with an error message explaining the password was incorrect
|
||||
- Q: How should task persistence be handled across backend restarts? Should tasks be persisted to a database, kept in-memory only, or use a hybrid approach? → A: Hybrid approach: persist only tasks that need user input
|
||||
- Q: What performance requirements should the task history API meet? Should it support pagination, filtering, or have specific response time targets? → A: Basic pagination only (limit/offset)
|
||||
|
||||
## Requirements *(mandatory)*
|
||||
|
||||
### Functional Requirements
|
||||
|
||||
- **FR-001**: System MUST provide an API endpoint to retrieve the list of all tasks from `TaskManager`.
|
||||
- **FR-002**: System MUST provide an API endpoint to retrieve full logs for a specific task ID.
|
||||
- **FR-003**: Migration Dashboard MUST display a list of recent tasks including ID, start time, status, and a "View Logs" action.
|
||||
- **FR-004**: `MigrationPlugin` MUST detect specific Superset API errors related to missing database passwords (e.g., matching the `UNPROCESSABLE ENTITY` 422 error with specific JSON body).
|
||||
- **FR-005**: System MUST support a task state "AWAITING_INPUT" (extending `AWAITING_MAPPING`) specifically for interactive password entry.
|
||||
- **FR-006**: UI MUST display an interactive prompt when a task enters "AWAITING_INPUT" state due to missing DB password.
|
||||
- **FR-007**: System MUST allow resuming a task with provided sensitive information (passwords) without persisting them permanently if not required.
|
||||
|
||||
### Non-Functional Requirements
|
||||
|
||||
- **NFR-001**: System MUST implement configurable retention policy for migration task history with default values of last 50 tasks or 30 days, configurable via settings.
|
||||
|
||||
### Key Entities
|
||||
|
||||
- **Task**: Represents a migration execution (Existing: `backend/src/core/task_manager.py:Task`). Needs to ensure logs are accessible.
|
||||
- **MigrationLog**: A single entry in a task's log (Existing: `LogEntry`).
|
||||
- **DatabasePasswordRequest**: A specific type of resolution request sent to the UI.
|
||||
|
||||
## Success Criteria *(mandatory)*
|
||||
|
||||
### Measurable Outcomes
|
||||
|
||||
- **SC-001**: Users can view the status of their last 10 migration tasks directly on the migration page.
|
||||
- **SC-002**: Users can access the full log of any recent task in less than 2 clicks.
|
||||
- **SC-003**: 100% of "Missing Password" errors are caught and presented as interactive prompts rather than fatal migration failures.
|
||||
- **SC-004**: Users can successfully resume a "blocked" migration by providing the required password through the UI.
|
||||
523
specs/008-migration-ui-improvements/tasks.md
Normal file
523
specs/008-migration-ui-improvements/tasks.md
Normal file
@@ -0,0 +1,523 @@
|
||||
# Tasks: Migration UI Improvements
|
||||
|
||||
**Feature**: Migration UI Improvements
|
||||
**Branch**: `008-migration-ui-improvements`
|
||||
**Generated**: 2025-12-27
|
||||
**Status**: Ready for Implementation
|
||||
|
||||
## Overview
|
||||
|
||||
This document provides actionable, dependency-ordered tasks for implementing the Migration UI Improvements feature. All tasks follow the strict checklist format and are organized by user story for independent implementation and testing.
|
||||
|
||||
## Dependencies & Execution Order
|
||||
|
||||
### Phase 1: Setup (Project Initialization)
|
||||
- **Goal**: Initialize project structure and verify prerequisites
|
||||
- **Dependencies**: None
|
||||
- **Test Criteria**: Project structure exists, all dependencies available
|
||||
|
||||
### Phase 2: Foundational (Blocking Prerequisites)
|
||||
- **Goal**: Extend core components to support new functionality
|
||||
- **Dependencies**: Phase 1 complete
|
||||
- **Test Criteria**: Core extensions work independently
|
||||
|
||||
### Phase 3: User Story 1 - Task History and Status (P1)
|
||||
- **Goal**: Display list of recent migration tasks with status
|
||||
- **Dependencies**: Phase 2 complete
|
||||
- **Test Criteria**: Start a migration, verify it appears in task list with correct status
|
||||
|
||||
### Phase 4: User Story 2 - Task Log Inspection (P2)
|
||||
- **Goal**: View detailed logs for any migration task
|
||||
- **Dependencies**: Phase 3 complete (uses task list)
|
||||
- **Test Criteria**: Click "View Logs" button, see modal with task log entries
|
||||
|
||||
### Phase 5: User Story 3 - Interactive Password Resolution (P1)
|
||||
- **Goal**: Handle missing database password errors interactively
|
||||
- **Dependencies**: Phase 2 complete
|
||||
- **Test Criteria**: Trigger migration with missing password, verify UI prompts instead of failing
|
||||
|
||||
### Phase 6: Polish & Cross-Cutting Concerns
|
||||
- **Goal**: Finalize integration, add tests, documentation
|
||||
- **Dependencies**: All previous phases complete
|
||||
- **Test Criteria**: All user stories work together, tests pass
|
||||
|
||||
## Parallel Execution Opportunities
|
||||
|
||||
**Within Phase 3 (US1)**:
|
||||
- Backend API endpoints can be implemented in parallel with frontend components
|
||||
- Task list API and Task log API are independent
|
||||
|
||||
**Within Phase 5 (US3)**:
|
||||
- Password prompt UI can be developed independently of backend resumption logic
|
||||
- Error detection in MigrationPlugin can be tested separately
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Setup (Project Initialization)
|
||||
|
||||
- [ ] T001 Verify project structure and create missing directories
|
||||
- Check backend/src/api/routes/ exists
|
||||
- Check backend/src/models/ exists
|
||||
- Check frontend/src/components/ exists
|
||||
- Check frontend/src/services/ exists
|
||||
- Create any missing directories per plan.md structure
|
||||
|
||||
- [ ] T002 Verify Python 3.9+ and Node.js 18+ dependencies
|
||||
- Check Python version >= 3.9
|
||||
- Check Node.js version >= 18
|
||||
- Verify FastAPI, Pydantic, SQLAlchemy installed
|
||||
- Verify SvelteKit, Tailwind CSS configured
|
||||
|
||||
- [ ] T003 Initialize task tracking for this implementation
|
||||
- Create implementation log in backend/logs/implementation.log
|
||||
- Document start time and initial state
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Foundational (Blocking Prerequisites)
|
||||
|
||||
### Backend Core Extensions
|
||||
|
||||
- [ ] T004 [P] Extend TaskStatus enum in backend/src/core/task_manager.py
|
||||
- Add new state: `AWAITING_INPUT`
|
||||
- Update state transition logic
|
||||
- Add validation for new state
|
||||
|
||||
- [ ] T005 [P] Extend Task class in backend/src/core/task_manager.py
|
||||
- Add `input_required: bool` field
|
||||
- Add `input_request: Dict | None` field
|
||||
- Add `logs: List[LogEntry]` field
|
||||
- Update constructor and validation
|
||||
|
||||
- [ ] T006 [P] Implement task history retrieval in TaskManager
|
||||
- Add `get_tasks(limit, offset, status)` method
|
||||
- Add `get_task_logs(task_id)` method
|
||||
- Add `persist_awaiting_input_tasks()` method
|
||||
- Add `load_persisted_tasks()` method
|
||||
|
||||
- [ ] T007 [P] Create SQLite schema for persistent tasks
|
||||
- Create migration script for `persistent_tasks` table
|
||||
- Add indexes for status and created_at
|
||||
- Test schema creation
|
||||
|
||||
- [ ] T008 [P] Extend MigrationPlugin error handling
|
||||
- Add pattern matching for Superset password errors
|
||||
- Detect "Must provide a password for the database" message
|
||||
- Extract database name from error context
|
||||
- Transition task to AWAITING_INPUT state
|
||||
|
||||
- [ ] T009 [P] Implement password injection mechanism
|
||||
- Add method to resume task with credentials
|
||||
- Validate password format
|
||||
- Handle multiple database passwords
|
||||
- Update task context with credentials
|
||||
|
||||
### Backend API Routes
|
||||
|
||||
- [ ] T010 [P] Create backend/src/api/routes/tasks.py
|
||||
- Create file with basic route definitions
|
||||
- Add route handlers with stubbed responses
|
||||
- Register router in __init__.py (covered by T011)
|
||||
- Add basic error handling structure
|
||||
|
||||
- [ ] T011 [P] Add task routes to backend/src/api/routes/__init__.py
|
||||
- Import and register tasks router
|
||||
- Verify route registration
|
||||
|
||||
### Frontend Services
|
||||
|
||||
- [ ] T012 [P] Create frontend/src/services/taskService.js
|
||||
- Implement `getTasks(limit, offset, status)` function
|
||||
- Implement `getTaskLogs(taskId)` function
|
||||
- Implement `resumeTask(taskId, passwords)` function
|
||||
- Add proper error handling
|
||||
|
||||
### Frontend Components
|
||||
|
||||
- [ ] T013 [P] Create frontend/src/components/TaskHistory.svelte
|
||||
- Display task list with ID, status, start time
|
||||
- Add "View Logs" action button
|
||||
- Handle empty state
|
||||
- Support real-time updates
|
||||
|
||||
- [ ] T014 [P] Create frontend/src/components/TaskLogViewer.svelte
|
||||
- Display modal with log entries
|
||||
- Show timestamp, level, message, context
|
||||
- Auto-scroll to latest logs
|
||||
- Close button functionality
|
||||
|
||||
- [ ] T015 [P] Create frontend/src/components/PasswordPrompt.svelte
|
||||
- Display database name and error message
|
||||
- Show password input fields (dynamic for multiple databases)
|
||||
- Submit button with validation
|
||||
- Error message display for invalid passwords
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: User Story 1 - Task History and Status (P1)
|
||||
|
||||
**Goal**: As a user, I want to see a list of my recent and currently running migration tasks so that I can track progress and review past results.
|
||||
|
||||
**Independent Test**: Start a migration and verify it appears in a "Recent Tasks" list with its current status.
|
||||
|
||||
### Backend Implementation
|
||||
|
||||
- [ ] T016 [US1] Implement GET /api/tasks endpoint logic
|
||||
- Query TaskManager for task list
|
||||
- Apply limit/offset pagination
|
||||
- Apply status filter if provided
|
||||
- Return TaskListResponse format
|
||||
|
||||
- [ ] T017 [US1] Implement GET /api/tasks/{task_id}/logs endpoint logic
|
||||
- Validate task_id exists
|
||||
- Retrieve logs from TaskManager
|
||||
- Return TaskLogResponse format
|
||||
- Handle not found errors
|
||||
|
||||
- [ ] T018 [US1] Add task status update WebSocket support
|
||||
- Extend existing WebSocket infrastructure
|
||||
- Broadcast status changes to /ws/tasks/{task_id}/status
|
||||
- Broadcast log updates to /ws/tasks/{task_id}/logs
|
||||
|
||||
### Frontend Implementation
|
||||
|
||||
- [ ] T019 [US1] Integrate TaskHistory component into migration page
|
||||
- Add component to frontend/src/routes/migration/+page.svelte
|
||||
- Fetch tasks on page load
|
||||
- Handle loading state
|
||||
- Display error messages
|
||||
|
||||
- [ ] T020 [US1] Implement real-time status updates
|
||||
- Subscribe to WebSocket channel for task updates
|
||||
- Update task list on status change
|
||||
- Add visual indicators for running tasks
|
||||
|
||||
- [ ] T021 [US1] Add task list pagination
|
||||
- Implement "Load More" button
|
||||
- Handle offset updates
|
||||
- Maintain current task list while loading more
|
||||
|
||||
### Testing
|
||||
|
||||
- [ ] T022 [US1] Test task list retrieval
|
||||
- Create test migration tasks
|
||||
- Verify API returns correct format
|
||||
- Verify pagination works
|
||||
- Verify status filtering works
|
||||
|
||||
- [ ] T023 [US1] Test real-time updates
|
||||
- Start a migration
|
||||
- Verify task appears in list
|
||||
- Verify status updates in real-time
|
||||
- Verify WebSocket messages are received
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: User Story 2 - Task Log Inspection (P2)
|
||||
|
||||
**Goal**: As a user, I want to view the detailed logs of any migration task so that I can understand exactly what happened or why it failed.
|
||||
|
||||
**Independent Test**: Click a "View Logs" button for a task and see a modal or panel with the task's log entries.
|
||||
|
||||
### Backend Implementation
|
||||
|
||||
- [ ] T024 [P] [US2] Enhance log storage in TaskManager
|
||||
- Ensure logs are retained for all task states
|
||||
- Add log context preservation
|
||||
- Implement log cleanup on task retention
|
||||
|
||||
### Frontend Implementation
|
||||
|
||||
- [ ] T025 [US2] Implement TaskLogViewer modal integration
|
||||
- Add "View Logs" button to TaskHistory component
|
||||
- Wire button to open TaskLogViewer modal
|
||||
- Pass task_id to modal
|
||||
- Fetch logs when modal opens
|
||||
|
||||
- [ ] T026 [US2] Implement log display formatting
|
||||
- Color-code by log level (INFO=blue, WARNING=yellow, ERROR=red)
|
||||
- Format timestamps nicely
|
||||
- Display context as JSON or formatted text
|
||||
- Auto-scroll to bottom on new logs
|
||||
|
||||
- [ ] T027 [US2] Add log refresh functionality
|
||||
- Add refresh button in modal
|
||||
- Poll for new logs every 5 seconds while modal open
|
||||
- Show "new logs available" indicator
|
||||
|
||||
### Testing
|
||||
|
||||
- [ ] T028 [US2] Test log retrieval
|
||||
- Create task with various log entries
|
||||
- Verify logs are returned correctly
|
||||
- Verify log context is preserved
|
||||
- Test with large log files
|
||||
|
||||
- [ ] T029 [US2] Test log viewer UI
|
||||
- Open logs for completed task
|
||||
- Open logs for running task
|
||||
- Verify formatting and readability
|
||||
- Test refresh functionality
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: User Story 3 - Interactive Password Resolution (P1)
|
||||
|
||||
**Goal**: As a user, I want the system to detect when a migration fails due to a missing database password and allow me to provide it interactively.
|
||||
|
||||
**Independent Test**: Trigger a migration that requires a DB password and verify the UI prompts for the password instead of just failing.
|
||||
|
||||
### Backend Implementation
|
||||
|
||||
- [ ] T030 [US3] Implement password error detection in MigrationPlugin
|
||||
- Add error pattern matching for Superset 422 errors
|
||||
- Extract database name from error message
|
||||
- Create DatabasePasswordRequest object
|
||||
- Transition task to AWAITING_INPUT state
|
||||
|
||||
- [ ] T031 [US3] Implement task resumption with passwords
|
||||
- Add validation for password format
|
||||
- Inject passwords into migration context
|
||||
- Resume task execution from failure point
|
||||
- Handle multiple database passwords
|
||||
|
||||
- [ ] T032 [US3] Add task persistence for AWAITING_INPUT state
|
||||
- Persist task context and input_request
|
||||
- Load persisted tasks on backend restart
|
||||
- Clear persisted data on task completion
|
||||
- Implement retention policy
|
||||
|
||||
### Frontend Implementation
|
||||
|
||||
- [ ] T033 [US3] Implement password prompt detection
|
||||
- Monitor task status changes
|
||||
- Detect AWAITING_INPUT state
|
||||
- Show notification to user
|
||||
- Update task list to show "Requires Input" indicator
|
||||
|
||||
- [ ] T034 [US3] Wire PasswordPrompt to task resumption
|
||||
- Connect form submission to taskService.resumeTask()
|
||||
- Handle success (close prompt, resume task)
|
||||
- Handle failure (show error, keep prompt open)
|
||||
- Support multiple database inputs
|
||||
|
||||
- [ ] T035 [US3] Add visual indicators for password-required tasks
|
||||
- Highlight tasks needing input in task list
|
||||
- Add badge or icon
|
||||
- Show count of pending inputs
|
||||
|
||||
### Testing
|
||||
|
||||
- [ ] T036 [US3] Test password error detection
|
||||
- Mock Superset password error
|
||||
- Verify error is detected
|
||||
- Verify task transitions to AWAITING_INPUT
|
||||
- Verify DatabasePasswordRequest is created
|
||||
|
||||
- [ ] T037 [US3] Test password resumption
|
||||
- Provide correct password
|
||||
- Verify task resumes
|
||||
- Verify task completes successfully
|
||||
- Test with incorrect password (should prompt again)
|
||||
|
||||
- [ ] T038 [US3] Test persistence across restarts
|
||||
- Create AWAITING_INPUT task
|
||||
- Restart backend
|
||||
- Verify task is loaded
|
||||
- Verify password prompt still works
|
||||
|
||||
- [ ] T039 [US3] Test multiple database passwords
|
||||
- Create migration requiring 2+ databases
|
||||
- Verify all databases listed in prompt
|
||||
- Verify all passwords submitted
|
||||
- Verify task resumes with all credentials
|
||||
|
||||
---
|
||||
|
||||
## Phase 6: Polish & Cross-Cutting Concerns
|
||||
|
||||
### Integration & E2E
|
||||
|
||||
- [ ] T040 [P] Integrate all components on migration page
|
||||
- Add TaskHistory to migration page
|
||||
- Add password prompt handling
|
||||
- Ensure WebSocket connections work
|
||||
- Test complete user flow
|
||||
|
||||
- [ ] T041 [P] Add loading states and error boundaries
|
||||
- Show loading spinners during API calls
|
||||
- Handle API errors gracefully
|
||||
- Show user-friendly error messages
|
||||
- Add retry mechanisms
|
||||
|
||||
### Configuration & Security
|
||||
|
||||
- [ ] T042 [P] Add configuration options
|
||||
- Task retention days (default: 30)
|
||||
- Task retention limit (default: 100)
|
||||
- Pagination limits (default: 10, max: 50)
|
||||
- Password complexity requirements
|
||||
|
||||
- [ ] T043 [P] Security review
|
||||
- Verify passwords are not logged
|
||||
- Verify passwords are not stored permanently
|
||||
- Verify input validation on all endpoints
|
||||
- Check for SQL injection vulnerabilities
|
||||
|
||||
### Documentation
|
||||
|
||||
- [ ] T044 [P] Update API documentation
|
||||
- Add new endpoints to OpenAPI spec
|
||||
- Update API contract examples
|
||||
- Document WebSocket channels
|
||||
|
||||
- [ ] T045 [P] Update quickstart guide
|
||||
- Add task history section
|
||||
- Add log viewing section
|
||||
- Add password prompt section
|
||||
- Add troubleshooting tips
|
||||
|
||||
### Testing & Quality
|
||||
|
||||
- [ ] T046 [P] Write unit tests for backend
|
||||
- Test TaskManager extensions
|
||||
- Test MigrationPlugin error detection
|
||||
- Test API endpoints
|
||||
- Test password validation
|
||||
|
||||
- [ ] T047 [P] Write unit tests for frontend
|
||||
- Test taskService functions
|
||||
- Test TaskHistory component
|
||||
- Test TaskLogViewer component
|
||||
- Test PasswordPrompt component
|
||||
|
||||
- [ ] T048 [P] Write integration tests
|
||||
- Test complete password flow
|
||||
- Test task persistence
|
||||
- Test WebSocket updates
|
||||
- Test error recovery
|
||||
|
||||
- [ ] T049 [P] Run full test suite
|
||||
- Execute pytest for backend
|
||||
- Execute frontend tests
|
||||
- Fix any failing tests
|
||||
- Verify all acceptance criteria met
|
||||
|
||||
- [ ] T050 [P] Final validation
|
||||
- Verify all user stories work independently
|
||||
- Verify all acceptance scenarios pass
|
||||
- Check performance (pagination, real-time updates)
|
||||
- Review code quality and security
|
||||
|
||||
---
|
||||
|
||||
## Implementation Strategy
|
||||
|
||||
### MVP Approach (Recommended)
|
||||
|
||||
**Focus on User Story 1 (P1) first**:
|
||||
1. Complete Phase 1 (Setup)
|
||||
2. Complete Phase 2 (Foundational) - Backend only
|
||||
3. Complete Phase 3 (US1) - Task History
|
||||
4. **MVP Complete**: Users can view task list and status
|
||||
|
||||
**Then add User Story 3 (P1)**:
|
||||
5. Complete Phase 5 (US3) - Password Resolution
|
||||
6. **Enhanced MVP**: Users can handle password errors
|
||||
|
||||
**Finally add User Story 2 (P2)**:
|
||||
7. Complete Phase 4 (US2) - Log Inspection
|
||||
8. **Full Feature**: Complete UI improvements
|
||||
|
||||
### Parallel Development
|
||||
|
||||
**Backend Team**:
|
||||
- T004-T009 (Core extensions)
|
||||
- T010-T011 (API routes)
|
||||
- T016-T018 (US1 backend)
|
||||
- T024 (US2 backend)
|
||||
- T030-T032 (US3 backend)
|
||||
|
||||
**Frontend Team**:
|
||||
- T012 (Services)
|
||||
- T013-T015 (Components)
|
||||
- T019-T021 (US1 frontend)
|
||||
- T025-T027 (US2 frontend)
|
||||
- T033-T035 (US3 frontend)
|
||||
|
||||
**Testing Team**:
|
||||
- T022-T023 (US1 tests)
|
||||
- T028-T029 (US2 tests)
|
||||
- T036-T039 (US3 tests)
|
||||
- T046-T050 (Integration & final)
|
||||
|
||||
### Risk Mitigation
|
||||
|
||||
1. **Superset API Changes**: Pattern matching may need updates if Superset changes error format
|
||||
- Mitigation: Make pattern matching configurable
|
||||
|
||||
2. **WebSocket Scalability**: Real-time updates may impact performance with many users
|
||||
- Mitigation: Use polling fallback, implement rate limiting
|
||||
|
||||
3. **Password Security**: Handling sensitive data requires careful implementation
|
||||
- Mitigation: Never log passwords, clear from memory after use, use secure transport
|
||||
|
||||
4. **Task Persistence**: SQLite may not scale for high-volume task history
|
||||
- Mitigation: Configurable retention, consider migration to full database later
|
||||
|
||||
---
|
||||
|
||||
## Success Criteria Validation
|
||||
|
||||
### User Story 1 - Task History and Status
|
||||
- [ ] SC-001: Users can view status of last 10 migration tasks on migration page
|
||||
- [ ] Real-time status updates work
|
||||
- [ ] Task list shows ID, status, start time, actions
|
||||
|
||||
### User Story 2 - Task Log Inspection
|
||||
- [ ] SC-002: Users can access full log in less than 2 clicks
|
||||
- [ ] Log viewer shows timestamped messages
|
||||
- [ ] Modal/panel works correctly
|
||||
|
||||
### User Story 3 - Interactive Password Resolution
|
||||
- [ ] SC-003: 100% of "Missing Password" errors caught and presented as prompts
|
||||
- [ ] SC-004: Users can resume blocked migration by providing password
|
||||
- [ ] Task state transitions work correctly
|
||||
- [ ] Multiple database passwords supported
|
||||
|
||||
### Cross-Cutting Requirements
|
||||
- [ ] All API endpoints follow contract specifications
|
||||
- [ ] All components follow existing patterns
|
||||
- [ ] Security requirements met
|
||||
- [ ] Performance acceptable (pagination, real-time updates)
|
||||
- [ ] Configuration options available
|
||||
- [ ] Documentation complete
|
||||
|
||||
---
|
||||
|
||||
## Task Summary
|
||||
|
||||
**Total Tasks**: 50
|
||||
**Setup Phase**: 3 tasks
|
||||
**Foundational Phase**: 12 tasks
|
||||
**US1 Phase**: 8 tasks
|
||||
**US2 Phase**: 6 tasks
|
||||
**US3 Phase**: 10 tasks
|
||||
**Polish Phase**: 11 tasks
|
||||
|
||||
**Parallel Opportunities**: 15+ tasks can be developed in parallel within their phases
|
||||
|
||||
**MVP Scope**: Tasks 1-23 (Setup, Foundational, US1) - ~23 tasks
|
||||
**Full Feature**: All 50 tasks
|
||||
|
||||
---
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. **Review this tasks.md** with team leads
|
||||
2. **Assign tasks** to backend/frontend developers
|
||||
3. **Start Phase 1** immediately (Setup)
|
||||
4. **Schedule daily standups** to track progress
|
||||
5. **Use this document** as the single source of truth for implementation
|
||||
|
||||
**Ready for Implementation**: ✅ All design artifacts complete, tasks generated, dependencies mapped.
|
||||
@@ -20,7 +20,7 @@ from .utils.logger import SupersetLogger
|
||||
class SupersetConfig(BaseModel):
|
||||
env: str = Field(..., description="Название окружения (например, dev, prod).")
|
||||
base_url: str = Field(..., description="Базовый URL Superset API, включая /api/v1.")
|
||||
auth: Dict[str, str] = Field(..., description="Словарь с данными для аутентификации (provider, username, password, refresh).")
|
||||
auth: Dict[str, Any] = Field(..., description="Словарь с данными для аутентификации (provider, username, password, refresh).")
|
||||
verify_ssl: bool = Field(True, description="Флаг для проверки SSL-сертификатов.")
|
||||
timeout: int = Field(30, description="Таймаут в секундах для HTTP-запросов.")
|
||||
logger: Optional[SupersetLogger] = Field(None, description="Экземпляр логгера для логирования.")
|
||||
@@ -32,7 +32,7 @@ class SupersetConfig(BaseModel):
|
||||
# @THROW: ValueError - Если отсутствуют обязательные поля.
|
||||
# @PARAM: v (Dict[str, str]) - Значение поля auth.
|
||||
@validator('auth')
|
||||
def validate_auth(cls, v: Dict[str, str]) -> Dict[str, str]:
|
||||
def validate_auth(cls, v: Dict[str, Any]) -> Dict[str, Any]:
|
||||
required = {'provider', 'username', 'password', 'refresh'}
|
||||
if not required.issubset(v.keys()):
|
||||
raise ValueError(f"Словарь 'auth' должен содержать поля: {required}. Отсутствующие: {required - v.keys()}")
|
||||
|
||||
188
test_migration_debug.py
Normal file
188
test_migration_debug.py
Normal file
@@ -0,0 +1,188 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Debug script to test the migration flow and identify where the issue occurs.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add project root to sys.path
|
||||
project_root = Path(__file__).parent
|
||||
sys.path.insert(0, str(project_root))
|
||||
|
||||
def test_plugin_loader():
|
||||
"""Test if the plugin loader can find the migration plugin."""
|
||||
print("=== Testing Plugin Loader ===")
|
||||
try:
|
||||
from backend.src.core.plugin_loader import PluginLoader
|
||||
from pathlib import Path
|
||||
|
||||
plugin_dir = Path(__file__).parent / "backend" / "src" / "plugins"
|
||||
print(f"Plugin directory: {plugin_dir}")
|
||||
print(f"Directory exists: {plugin_dir.exists()}")
|
||||
|
||||
if plugin_dir.exists():
|
||||
print(f"Files in plugin directory: {list(plugin_dir.iterdir())}")
|
||||
|
||||
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
|
||||
configs = plugin_loader.get_all_plugin_configs()
|
||||
|
||||
print(f"Found {len(configs)} plugins:")
|
||||
for config in configs:
|
||||
print(f" - {config.name} (ID: {config.id})")
|
||||
|
||||
migration_plugin = plugin_loader.get_plugin("superset-migration")
|
||||
if migration_plugin:
|
||||
print("✓ Migration plugin found")
|
||||
print(f" Name: {migration_plugin.name}")
|
||||
print(f" Description: {migration_plugin.description}")
|
||||
print(f" Schema: {migration_plugin.get_schema()}")
|
||||
else:
|
||||
print("✗ Migration plugin NOT found")
|
||||
|
||||
return migration_plugin is not None
|
||||
|
||||
except Exception as e:
|
||||
print(f"✗ Plugin loader test failed: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
def test_task_manager():
|
||||
"""Test if the task manager can create tasks."""
|
||||
print("\n=== Testing Task Manager ===")
|
||||
try:
|
||||
from backend.src.core.plugin_loader import PluginLoader
|
||||
from backend.src.core.task_manager import TaskManager
|
||||
from pathlib import Path
|
||||
|
||||
plugin_dir = Path(__file__).parent / "backend" / "src" / "plugins"
|
||||
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
|
||||
task_manager = TaskManager(plugin_loader)
|
||||
|
||||
# Test task creation
|
||||
test_params = {
|
||||
"from_env": "dev",
|
||||
"to_env": "prod",
|
||||
"dashboard_regex": ".*",
|
||||
"replace_db_config": False
|
||||
}
|
||||
|
||||
print(f"Creating test task with params: {test_params}")
|
||||
import asyncio
|
||||
task = asyncio.run(task_manager.create_task("superset-migration", test_params))
|
||||
print(f"✓ Task created successfully: {task.id}")
|
||||
print(f" Status: {task.status}")
|
||||
print(f" Plugin ID: {task.plugin_id}")
|
||||
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"✗ Task manager test failed: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
def test_migration_endpoint():
|
||||
"""Test the migration endpoint directly."""
|
||||
print("\n=== Testing Migration Endpoint ===")
|
||||
try:
|
||||
from backend.src.api.routes.migration import execute_migration
|
||||
from backend.src.models.dashboard import DashboardSelection
|
||||
from backend.src.dependencies import get_config_manager, get_task_manager
|
||||
|
||||
# Create a test selection
|
||||
selection = DashboardSelection(
|
||||
selected_ids=[1, 2, 3],
|
||||
source_env_id="ss2",
|
||||
target_env_id="ss1"
|
||||
)
|
||||
|
||||
print(f"Testing migration with selection: {selection.dict()}")
|
||||
|
||||
# This would normally be called by FastAPI with dependencies
|
||||
# For testing, we'll call it directly
|
||||
config_manager = get_config_manager()
|
||||
task_manager = get_task_manager()
|
||||
|
||||
import asyncio
|
||||
|
||||
# We need to ensure TaskManager uses the SAME loop as run_and_wait
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
# Re-initialize TaskManager with the new loop
|
||||
task_manager.loop = loop
|
||||
|
||||
async def run_and_wait():
|
||||
result = await execute_migration(selection, config_manager, task_manager)
|
||||
print(f"✓ Migration endpoint test successful: {result}")
|
||||
|
||||
task_id = result["task_id"]
|
||||
print(f"Waiting for task {task_id} to complete...")
|
||||
|
||||
for i in range(20):
|
||||
await asyncio.sleep(1)
|
||||
task = task_manager.get_task(task_id)
|
||||
print(f" [{i}] Task status: {task.status}")
|
||||
if task.status in ["SUCCESS", "FAILED"]:
|
||||
print(f"Task finished with status: {task.status}")
|
||||
for log in task.logs:
|
||||
print(f" [{log.level}] {log.message}")
|
||||
return task.status == "SUCCESS"
|
||||
|
||||
print("Task timed out. Current status: " + task_manager.get_task(task_id).status)
|
||||
return False
|
||||
|
||||
try:
|
||||
return loop.run_until_complete(run_and_wait())
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
except Exception as e:
|
||||
print(f"✗ Migration endpoint test failed: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
def main():
|
||||
"""Run all tests."""
|
||||
print("Migration Debug Test Script")
|
||||
print("=" * 50)
|
||||
|
||||
results = []
|
||||
|
||||
# Test 1: Plugin Loader
|
||||
results.append(("Plugin Loader", test_plugin_loader()))
|
||||
|
||||
# Test 2: Task Manager
|
||||
results.append(("Task Manager", test_task_manager()))
|
||||
|
||||
# Test 3: Migration Endpoint
|
||||
results.append(("Migration Endpoint", test_migration_endpoint()))
|
||||
|
||||
# Summary
|
||||
print("\n" + "=" * 50)
|
||||
print("TEST RESULTS SUMMARY:")
|
||||
print("=" * 50)
|
||||
|
||||
passed = 0
|
||||
for test_name, result in results:
|
||||
status = "PASS" if result else "FAIL"
|
||||
print(f"{test_name}: {status}")
|
||||
if result:
|
||||
passed += 1
|
||||
|
||||
print(f"\nTotal: {passed}/{len(results)} tests passed")
|
||||
|
||||
if passed == len(results):
|
||||
print("✓ All tests passed! The migration system appears to be working correctly.")
|
||||
else:
|
||||
print("✗ Some tests failed. Check the logs above for details.")
|
||||
|
||||
return passed == len(results)
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = main()
|
||||
sys.exit(0 if success else 1)
|
||||
Reference in New Issue
Block a user