16 Commits

SHA1 Message Date
a032fe8457 Password promt 2025-12-30 17:21:12 +03:00
4c9d554432 TaskManager refactor 2025-12-29 10:13:37 +03:00
6962a78112 mappings+migrate 2025-12-27 10:16:41 +03:00
3d75a21127 tech_lead / coder 2roles 2025-12-27 08:02:59 +03:00
07914c8728 semantic add 2025-12-27 07:14:08 +03:00
cddc259b76 new loggers logic in constitution 2025-12-27 06:51:28 +03:00
dcbf0a7d7f tasks ready 2025-12-27 06:37:03 +03:00
65f61c1f80 Merge branch '001-migration-ui-redesign' into master 2025-12-27 05:58:35 +03:00
cb7386f274 superset_tool logger rework 2025-12-27 05:53:30 +03:00
83e34e1799 feat(logging): implement configurable belief state logging
- Add LoggingConfig model and logging field to GlobalSettings
- Implement belief_scope context manager for structured logging
- Add configure_logger for dynamic level and file rotation settings
- Add logging configuration UI to Settings page
- Update ConfigManager to apply logging settings on initialization and updates
2025-12-27 05:39:33 +03:00
d197303b9f 006 plan ready 2025-12-26 19:36:49 +03:00
a43f8fb021 001-migration-ui-redesign (#3)
Reviewed-on: #3
2025-12-26 18:17:58 +03:00
4aa01b6470 Merge branch 'migration' into 001-migration-ui-redesign 2025-12-26 18:16:24 +03:00
35b423979d spec rules 2025-12-25 22:28:42 +03:00
2ffc3cc68f feat(migration): implement interactive mapping resolution workflow
- Add SQLite database integration for environments and mappings
- Update TaskManager to support pausing tasks (AWAITING_MAPPING)
- Modify MigrationPlugin to detect missing mappings and wait for resolution
- Add frontend UI for handling missing mappings interactively
- Create dedicated migration routes and API endpoints
- Update .gitignore and project documentation
2025-12-25 22:27:29 +03:00
4448352ef9 Merge pull request '001-fix-ui-ws-validation' (#2) from 001-fix-ui-ws-validation into migration
Reviewed-on: #2
2025-12-21 00:29:19 +03:00
98 changed files with 8149 additions and 600 deletions

77
.gitignore vendored
View File

@@ -1,21 +1,64 @@
-*__pycache__*
-*.ps1
-keyring passwords.py
-*logs*
-*github*
-*venv*
-*git*
-*tech_spec*
-dashboards
-# Python specific
-*.pyc
-dist/
-*.egg-info/
-# Node.js specific
-node_modules/
-backend/backups/*
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.venv
venv/
ENV/
env/
backend/backups/*
# Node.js
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.svelte-kit/
.vite/
build/
dist/
.env*
config.json
# Logs
*.log
backend/backend.log
# OS
.DS_Store
Thumbs.db
# IDE
.vscode/
.idea/
*.swp
*.swo
# Project specific
*.ps1
keyring passwords.py
*github*
*git*
*tech_spec*
dashboards
backend/mappings.db

View File

@@ -6,9 +6,16 @@ Auto-generated from all feature plans. Last updated: 2025-12-19
- Python 3.9+, Node.js 18+ + `uvicorn`, `npm`, `bash` (003-project-launch-script)
- Python 3.9+, Node.js 18+ + SvelteKit, FastAPI, Tailwind CSS (inferred from existing frontend) (004-integrate-svelte-kit)
- N/A (Frontend integration) (004-integrate-svelte-kit)
-- Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic (001-fix-ui-ws-validation)
- Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic (005-fix-ui-ws-validation)
- N/A (Configuration based) (005-fix-ui-ws-validation)
- Filesystem (plugins, logs, backups), SQLite (optional, for job history if needed) (005-fix-ui-ws-validation)
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS (007-migration-dashboard-grid)
- N/A (Superset API integration) (007-migration-dashboard-grid)
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, Superset API (007-migration-dashboard-grid)
- N/A (Superset API integration - read-only for metadata) (007-migration-dashboard-grid)
- Python 3.9+ (backend), Node.js 18+ (frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API (008-migration-ui-improvements)
- SQLite (optional for job history), existing database for mappings (008-migration-ui-improvements)
- Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API (008-migration-ui-improvements)
- Python 3.9+ (Backend), Node.js 18+ (Frontend Build) (001-plugin-arch-svelte-ui)
@@ -29,9 +36,9 @@ cd src; pytest; ruff check .
Python 3.9+ (Backend), Node.js 18+ (Frontend Build): Follow standard conventions
## Recent Changes
-- 001-fix-ui-ws-validation: Added Python 3.9+ (Backend), Node.js 18+ (Frontend Build)
-- 005-fix-ui-ws-validation: Added Python 3.9+ (Backend), Node.js 18+ (Frontend Build)
-- 005-fix-ui-ws-validation: Added Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic
- 008-migration-ui-improvements: Added Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API
- 008-migration-ui-improvements: Added Python 3.9+ (backend), Node.js 18+ (frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API
- 007-migration-dashboard-grid: Added Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, Superset API
<!-- MANUAL ADDITIONS START -->

25
.kilocodemodes Normal file
View File

@@ -0,0 +1,25 @@
customModes:
- slug: tester
name: Tester
description: QA and Plan Verification Specialist
roleDefinition: >-
You are Kilo Code, acting as a QA and Verification Specialist. Your primary goal is to validate that the project implementation aligns strictly with the defined specifications and task plans.
Your responsibilities include:
- Reading and analyzing task plans and specifications (typically in the `specs/` directory).
- Verifying that implemented code matches the requirements.
- Executing tests and validating system behavior via CLI or Browser.
- Updating the status of tasks in the plan files (e.g., marking checkboxes [x]) as they are verified.
- Identifying and reporting missing features or bugs.
whenToUse: >-
Use this mode when you need to audit the progress of a project, verify completed tasks against the plan, run quality assurance checks, or update the status of task lists in specification documents.
groups:
- read
- edit
- command
- browser
- mcp
customInstructions: >-
1. Always begin by loading the relevant plan or task list from the `specs/` directory.
2. Do not assume a task is done just because it is checked; verify the code or functionality first if asked to audit.
3. When updating task lists, ensure you only mark items as complete if you have verified them.

View File

@@ -1,29 +1,99 @@
-# ss-tools Constitution
<!--
SYNC IMPACT REPORT
Version: 1.5.0 (Fractal Complexity Limit)
Changes:
- Added Section VI (Fractal Complexity Limit) to enforce strict module (~300 lines) and function (~30-50 lines) size limits.
- Aims to maintain semantic coherence and avoid "Attention Sink".
Templates Status:
- .specify/templates/plan-template.md: ✅ Aligned.
- .specify/templates/spec-template.md: ✅ Aligned.
- .specify/templates/tasks-arch-template.md: ✅ Aligned (New role-based split).
- .specify/templates/tasks-dev-template.md: ✅ Aligned (New role-based split).
-->
# Semantic Code Generation Constitution
## Core Principles
-### I. SPA-First Architecture
-The frontend MUST be a Static Single Page Application (SPA) served by the Python backend. No Node.js server is permitted in production. The backend serves the `index.html` entry point for all non-API routes.
-### II. API-Driven Communication
-All data retrieval and state changes MUST be performed via the backend REST API or WebSockets. The frontend should not access the database or filesystem directly.
-### III. Modern Stack Consistency
-The project strictly uses SvelteKit (Frontend), FastAPI (Backend), and Tailwind CSS (Styling). New dependencies must be justified and approved.
-### IV. Semantic Protocol Adherence (GRACE-Poly)
-All code generation and modification MUST adhere to the Semantic Protocol defined in `semantic_protocol.md`.
-- **Anchors**: Use `[DEF:id:Type]` and `[/DEF:id]` to define semantic boundaries.
-- **Contracts**: Define `@PRE` and `@POST` conditions in headers.
-- **Logging**: Use structured logging with `[AnchorID][State]` format.
-- **Immutability**: Respect architectural decisions in headers.
### I. Causal Validity (Contracts First)
Semantic definitions (Contracts) must ALWAYS precede implementation code. Logic is downstream of definition. We define the structure and constraints (`[DEF]`, `@PRE`, `@POST`) before writing the executable logic. This ensures that the "what" and "why" govern the "how".
### II. Immutability of Architecture
Once defined, architectural decisions in the Module Header (`@LAYER`, `@INVARIANT`, `@CONSTRAINT`) are treated as immutable constraints for that module. Changes to these require an explicit refactoring step, not ad-hoc modification during implementation.
### III. Semantic Format Compliance
All output must strictly follow the `[DEF]` / `[/DEF]` anchor syntax with specific Metadata Tags (`@KEY`) and Graph Relations (`@RELATION`). **Crucially, the closing anchor must strictly match the full content of the opening anchor (e.g., `[DEF:identifier:Type]` must close with `[/DEF:identifier:Type]`).**
**Standardized Graph Relations**
To ensure the integrity of the Semantic Graph, `@RELATION` must use a strict taxonomy:
- `DEPENDS_ON` (Structural dependency)
- `CALLS` (Flow control)
- `CREATES` (Instantiation)
- `INHERITS_FROM` / `IMPLEMENTS` (OOP hierarchy)
- `READS_STATE` / `WRITES_STATE` (Data flow)
- `DISPATCHES` / `HANDLES` (Event flow)
Ad-hoc relationships are forbidden. This structure is non-negotiable as it ensures the codebase remains machine-readable, fractal-structured, and optimized for Sparse Attention navigation by AI agents.
### IV. Design by Contract (DbC)
Contracts are the Source of Truth. Functions and Classes must define their purpose, specifications, and constraints (`@PRE`, `@POST`, `@THROW`) in the metadata block before implementation. Implementation must strictly satisfy these contracts.
### V. Belief State Logging
Logs must define the agent's internal state for debugging and coherence checks. We use a strict format: `[{ANCHOR_ID}][{STATE}] {MESSAGE}`. For Python, a **Context Manager** pattern MUST be used to automatically handle `Entry`, `Exit`, and `Coherence` states, ensuring structural integrity and error capturing.
### VI. Fractal Complexity Limit
To maintain semantic coherence and avoid "Attention Sink" issues:
- **Module Size**: If a Module body exceeds ~300 lines (or logical complexity), it MUST be refactored into sub-modules or a package structure.
- **Function Size**: Functions should fit within a standard attention "chunk" (approx. 30-50 lines). If larger, logic MUST be decomposed into helper functions with their own contracts.
This ensures every vector embedding remains sharp and focused.
## File Structure Standards
### Python Modules
Every `.py` file must start with a Module definition header (`[DEF:module_name:Module]`) containing:
- `@SEMANTICS`: Keywords for vector search.
- `@PURPOSE`: Primary responsibility of the module.
- `@LAYER`: Architecture layer (Domain/Infra/UI).
- `@RELATION`: Dependencies.
- `@INVARIANT` & `@CONSTRAINT`: Immutable rules.
- `@PUBLIC_API`: Exported symbols.
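As an illustration (the module name and contents below are made up, not taken from the repository), a minimal header and contract following these rules might look like:
```python
# [DEF:backend.src.services.example_service:Module]
#
# @SEMANTICS: example, naming, normalization
# @PURPOSE: Illustrates the mandatory module header and contract structure.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> backend.src.core.logger
# @INVARIANT: Public functions never mutate their inputs.
# @CONSTRAINT: No I/O is performed in this module.
# @PUBLIC_API: normalize_name

# [DEF:normalize_name:Function]
# @PURPOSE: Normalize a display name for comparison.
# @PRE: name is a non-empty string.
# @POST: Returns a lower-cased, stripped copy of name.
def normalize_name(name: str) -> str:
    return name.strip().lower()
# [/DEF:normalize_name:Function]
# [/DEF:backend.src.services.example_service:Module]
```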
### Svelte Components
Every `.svelte` file must start with a Component definition header (`[DEF:ComponentName:Component]`) wrapped in an HTML comment `<!-- ... -->` containing:
- `@SEMANTICS`: Keywords for vector search.
- `@PURPOSE`: Primary responsibility of the component.
- `@LAYER`: Architecture layer (UI/State/Layout).
- `@RELATION`: Child components, Stores used, API calls.
- `@PROPS`: Input properties.
- `@EVENTS`: Emitted events.
- `@INVARIANT`: Immutable UI/State rules.
## Generation Workflow
The development process follows a strict sequence enforced by Agent Roles:
### 1. Architecture Phase (Mode: `tech-lead`)
**Input**: `tasks-arch.md`
**Responsibility**:
- Analyze request and graph position.
- Generate `[DEF]` anchors, Headers, and Contracts (`@PRE`, `@POST`).
- **Output**: Scaffolding files with no implementation logic.
### 2. Implementation Phase (Mode: `code`)
**Input**: `tasks-dev.md` + Scaffolding files
**Responsibility**:
- Read contracts defined by Architect.
- Write implementation code that strictly satisfies contracts.
- **Output**: Working code with passing tests.
### 3. Validation
If logic conflicts with Contract -> Stop -> Report Error.
## Governance
-### Compliance
-All Pull Requests and code modifications must be verified against this Constitution. Violations of Core Principles are considered critical defects.
-### Amendments
-Changes to this Constitution require a formal RFC process and approval from the project lead.
-**Version**: 1.0.0 | **Ratified**: 2025-12-20
This Constitution establishes the "Semantic Code Generation Protocol" as the supreme law of this repository.
- **Automated Enforcement**: All code generation tools and agents must parse and validate adherence to the `[DEF]` syntax and Contract requirements.
- **Amendments**: Changes to the syntax or core principles require a formal amendment to this Constitution and a corresponding update to the constitution
- **Review**: Code reviews must verify that implementation matches the preceding contracts and that no "naked code" exists outside of semantic anchors.
- **Compliance**: Failure to adhere to the `[DEF]` / `[/DEF]` structure (including matching closing tags) constitutes a build failure.
**Version**: 1.5.0 | **Ratified**: 2025-12-19 | **Last Amended**: 2025-12-27

View File

@@ -9,8 +9,8 @@
#
# OPTIONS:
# --json Output in JSON format
-# --require-tasks Require tasks.md to exist (for implementation phase)
# --require-tasks Require tasks-arch.md and tasks-dev.md to exist (for implementation phase)
-# --include-tasks Include tasks.md in AVAILABLE_DOCS list
# --include-tasks Include task files in AVAILABLE_DOCS list
# --paths-only Only output path variables (no validation)
# --help, -h Show help message
#
@@ -49,8 +49,8 @@ Consolidated prerequisite checking for Spec-Driven Development workflow.
OPTIONS:
--json Output in JSON format
---require-tasks Require tasks.md to exist (for implementation phase)
--require-tasks Require tasks-arch.md and tasks-dev.md to exist (for implementation phase)
---include-tasks Include tasks.md in AVAILABLE_DOCS list
--include-tasks Include task files in AVAILABLE_DOCS list
--paths-only Only output path variables (no prerequisite validation)
--help, -h Show this help message
@@ -58,7 +58,7 @@ EXAMPLES:
# Check task prerequisites (plan.md required)
./check-prerequisites.sh --json
-# Check implementation prerequisites (plan.md + tasks.md required)
# Check implementation prerequisites (plan.md + task files required)
./check-prerequisites.sh --json --require-tasks --include-tasks
# Get feature paths only (no validation)
@@ -86,15 +86,16 @@ check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1
if $PATHS_ONLY; then
if $JSON_MODE; then
# Minimal JSON paths payload (no validation performed)
-printf '{"REPO_ROOT":"%s","BRANCH":"%s","FEATURE_DIR":"%s","FEATURE_SPEC":"%s","IMPL_PLAN":"%s","TASKS":"%s"}\n' \
printf '{"REPO_ROOT":"%s","BRANCH":"%s","FEATURE_DIR":"%s","FEATURE_SPEC":"%s","IMPL_PLAN":"%s","TASKS_ARCH":"%s","TASKS_DEV":"%s"}\n' \
-"$REPO_ROOT" "$CURRENT_BRANCH" "$FEATURE_DIR" "$FEATURE_SPEC" "$IMPL_PLAN" "$TASKS"
"$REPO_ROOT" "$CURRENT_BRANCH" "$FEATURE_DIR" "$FEATURE_SPEC" "$IMPL_PLAN" "$TASKS_ARCH" "$TASKS_DEV"
else
echo "REPO_ROOT: $REPO_ROOT"
echo "BRANCH: $CURRENT_BRANCH"
echo "FEATURE_DIR: $FEATURE_DIR"
echo "FEATURE_SPEC: $FEATURE_SPEC"
echo "IMPL_PLAN: $IMPL_PLAN"
-echo "TASKS: $TASKS"
echo "TASKS_ARCH: $TASKS_ARCH"
echo "TASKS_DEV: $TASKS_DEV"
fi
exit 0
fi
@@ -112,11 +113,20 @@ if [[ ! -f "$IMPL_PLAN" ]]; then
exit 1
fi
-# Check for tasks.md if required
-if $REQUIRE_TASKS && [[ ! -f "$TASKS" ]]; then
-echo "ERROR: tasks.md not found in $FEATURE_DIR" >&2
-echo "Run /speckit.tasks first to create the task list." >&2
# Check for task files if required
if $REQUIRE_TASKS; then
# Check for split tasks first
if [[ -f "$TASKS_ARCH" ]] && [[ -f "$TASKS_DEV" ]]; then
: # Split tasks exist, proceed
# Fallback to unified tasks.md
elif [[ -f "$TASKS" ]]; then
: # Unified tasks exist, proceed
else
echo "ERROR: No valid task files found in $FEATURE_DIR" >&2
echo "Expected 'tasks-arch.md' AND 'tasks-dev.md' (split) OR 'tasks.md' (unified)" >&2
echo "Run /speckit.tasks first to create the task lists." >&2
exit 1
fi
fi
# Build list of available documents # Build list of available documents
@@ -133,9 +143,14 @@ fi
[[ -f "$QUICKSTART" ]] && docs+=("quickstart.md") [[ -f "$QUICKSTART" ]] && docs+=("quickstart.md")
# Include tasks.md if requested and it exists # Include task files if requested and they exist
if $INCLUDE_TASKS && [[ -f "$TASKS" ]]; then if $INCLUDE_TASKS; then
if [[ -f "$TASKS_ARCH" ]] || [[ -f "$TASKS_DEV" ]]; then
[[ -f "$TASKS_ARCH" ]] && docs+=("tasks-arch.md")
[[ -f "$TASKS_DEV" ]] && docs+=("tasks-dev.md")
elif [[ -f "$TASKS" ]]; then
docs+=("tasks.md") docs+=("tasks.md")
fi
fi fi
# Output results # Output results
@@ -161,6 +176,11 @@ else
check_file "$QUICKSTART" "quickstart.md" check_file "$QUICKSTART" "quickstart.md"
if $INCLUDE_TASKS; then if $INCLUDE_TASKS; then
if [[ -f "$TASKS_ARCH" ]] || [[ -f "$TASKS_DEV" ]]; then
check_file "$TASKS_ARCH" "tasks-arch.md"
check_file "$TASKS_DEV" "tasks-dev.md"
else
check_file "$TASKS" "tasks.md" check_file "$TASKS" "tasks.md"
fi fi
fi
fi fi

View File

@@ -143,7 +143,9 @@ HAS_GIT='$has_git_repo'
FEATURE_DIR='$feature_dir'
FEATURE_SPEC='$feature_dir/spec.md'
IMPL_PLAN='$feature_dir/plan.md'
-TASKS='$feature_dir/tasks.md'
TASKS_ARCH='$feature_dir/tasks-arch.md'
TASKS_DEV='$feature_dir/tasks-dev.md'
TASKS='$feature_dir/tasks.md' # Deprecated
RESEARCH='$feature_dir/research.md'
DATA_MODEL='$feature_dir/data-model.md'
QUICKSTART='$feature_dir/quickstart.md'

View File

@@ -0,0 +1,35 @@
---
description: "Architecture task list template (Contracts & Scaffolding)"
---
# Architecture Tasks: [FEATURE NAME]
**Role**: Architect Agent
**Goal**: Define the "What" and "Why" (Contracts, Scaffolding, Models) before implementation.
**Input**: Design documents from `/specs/[###-feature-name]/`
**Output**: Files with `[DEF]` anchors, `@PRE`/`@POST` contracts, and `@RELATION` mappings. No business logic.
## Phase 1: Setup & Models
- [ ] A001 Create/Update data models in [path] with `[DEF]` and contracts
- [ ] A002 Define API route structure/contracts in [path]
- [ ] A003 Define shared utilities/interfaces
## Phase 2: User Story 1 - [Title]
- [ ] A004 [US1] Define contracts for [Component/Service] in [path]
- [ ] A005 [US1] Define contracts for [Endpoint] in [path]
- [ ] A006 [US1] Define contracts for [Frontend Component] in [path]
## Phase 3: User Story 2 - [Title]
- [ ] A007 [US2] Define contracts for [Component/Service] in [path]
- [ ] A008 [US2] Define contracts for [Endpoint] in [path]
## Handover Checklist
- [ ] All new files created with `[DEF]` anchors
- [ ] All functions/classes have `@PURPOSE`, `@PRE`, `@POST` tags
- [ ] No "naked code" (logic outside of anchors)
- [ ] `tasks-dev.md` is ready for the Developer Agent

View File

@@ -0,0 +1,35 @@
---
description: "Developer task list template (Implementation Logic)"
---
# Developer Tasks: [FEATURE NAME]
**Role**: Developer Agent
**Goal**: Implement the "How" (Logic, State, Error Handling) inside the defined contracts.
**Input**: `tasks-arch.md` (completed), Scaffolding files with `[DEF]` anchors.
**Output**: Working code that satisfies `@PRE`/`@POST` conditions.
## Phase 1: Setup & Models
- [ ] D001 Implement logic for [Model] in [path]
- [ ] D002 Implement logic for [API Route] in [path]
- [ ] D003 Implement shared utilities
## Phase 2: User Story 1 - [Title]
- [ ] D004 [US1] Implement logic for [Component/Service] in [path]
- [ ] D005 [US1] Implement logic for [Endpoint] in [path]
- [ ] D006 [US1] Implement logic for [Frontend Component] in [path]
- [ ] D007 [US1] Verify semantic compliance and belief state logging
## Phase 3: User Story 2 - [Title]
- [ ] D008 [US2] Implement logic for [Component/Service] in [path]
- [ ] D009 [US2] Implement logic for [Endpoint] in [path]
## Polish & Quality Assurance
- [ ] DXXX Verify all tests pass
- [ ] DXXX Check error handling and edge cases
- [ ] DXXX Ensure code style compliance

View File

@@ -1,251 +0,0 @@
---
description: "Task list template for feature implementation"
---
# Tasks: [FEATURE NAME]
**Input**: Design documents from `/specs/[###-feature-name]/`
**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/
**Tests**: The examples below include test tasks. Tests are OPTIONAL - only include them if explicitly requested in the feature specification.
**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story.
## Format: `[ID] [P?] [Story] Description`
- **[P]**: Can run in parallel (different files, no dependencies)
- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3)
- Include exact file paths in descriptions
## Path Conventions
- **Single project**: `src/`, `tests/` at repository root
- **Web app**: `backend/src/`, `frontend/src/`
- **Mobile**: `api/src/`, `ios/src/` or `android/src/`
- Paths shown below assume single project - adjust based on plan.md structure
<!--
============================================================================
IMPORTANT: The tasks below are SAMPLE TASKS for illustration purposes only.
The /speckit.tasks command MUST replace these with actual tasks based on:
- User stories from spec.md (with their priorities P1, P2, P3...)
- Feature requirements from plan.md
- Entities from data-model.md
- Endpoints from contracts/
Tasks MUST be organized by user story so each story can be:
- Implemented independently
- Tested independently
- Delivered as an MVP increment
DO NOT keep these sample tasks in the generated tasks.md file.
============================================================================
-->
## Phase 1: Setup (Shared Infrastructure)
**Purpose**: Project initialization and basic structure
- [ ] T001 Create project structure per implementation plan
- [ ] T002 Initialize [language] project with [framework] dependencies
- [ ] T003 [P] Configure linting and formatting tools
---
## Phase 2: Foundational (Blocking Prerequisites)
**Purpose**: Core infrastructure that MUST be complete before ANY user story can be implemented
**⚠️ CRITICAL**: No user story work can begin until this phase is complete
Examples of foundational tasks (adjust based on your project):
- [ ] T004 Setup database schema and migrations framework
- [ ] T005 [P] Implement authentication/authorization framework
- [ ] T006 [P] Setup API routing and middleware structure
- [ ] T007 Create base models/entities that all stories depend on
- [ ] T008 Configure error handling and logging infrastructure
- [ ] T009 Setup environment configuration management
**Checkpoint**: Foundation ready - user story implementation can now begin in parallel
---
## Phase 3: User Story 1 - [Title] (Priority: P1) 🎯 MVP
**Goal**: [Brief description of what this story delivers]
**Independent Test**: [How to verify this story works on its own]
### Tests for User Story 1 (OPTIONAL - only if tests requested) ⚠️
> **NOTE: Write these tests FIRST, ensure they FAIL before implementation**
- [ ] T010 [P] [US1] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T011 [P] [US1] Integration test for [user journey] in tests/integration/test_[name].py
### Implementation for User Story 1
- [ ] T012 [P] [US1] Create [Entity1] model in src/models/[entity1].py
- [ ] T013 [P] [US1] Create [Entity2] model in src/models/[entity2].py
- [ ] T014 [US1] Implement [Service] in src/services/[service].py (depends on T012, T013)
- [ ] T015 [US1] Implement [endpoint/feature] in src/[location]/[file].py
- [ ] T016 [US1] Add validation and error handling
- [ ] T017 [US1] Add logging for user story 1 operations
**Checkpoint**: At this point, User Story 1 should be fully functional and testable independently
---
## Phase 4: User Story 2 - [Title] (Priority: P2)
**Goal**: [Brief description of what this story delivers]
**Independent Test**: [How to verify this story works on its own]
### Tests for User Story 2 (OPTIONAL - only if tests requested) ⚠️
- [ ] T018 [P] [US2] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T019 [P] [US2] Integration test for [user journey] in tests/integration/test_[name].py
### Implementation for User Story 2
- [ ] T020 [P] [US2] Create [Entity] model in src/models/[entity].py
- [ ] T021 [US2] Implement [Service] in src/services/[service].py
- [ ] T022 [US2] Implement [endpoint/feature] in src/[location]/[file].py
- [ ] T023 [US2] Integrate with User Story 1 components (if needed)
**Checkpoint**: At this point, User Stories 1 AND 2 should both work independently
---
## Phase 5: User Story 3 - [Title] (Priority: P3)
**Goal**: [Brief description of what this story delivers]
**Independent Test**: [How to verify this story works on its own]
### Tests for User Story 3 (OPTIONAL - only if tests requested) ⚠️
- [ ] T024 [P] [US3] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T025 [P] [US3] Integration test for [user journey] in tests/integration/test_[name].py
### Implementation for User Story 3
- [ ] T026 [P] [US3] Create [Entity] model in src/models/[entity].py
- [ ] T027 [US3] Implement [Service] in src/services/[service].py
- [ ] T028 [US3] Implement [endpoint/feature] in src/[location]/[file].py
**Checkpoint**: All user stories should now be independently functional
---
[Add more user story phases as needed, following the same pattern]
---
## Phase N: Polish & Cross-Cutting Concerns
**Purpose**: Improvements that affect multiple user stories
- [ ] TXXX [P] Documentation updates in docs/
- [ ] TXXX Code cleanup and refactoring
- [ ] TXXX Performance optimization across all stories
- [ ] TXXX [P] Additional unit tests (if requested) in tests/unit/
- [ ] TXXX Security hardening
- [ ] TXXX Run quickstart.md validation
---
## Dependencies & Execution Order
### Phase Dependencies
- **Setup (Phase 1)**: No dependencies - can start immediately
- **Foundational (Phase 2)**: Depends on Setup completion - BLOCKS all user stories
- **User Stories (Phase 3+)**: All depend on Foundational phase completion
- User stories can then proceed in parallel (if staffed)
- Or sequentially in priority order (P1 → P2 → P3)
- **Polish (Final Phase)**: Depends on all desired user stories being complete
### User Story Dependencies
- **User Story 1 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories
- **User Story 2 (P2)**: Can start after Foundational (Phase 2) - May integrate with US1 but should be independently testable
- **User Story 3 (P3)**: Can start after Foundational (Phase 2) - May integrate with US1/US2 but should be independently testable
### Within Each User Story
- Tests (if included) MUST be written and FAIL before implementation
- Models before services
- Services before endpoints
- Core implementation before integration
- Story complete before moving to next priority
### Parallel Opportunities
- All Setup tasks marked [P] can run in parallel
- All Foundational tasks marked [P] can run in parallel (within Phase 2)
- Once Foundational phase completes, all user stories can start in parallel (if team capacity allows)
- All tests for a user story marked [P] can run in parallel
- Models within a story marked [P] can run in parallel
- Different user stories can be worked on in parallel by different team members
---
## Parallel Example: User Story 1
```bash
# Launch all tests for User Story 1 together (if tests requested):
Task: "Contract test for [endpoint] in tests/contract/test_[name].py"
Task: "Integration test for [user journey] in tests/integration/test_[name].py"
# Launch all models for User Story 1 together:
Task: "Create [Entity1] model in src/models/[entity1].py"
Task: "Create [Entity2] model in src/models/[entity2].py"
```
---
## Implementation Strategy
### MVP First (User Story 1 Only)
1. Complete Phase 1: Setup
2. Complete Phase 2: Foundational (CRITICAL - blocks all stories)
3. Complete Phase 3: User Story 1
4. **STOP and VALIDATE**: Test User Story 1 independently
5. Deploy/demo if ready
### Incremental Delivery
1. Complete Setup + Foundational → Foundation ready
2. Add User Story 1 → Test independently → Deploy/Demo (MVP!)
3. Add User Story 2 → Test independently → Deploy/Demo
4. Add User Story 3 → Test independently → Deploy/Demo
5. Each story adds value without breaking previous stories
### Parallel Team Strategy
With multiple developers:
1. Team completes Setup + Foundational together
2. Once Foundational is done:
- Developer A: User Story 1
- Developer B: User Story 2
- Developer C: User Story 3
3. Stories complete and integrate independently
---
## Notes
- [P] tasks = different files, no dependencies
- [Story] label maps task to specific user story for traceability
- Each user story should be independently completable and testable
- Verify tests fail before implementing
- Commit after each task or logical group
- Stop at any checkpoint to validate story independently
- Avoid: vague tasks, same file conflicts, cross-story dependencies that break independence

BIN
backend/mappings.db Normal file

Binary file not shown.

BIN
backend/migrations.db Normal file

Binary file not shown.

View File

@@ -10,3 +10,5 @@ keyring
httpx
PyYAML
websockets
rapidfuzz
sqlalchemy

View File

@@ -0,0 +1,75 @@
# [DEF:backend.src.api.routes.environments:Module]
#
# @SEMANTICS: api, environments, superset, databases
# @PURPOSE: API endpoints for listing environments and their databases.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
#
# @INVARIANT: Environment IDs must exist in the configuration.
# [SECTION: IMPORTS]
from fastapi import APIRouter, Depends, HTTPException
from typing import List, Dict, Optional
from backend.src.dependencies import get_config_manager
from backend.src.core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
from pydantic import BaseModel
# [/SECTION]
router = APIRouter(prefix="/api/environments", tags=["environments"])
# [DEF:EnvironmentResponse:DataClass]
class EnvironmentResponse(BaseModel):
id: str
name: str
url: str
# [/DEF:EnvironmentResponse]
# [DEF:DatabaseResponse:DataClass]
class DatabaseResponse(BaseModel):
uuid: str
database_name: str
engine: Optional[str]
# [/DEF:DatabaseResponse]
# [DEF:get_environments:Function]
# @PURPOSE: List all configured environments.
# @RETURN: List[EnvironmentResponse]
@router.get("", response_model=List[EnvironmentResponse])
async def get_environments(config_manager=Depends(get_config_manager)):
envs = config_manager.get_environments()
return [EnvironmentResponse(id=e.id, name=e.name, url=e.url) for e in envs]
# [/DEF:get_environments]
# [DEF:get_environment_databases:Function]
# @PURPOSE: Fetch the list of databases from a specific environment.
# @PARAM: id (str) - The environment ID.
# @RETURN: List[Dict] - List of databases.
@router.get("/{id}/databases")
async def get_environment_databases(id: str, config_manager=Depends(get_config_manager)):
envs = config_manager.get_environments()
env = next((e for e in envs if e.id == id), None)
if not env:
raise HTTPException(status_code=404, detail="Environment not found")
try:
# Initialize SupersetClient from environment config
# Note: We need to map Environment model to SupersetConfig
superset_config = SupersetConfig(
env=env.name,
base_url=env.url,
auth={
"provider": "db", # Defaulting to db provider
"username": env.username,
"password": env.password,
"refresh": "false"
}
)
client = SupersetClient(superset_config)
return client.get_databases_summary()
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to fetch databases: {str(e)}")
# [/DEF:get_environment_databases]
# [/DEF:backend.src.api.routes.environments]
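A quick way to exercise these endpoints from Python, assuming the backend is running locally on port 8000 (the host and port are assumptions, not part of this diff):
```python
import httpx

# Assumed local dev URL; adjust to wherever the backend is served.
with httpx.Client(base_url="http://localhost:8000") as client:
    environments = client.get("/api/environments").json()
    for env in environments:
        databases = client.get(f"/api/environments/{env['id']}/databases").json()
        print(env["name"], [db["database_name"] for db in databases])
```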

View File

@@ -0,0 +1,110 @@
# [DEF:backend.src.api.routes.mappings:Module]
#
# @SEMANTICS: api, mappings, database, fuzzy-matching
# @PURPOSE: API endpoints for managing database mappings and getting suggestions.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.core.database
# @RELATION: DEPENDS_ON -> backend.src.services.mapping_service
#
# @INVARIANT: Mappings are persisted in the SQLite database.
# [SECTION: IMPORTS]
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional
from backend.src.dependencies import get_config_manager
from backend.src.core.database import get_db
from backend.src.models.mapping import DatabaseMapping
from pydantic import BaseModel
# [/SECTION]
router = APIRouter(prefix="/api/mappings", tags=["mappings"])
# [DEF:MappingCreate:DataClass]
class MappingCreate(BaseModel):
source_env_id: str
target_env_id: str
source_db_uuid: str
target_db_uuid: str
source_db_name: str
target_db_name: str
# [/DEF:MappingCreate]
# [DEF:MappingResponse:DataClass]
class MappingResponse(BaseModel):
id: str
source_env_id: str
target_env_id: str
source_db_uuid: str
target_db_uuid: str
source_db_name: str
target_db_name: str
class Config:
from_attributes = True
# [/DEF:MappingResponse]
# [DEF:SuggestRequest:DataClass]
class SuggestRequest(BaseModel):
source_env_id: str
target_env_id: str
# [/DEF:SuggestRequest]
# [DEF:get_mappings:Function]
# @PURPOSE: List all saved database mappings.
@router.get("", response_model=List[MappingResponse])
async def get_mappings(
source_env_id: Optional[str] = None,
target_env_id: Optional[str] = None,
db: Session = Depends(get_db)
):
query = db.query(DatabaseMapping)
if source_env_id:
query = query.filter(DatabaseMapping.source_env_id == source_env_id)
if target_env_id:
query = query.filter(DatabaseMapping.target_env_id == target_env_id)
return query.all()
# [/DEF:get_mappings]
# [DEF:create_mapping:Function]
# @PURPOSE: Create or update a database mapping.
@router.post("", response_model=MappingResponse)
async def create_mapping(mapping: MappingCreate, db: Session = Depends(get_db)):
# Check if mapping already exists
existing = db.query(DatabaseMapping).filter(
DatabaseMapping.source_env_id == mapping.source_env_id,
DatabaseMapping.target_env_id == mapping.target_env_id,
DatabaseMapping.source_db_uuid == mapping.source_db_uuid
).first()
if existing:
existing.target_db_uuid = mapping.target_db_uuid
existing.target_db_name = mapping.target_db_name
db.commit()
db.refresh(existing)
return existing
new_mapping = DatabaseMapping(**mapping.dict())
db.add(new_mapping)
db.commit()
db.refresh(new_mapping)
return new_mapping
# [/DEF:create_mapping]
# [DEF:suggest_mappings_api:Function]
# @PURPOSE: Get suggested mappings based on fuzzy matching.
@router.post("/suggest")
async def suggest_mappings_api(
request: SuggestRequest,
config_manager=Depends(get_config_manager)
):
from backend.src.services.mapping_service import MappingService
service = MappingService(config_manager)
try:
return await service.get_suggestions(request.source_env_id, request.target_env_id)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
# [/DEF:suggest_mappings_api]
# [/DEF:backend.src.api.routes.mappings]
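The `MappingService` behind the `/suggest` endpoint is not part of this diff. A minimal sketch of how name-based suggestions could be produced with `rapidfuzz` (which this change adds to requirements.txt) might look like the following; the function and field names are illustrative assumptions:
```python
from rapidfuzz import fuzz, process

def suggest_pairs(source_dbs: list[dict], target_dbs: list[dict], min_score: float = 70.0) -> list[dict]:
    """Pair each source database with the closest-named target database."""
    targets_by_name = {db["database_name"]: db for db in target_dbs}
    suggestions = []
    for src in source_dbs:
        match = process.extractOne(
            src["database_name"], list(targets_by_name), scorer=fuzz.WRatio
        )
        if match and match[1] >= min_score:
            name, score, _ = match
            suggestions.append({"source": src, "target": targets_by_name[name], "score": score})
    return suggestions
```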

View File

@@ -0,0 +1,76 @@
# [DEF:backend.src.api.routes.migration:Module]
# @SEMANTICS: api, migration, dashboards
# @PURPOSE: API endpoints for migration operations.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.models.dashboard
from fastapi import APIRouter, Depends, HTTPException
from typing import List, Dict
from backend.src.dependencies import get_config_manager, get_task_manager
from backend.src.models.dashboard import DashboardMetadata, DashboardSelection
from backend.src.core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
router = APIRouter(prefix="/api", tags=["migration"])
# [DEF:get_dashboards:Function]
# @PURPOSE: Fetch all dashboards from the specified environment for the grid.
# @PRE: Environment ID must be valid.
# @POST: Returns a list of dashboard metadata.
# @PARAM: env_id (str) - The ID of the environment to fetch from.
# @RETURN: List[DashboardMetadata]
@router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata])
async def get_dashboards(env_id: str, config_manager=Depends(get_config_manager)):
environments = config_manager.get_environments()
env = next((e for e in environments if e.id == env_id), None)
if not env:
raise HTTPException(status_code=404, detail="Environment not found")
config = SupersetConfig(
env=env.name,
base_url=env.url,
auth={'provider': 'db', 'username': env.username, 'password': env.password, 'refresh': False},
verify_ssl=True,
timeout=30
)
client = SupersetClient(config)
dashboards = client.get_dashboards_summary()
return dashboards
# [/DEF:get_dashboards]
# [DEF:execute_migration:Function]
# @PURPOSE: Execute the migration of selected dashboards.
# @PRE: Selection must be valid and environments must exist.
# @POST: Starts the migration task and returns the task ID.
# @PARAM: selection (DashboardSelection) - The dashboards to migrate.
# @RETURN: Dict - {"task_id": str, "message": str}
@router.post("/migration/execute")
async def execute_migration(selection: DashboardSelection, config_manager=Depends(get_config_manager), task_manager=Depends(get_task_manager)):
# Validate environments exist
environments = config_manager.get_environments()
env_ids = {e.id for e in environments}
if selection.source_env_id not in env_ids or selection.target_env_id not in env_ids:
raise HTTPException(status_code=400, detail="Invalid source or target environment")
# Create migration task with debug logging
from ...core.logger import logger
# Include replace_db_config in the task parameters
task_params = selection.dict()
task_params['replace_db_config'] = selection.replace_db_config
logger.info(f"Creating migration task with params: {task_params}")
logger.info(f"Available environments: {env_ids}")
logger.info(f"Source env: {selection.source_env_id}, Target env: {selection.target_env_id}")
try:
task = await task_manager.create_task("superset-migration", task_params)
logger.info(f"Task created successfully: {task.id}")
return {"task_id": task.id, "message": "Migration initiated"}
except Exception as e:
logger.error(f"Task creation failed: {e}")
raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:execute_migration]
# [/DEF:backend.src.api.routes.migration]

View File

@@ -16,7 +16,7 @@ from ...core.config_models import AppConfig, Environment, GlobalSettings
from ...dependencies import get_config_manager
from ...core.config_manager import ConfigManager
from ...core.logger import logger
-from superset_tool.client import SupersetClient
from ...core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
import os
# [/SECTION]

View File

@@ -3,11 +3,11 @@
# @PURPOSE: Defines the FastAPI router for task-related endpoints, allowing clients to create, list, and get the status of tasks.
# @LAYER: UI (API)
# @RELATION: Depends on the TaskManager. It is included by the main app.
-from typing import List, Dict, Any
from typing import List, Dict, Any, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
-from ...core.task_manager import TaskManager, Task
from ...core.task_manager import TaskManager, Task, TaskStatus, LogEntry
from ...dependencies import get_task_manager
router = APIRouter()
@@ -16,6 +16,12 @@ class CreateTaskRequest(BaseModel):
plugin_id: str
params: Dict[str, Any]
class ResolveTaskRequest(BaseModel):
resolution_params: Dict[str, Any]
class ResumeTaskRequest(BaseModel):
passwords: Dict[str, str]
@router.post("/", response_model=Task, status_code=status.HTTP_201_CREATED)
async def create_task(
request: CreateTaskRequest,
@@ -35,12 +41,15 @@ async def create_task(
@router.get("/", response_model=List[Task])
async def list_tasks(
limit: int = 10,
offset: int = 0,
status: Optional[TaskStatus] = None,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
-Retrieve a list of all tasks.
Retrieve a list of tasks with pagination and optional status filter.
"""
-return task_manager.get_all_tasks()
return task_manager.get_tasks(limit=limit, offset=offset, status=status)
@router.get("/{task_id}", response_model=Task)
async def get_task(
@@ -54,4 +63,58 @@ async def get_task(
if not task:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
return task
@router.get("/{task_id}/logs", response_model=List[LogEntry])
async def get_task_logs(
task_id: str,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Retrieve logs for a specific task.
"""
task = task_manager.get_task(task_id)
if not task:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
return task_manager.get_task_logs(task_id)
@router.post("/{task_id}/resolve", response_model=Task)
async def resolve_task(
task_id: str,
request: ResolveTaskRequest,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Resolve a task that is awaiting mapping.
"""
try:
await task_manager.resolve_task(task_id, request.resolution_params)
return task_manager.get_task(task_id)
except ValueError as e:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
@router.post("/{task_id}/resume", response_model=Task)
async def resume_task(
task_id: str,
request: ResumeTaskRequest,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Resume a task that is awaiting input (e.g., passwords).
"""
try:
task_manager.resume_task_with_password(task_id, request.passwords)
return task_manager.get_task(task_id)
except ValueError as e:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
@router.delete("/", status_code=status.HTTP_204_NO_CONTENT)
async def clear_tasks(
status: Optional[TaskStatus] = None,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Clear tasks matching the status filter. If no filter, clears all non-running tasks.
"""
task_manager.clear_tasks(status)
return
# [/DEF]
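For reference, resuming a password-gated task through the new endpoint could look like this from a Python client (the base URL, task id, and password keys below are placeholders; the router is mounted under /api/tasks):
```python
import httpx

task_id = "your-task-id"  # placeholder
# Assumed local dev URL; the task router is mounted under /api/tasks.
with httpx.Client(base_url="http://localhost:8000") as client:
    response = client.post(
        f"/api/tasks/{task_id}/resume",
        json={"passwords": {"source_env": "s3cret"}},  # key names are assumptions
    )
    response.raise_for_status()
    print(response.json()["status"])
```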

View File

@@ -20,7 +20,11 @@ import os
from .dependencies import get_task_manager
from .core.logger import logger
-from .api.routes import plugins, tasks, settings
from .api.routes import plugins, tasks, settings, environments, mappings, migration
from .core.database import init_db
# Initialize database
init_db()
# [DEF:App:Global]
# @SEMANTICS: app, fastapi, instance
@@ -45,6 +49,9 @@ app.add_middleware(
app.include_router(plugins.router, prefix="/api/plugins", tags=["Plugins"])
app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"])
app.include_router(settings.router, prefix="/api/settings", tags=["Settings"])
app.include_router(environments.router)
app.include_router(mappings.router)
app.include_router(migration.router)
# [DEF:WebSocketEndpoint:Endpoint]
# @SEMANTICS: websocket, logs, streaming, real-time
@@ -56,16 +63,30 @@ async def websocket_endpoint(websocket: WebSocket, task_id: str):
task_manager = get_task_manager()
queue = await task_manager.subscribe_logs(task_id)
try:
-# Send initial logs if any
# Stream new logs
logger.info(f"Starting log stream for task {task_id}")
# Send initial logs first to build context
initial_logs = task_manager.get_task_logs(task_id)
for log_entry in initial_logs:
-# Convert datetime to string for JSON serialization
log_dict = log_entry.dict()
log_dict['timestamp'] = log_dict['timestamp'].isoformat()
await websocket.send_json(log_dict)
-# Stream new logs
-logger.info(f"Starting log stream for task {task_id}")
# Force a check for AWAITING_INPUT status immediately upon connection
# This ensures that if the task is already waiting when the user connects, they get the prompt.
task = task_manager.get_task(task_id)
if task and task.status == "AWAITING_INPUT" and task.input_request:
# Construct a synthetic log entry to trigger the frontend handler
# This is a bit of a hack but avoids changing the websocket protocol significantly
synthetic_log = {
"timestamp": task.logs[-1].timestamp.isoformat() if task.logs else "2024-01-01T00:00:00",
"level": "INFO",
"message": "Task paused for user input (Connection Re-established)",
"context": {"input_request": task.input_request}
}
await websocket.send_json(synthetic_log)
while True:
log_entry = await queue.get()
log_dict = log_entry.dict()
@@ -77,7 +98,9 @@ async def websocket_endpoint(websocket: WebSocket, task_id: str):
if "Task completed successfully" in log_entry.message or "Task failed" in log_entry.message:
# Wait a bit to ensure client receives the last message
await asyncio.sleep(2)
-break
# DO NOT BREAK here - allow client to keep connection open if they want to review logs
# or until they disconnect. Breaking closes the socket immediately.
# break
except WebSocketDisconnect:
logger.info(f"WebSocket connection disconnected for task {task_id}")

View File

@@ -16,7 +16,7 @@ import os
from pathlib import Path
from typing import Optional, List
from .config_models import AppConfig, Environment, GlobalSettings
-from .logger import logger
from .logger import logger, configure_logger
# [/SECTION]
# [DEF:ConfigManager:Class]
@@ -39,6 +39,9 @@ class ConfigManager:
self.config_path = Path(config_path)
self.config: AppConfig = self._load_config()
# Configure logger with loaded settings
configure_logger(self.config.settings.logging)
# 3. Runtime check of @POST
assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"
@@ -69,6 +72,8 @@ class ConfigManager:
return config
except Exception as e:
logger.error(f"[_load_config][Coherence:Failed] Error loading config: {e}")
# Fallback but try to preserve existing settings if possible?
# For now, return default to be safe, but log the error prominently.
return AppConfig(
environments=[],
settings=GlobalSettings(backup_path="backups")
@@ -121,6 +126,9 @@ class ConfigManager:
self.config.settings = settings
self.save()
# Reconfigure logger with new settings
configure_logger(settings.logging)
logger.info(f"[update_global_settings][Exit] Settings updated")
# [/DEF:update_global_settings]

View File

@@ -19,11 +19,27 @@ class Environment(BaseModel):
is_default: bool = False
# [/DEF:Environment]
# [DEF:LoggingConfig:DataClass]
# @PURPOSE: Defines the configuration for the application's logging system.
class LoggingConfig(BaseModel):
level: str = "INFO"
file_path: Optional[str] = "logs/app.log"
max_bytes: int = 10 * 1024 * 1024
backup_count: int = 5
enable_belief_state: bool = True
# [/DEF:LoggingConfig]
# [DEF:GlobalSettings:DataClass]
# @PURPOSE: Represents global application settings.
class GlobalSettings(BaseModel):
backup_path: str
default_environment_id: Optional[str] = None
logging: LoggingConfig = Field(default_factory=LoggingConfig)
# Task retention settings
task_retention_days: int = 30
task_retention_limit: int = 100
pagination_limit: int = 10
# [/DEF:GlobalSettings]
# [DEF:AppConfig:DataClass] # [DEF:AppConfig:DataClass]
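Putting the new fields together, constructing settings programmatically would look roughly like this (the import path is inferred from the diff context, not stated in it):
```python
from backend.src.core.config_models import GlobalSettings, LoggingConfig

settings = GlobalSettings(
    backup_path="backups",
    logging=LoggingConfig(level="DEBUG", file_path="logs/app.log", backup_count=3),
    task_retention_days=14,
)
print(settings.logging.level)  # "DEBUG"
```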

View File

@@ -0,0 +1,48 @@
# [DEF:backend.src.core.database:Module]
#
# @SEMANTICS: database, sqlite, sqlalchemy, session, persistence
# @PURPOSE: Configures the SQLite database connection and session management.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> sqlalchemy
# @RELATION: USES -> backend.src.models.mapping
#
# @INVARIANT: A single engine instance is used for the entire application.
# [SECTION: IMPORTS]
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from backend.src.models.mapping import Base
import os
# [/SECTION]
# [DEF:DATABASE_URL:Constant]
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./mappings.db")
# [/DEF:DATABASE_URL]
# [DEF:engine:Variable]
engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False})
# [/DEF:engine]
# [DEF:SessionLocal:Class]
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# [/DEF:SessionLocal]
# [DEF:init_db:Function]
# @PURPOSE: Initializes the database by creating all tables.
def init_db():
Base.metadata.create_all(bind=engine)
# [/DEF:init_db]
# [DEF:get_db:Function]
# @PURPOSE: Dependency for getting a database session.
# @POST: Session is closed after use.
# @RETURN: Generator[Session, None, None]
def get_db():
db = SessionLocal()
try:
yield db
finally:
db.close()
# [/DEF:get_db]
# [/DEF:backend.src.core.database]
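The `backend.src.models.mapping` module referenced here (providing `Base` and `DatabaseMapping`) is not included in this excerpt; a plausible sketch consistent with the fields used by the mappings API, offered purely as an assumption, would be:
```python
import uuid

from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class DatabaseMapping(Base):
    __tablename__ = "database_mappings"  # table name is a guess

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    source_env_id = Column(String, nullable=False)
    target_env_id = Column(String, nullable=False)
    source_db_uuid = Column(String, nullable=False)
    target_db_uuid = Column(String, nullable=False)
    source_db_name = Column(String, nullable=False)
    target_db_name = Column(String, nullable=False)
```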

View File

@@ -4,12 +4,32 @@
# @LAYER: Core
# @RELATION: Used by the main application and other modules to log events. The WebSocketLogHandler is used by the WebSocket endpoint in app.py.
import logging
import threading
from datetime import datetime
from typing import Dict, Any, List, Optional
from collections import deque
from contextlib import contextmanager
from logging.handlers import RotatingFileHandler
from pydantic import BaseModel, Field
# Thread-local storage for belief state
_belief_state = threading.local()
# Global flag for belief state logging
_enable_belief_state = True
# [DEF:BeliefFormatter:Class]
# @PURPOSE: Custom logging formatter that adds belief state prefixes to log messages.
class BeliefFormatter(logging.Formatter):
def format(self, record):
msg = super().format(record)
anchor_id = getattr(_belief_state, 'anchor_id', None)
if anchor_id:
msg = f"[{anchor_id}][Action] {msg}"
return msg
# [/DEF:BeliefFormatter]
# Re-using LogEntry from task_manager for consistency
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
@@ -22,6 +42,81 @@ class LogEntry(BaseModel):
# [/DEF]
# [DEF:BeliefScope:Function]
# @PURPOSE: Context manager for structured Belief State logging.
@contextmanager
def belief_scope(anchor_id: str, message: str = ""):
# Log Entry if enabled
if _enable_belief_state:
entry_msg = f"[{anchor_id}][Entry]"
if message:
entry_msg += f" {message}"
logger.info(entry_msg)
# Set thread-local anchor_id
old_anchor = getattr(_belief_state, 'anchor_id', None)
_belief_state.anchor_id = anchor_id
try:
yield
# Log Coherence OK and Exit
logger.info(f"[{anchor_id}][Coherence:OK]")
if _enable_belief_state:
logger.info(f"[{anchor_id}][Exit]")
except Exception as e:
# Log Coherence Failed
logger.info(f"[{anchor_id}][Coherence:Failed] {str(e)}")
raise
finally:
# Restore old anchor
_belief_state.anchor_id = old_anchor
# [/DEF:BeliefScope]
# [DEF:ConfigureLogger:Function]
# @PURPOSE: Configures the logger with the provided logging settings.
# @PRE: config is a valid LoggingConfig instance.
# @POST: Logger level, handlers, and belief state flag are updated.
# @PARAM: config (LoggingConfig) - The logging configuration.
def configure_logger(config):
global _enable_belief_state
_enable_belief_state = config.enable_belief_state
# Set logger level
level = getattr(logging, config.level.upper(), logging.INFO)
logger.setLevel(level)
# Remove existing file handlers
handlers_to_remove = [h for h in logger.handlers if isinstance(h, RotatingFileHandler)]
for h in handlers_to_remove:
logger.removeHandler(h)
h.close()
# Add file handler if file_path is set
if config.file_path:
import os
from pathlib import Path
log_file = Path(config.file_path)
log_file.parent.mkdir(parents=True, exist_ok=True)
file_handler = RotatingFileHandler(
config.file_path,
maxBytes=config.max_bytes,
backupCount=config.backup_count
)
file_handler.setFormatter(BeliefFormatter(
'[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
))
logger.addHandler(file_handler)
# Update existing handlers' formatters to BeliefFormatter
for handler in logger.handlers:
if not isinstance(handler, RotatingFileHandler):
handler.setFormatter(BeliefFormatter(
'[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
))
# [/DEF:ConfigureLogger]
# [DEF:WebSocketLogHandler:Class]
# @SEMANTICS: logging, handler, websocket, buffer
# @PURPOSE: A custom logging handler that captures log records into a buffer. It is designed to be extended for real-time log streaming over WebSockets.
@@ -72,7 +167,7 @@ logger = logging.getLogger("superset_tools_app")
logger.setLevel(logging.INFO)
# Create a formatter
formatter = logging.Formatter(
formatter = BeliefFormatter(
'[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
)
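Illustrative usage (not part of the diff): a minimal sketch of how configure_logger and belief_scope might be combined. The config object is a hypothetical stand-in for LoggingConfig, based only on the fields the function reads above (level, file_path, max_bytes, backup_count, enable_belief_state).

from backend.src.core.logger import logger, belief_scope, configure_logger

class ExampleLoggingConfig:  # hypothetical stand-in for LoggingConfig
    level = "INFO"
    file_path = "backend/backend.log"
    max_bytes = 1_000_000
    backup_count = 3
    enable_belief_state = True

configure_logger(ExampleLoggingConfig())

with belief_scope("ExampleFunction", "demo run"):
    logger.info("doing work")  # BeliefFormatter prefixes "[ExampleFunction][Action]" to the formatted record
# on success the scope also emits [ExampleFunction][Coherence:OK] and [ExampleFunction][Exit]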

View File

@@ -0,0 +1,97 @@
# [DEF:backend.src.core.migration_engine:Module]
#
# @SEMANTICS: migration, engine, zip, yaml, transformation
# @PURPOSE: Handles the interception and transformation of Superset asset ZIP archives.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> PyYAML
#
# @INVARIANT: ZIP structure must be preserved after transformation.
# [SECTION: IMPORTS]
import zipfile
import yaml
import os
import shutil
import tempfile
from pathlib import Path
from typing import Dict
from .logger import logger, belief_scope
# [/SECTION]
# [DEF:MigrationEngine:Class]
# @PURPOSE: Engine for transforming Superset export ZIPs.
class MigrationEngine:
# [DEF:MigrationEngine.transform_zip:Function]
# @PURPOSE: Extracts ZIP, replaces database UUIDs in YAMLs, and re-packages.
# @PARAM: zip_path (str) - Path to the source ZIP file.
# @PARAM: output_path (str) - Path where the transformed ZIP will be saved.
# @PARAM: db_mapping (Dict[str, str]) - Mapping of source UUID to target UUID.
# @PARAM: strip_databases (bool) - Whether to remove the databases directory from the archive.
# @RETURN: bool - True if successful.
def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str], strip_databases: bool = True) -> bool:
"""
Transform a Superset export ZIP by replacing database UUIDs.
"""
with belief_scope("MigrationEngine.transform_zip"):
with tempfile.TemporaryDirectory() as temp_dir_str:
temp_dir = Path(temp_dir_str)
try:
# 1. Extract
logger.info(f"[MigrationEngine.transform_zip][Action] Extracting ZIP: {zip_path}")
with zipfile.ZipFile(zip_path, 'r') as zf:
zf.extractall(temp_dir)
# 2. Transform YAMLs
# Datasets are usually in datasets/*.yaml
dataset_files = list(temp_dir.glob("**/datasets/**/*.yaml")) + list(temp_dir.glob("**/datasets/*.yaml"))
dataset_files = list(set(dataset_files))
logger.info(f"[MigrationEngine.transform_zip][State] Found {len(dataset_files)} dataset files.")
for ds_file in dataset_files:
logger.info(f"[MigrationEngine.transform_zip][Action] Transforming dataset: {ds_file}")
self._transform_yaml(ds_file, db_mapping)
# 3. Re-package
logger.info(f"[MigrationEngine.transform_zip][Action] Re-packaging ZIP to: {output_path} (strip_databases={strip_databases})")
with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zf:
for root, dirs, files in os.walk(temp_dir):
rel_root = Path(root).relative_to(temp_dir)
if strip_databases and "databases" in rel_root.parts:
logger.info(f"[MigrationEngine.transform_zip][Action] Skipping file in databases directory: {rel_root}")
continue
for file in files:
file_path = Path(root) / file
arcname = file_path.relative_to(temp_dir)
zf.write(file_path, arcname)
return True
except Exception as e:
logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
return False
# [DEF:MigrationEngine._transform_yaml:Function]
# @PURPOSE: Replaces database_uuid in a single YAML file.
def _transform_yaml(self, file_path: Path, db_mapping: Dict[str, str]):
with open(file_path, 'r') as f:
data = yaml.safe_load(f)
if not data:
return
# Superset dataset YAML structure:
# database_uuid: ...
source_uuid = data.get('database_uuid')
if source_uuid in db_mapping:
data['database_uuid'] = db_mapping[source_uuid]
with open(file_path, 'w') as f:
yaml.dump(data, f)
# [/DEF:MigrationEngine._transform_yaml]
# [/DEF:MigrationEngine]
# [/DEF:backend.src.core.migration_engine]
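Illustrative usage (not part of the diff): a minimal sketch of driving the engine directly. The paths and UUIDs are placeholders, not values from this repository.

from backend.src.core.migration_engine import MigrationEngine

engine = MigrationEngine()
db_mapping = {
    "11111111-1111-1111-1111-111111111111": "22222222-2222-2222-2222-222222222222",  # source UUID -> target UUID
}
ok = engine.transform_zip(
    zip_path="dashboard_export.zip",        # export downloaded from the source environment
    output_path="dashboard_export_mapped.zip",
    db_mapping=db_mapping,
    strip_databases=True,                   # drop databases/ so no connection passwords are needed on import
)
print("transformed" if ok else "transformation failed")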

View File

@@ -47,12 +47,17 @@ class PluginLoader:
Loads a single Python module and extracts PluginBase subclasses.
"""
# Try to determine the correct package prefix based on how the app is running
if "backend.src" in __name__:
# For standalone execution, we need to handle the import differently
if __name__ == "__main__" or "test" in __name__:
# When running as standalone or in tests, use relative import
package_name = f"plugins.{module_name}"
elif "backend.src" in __name__:
package_prefix = "backend.src.plugins"
package_name = f"{package_prefix}.{module_name}"
else:
package_prefix = "src.plugins"
package_name = f"{package_prefix}.{module_name}"
# print(f"DEBUG: Loading plugin {module_name} as {package_name}")
spec = importlib.util.spec_from_file_location(package_name, file_path)
if spec is None or spec.loader is None:
@@ -106,9 +111,11 @@
# validate(instance={}, schema=schema)
self._plugins[plugin_id] = plugin_instance
self._plugin_configs[plugin_id] = plugin_config
print(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.") # Replace with proper logging
from ..core.logger import logger
logger.info(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.")
except Exception as e:
print(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}") # Replace with proper logging
from ..core.logger import logger
logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:

View File

@@ -0,0 +1,83 @@
# [DEF:backend.src.core.superset_client:Module]
#
# @SEMANTICS: superset, api, client, database, metadata
# @PURPOSE: Extends the base SupersetClient with database-specific metadata fetching.
# @LAYER: Core
# @RELATION: INHERITS_FROM -> superset_tool.client.SupersetClient
#
# @INVARIANT: All database metadata requests must include UUID and name.
# [SECTION: IMPORTS]
from typing import List, Dict, Optional, Tuple
from superset_tool.client import SupersetClient as BaseSupersetClient
from superset_tool.models import SupersetConfig
# [/SECTION]
# [DEF:SupersetClient:Class]
# @PURPOSE: Extended SupersetClient for migration-specific operations.
class SupersetClient(BaseSupersetClient):
# [DEF:SupersetClient.get_databases_summary:Function]
# @PURPOSE: Fetch a summary of databases including uuid, name, and engine.
# @POST: Returns a list of database dictionaries with 'engine' field.
# @RETURN: List[Dict] - Summary of databases.
def get_databases_summary(self) -> List[Dict]:
"""
Fetch a summary of databases including uuid, name, and engine.
"""
query = {
"columns": ["uuid", "database_name", "backend"]
}
_, databases = self.get_databases(query=query)
# Map 'backend' to 'engine' for consistency with contracts
for db in databases:
db['engine'] = db.pop('backend', None)
return databases
# [/DEF:SupersetClient.get_databases_summary]
# [DEF:SupersetClient.get_database_by_uuid:Function]
# @PURPOSE: Find a database by its UUID.
# @PARAM: db_uuid (str) - The UUID of the database.
# @RETURN: Optional[Dict] - Database info if found, else None.
def get_database_by_uuid(self, db_uuid: str) -> Optional[Dict]:
"""
Find a database by its UUID.
"""
query = {
"filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
}
_, databases = self.get_databases(query=query)
return databases[0] if databases else None
# [/DEF:SupersetClient.get_database_by_uuid]
# [DEF:SupersetClient.get_dashboards_summary:Function]
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
# @POST: Returns a list of dashboard dictionaries.
# @RETURN: List[Dict]
def get_dashboards_summary(self) -> List[Dict]:
"""
Fetches dashboard metadata optimized for the grid.
Returns a list of dictionaries mapped to DashboardMetadata fields.
"""
query = {
"columns": ["id", "dashboard_title", "changed_on_utc", "published"]
}
_, dashboards = self.get_dashboards(query=query)
# Map fields to DashboardMetadata schema
result = []
for dash in dashboards:
result.append({
"id": dash.get("id"),
"title": dash.get("dashboard_title"),
"last_modified": dash.get("changed_on_utc"),
"status": "published" if dash.get("published") else "draft"
})
return result
# [/DEF:SupersetClient.get_dashboards_summary]
# [/DEF:SupersetClient]
# [/DEF:backend.src.core.superset_client]
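Illustrative usage (not part of the diff): a sketch of fetching the database summary. The SupersetConfig fields mirror the ones used by MappingService._get_client further down; the URL and credentials are placeholders.

from superset_tool.models import SupersetConfig
from backend.src.core.superset_client import SupersetClient

config = SupersetConfig(
    env="dev",
    base_url="https://superset.example.com",
    auth={"provider": "db", "username": "admin", "password": "secret", "refresh": "false"},
)
client = SupersetClient(config)
for db in client.get_databases_summary():
    print(db["uuid"], db["database_name"], db["engine"])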

View File

@@ -1,167 +0,0 @@
# [DEF:TaskManagerModule:Module]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
# @LAYER: Core
# @RELATION: Depends on PluginLoader to get plugin instances. It is used by the API layer to create and query tasks.
import asyncio
import uuid
from datetime import datetime
from enum import Enum
from typing import Dict, Any, List, Optional
from concurrent.futures import ThreadPoolExecutor
from pydantic import BaseModel, Field
# Assuming PluginBase and PluginConfig are defined in plugin_base.py
# from .plugin_base import PluginBase, PluginConfig # Not needed here, TaskManager interacts with the PluginLoader
# [DEF:TaskStatus:Enum]
# @SEMANTICS: task, status, state, enum
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
class TaskStatus(str, Enum):
PENDING = "PENDING"
RUNNING = "RUNNING"
SUCCESS = "SUCCESS"
FAILED = "FAILED"
# [/DEF]
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
class LogEntry(BaseModel):
timestamp: datetime = Field(default_factory=datetime.utcnow)
level: str
message: str
context: Optional[Dict[str, Any]] = None
# [/DEF]
# [DEF:Task:Class]
# @SEMANTICS: task, job, execution, state, pydantic
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
class Task(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
plugin_id: str
status: TaskStatus = TaskStatus.PENDING
started_at: Optional[datetime] = None
finished_at: Optional[datetime] = None
user_id: Optional[str] = None
logs: List[LogEntry] = Field(default_factory=list)
params: Dict[str, Any] = Field(default_factory=dict)
# [/DEF]
# [DEF:TaskManager:Class]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
class TaskManager:
"""
Manages the lifecycle of tasks, including their creation, execution, and state tracking.
"""
def __init__(self, plugin_loader):
self.plugin_loader = plugin_loader
self.tasks: Dict[str, Task] = {}
self.subscribers: Dict[str, List[asyncio.Queue]] = {}
self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution
self.loop = asyncio.get_event_loop()
# [/DEF]
async def create_task(self, plugin_id: str, params: Dict[str, Any], user_id: Optional[str] = None) -> Task:
"""
Creates and queues a new task for execution.
"""
if not self.plugin_loader.has_plugin(plugin_id):
raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
plugin = self.plugin_loader.get_plugin(plugin_id)
# Validate params against plugin schema (this will be done at a higher level, e.g., API route)
# For now, a basic check
if not isinstance(params, dict):
raise ValueError("Task parameters must be a dictionary.")
task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
self.tasks[task.id] = task
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
return task
async def _run_task(self, task_id: str):
"""
Internal method to execute a task.
"""
task = self.tasks[task_id]
plugin = self.plugin_loader.get_plugin(task.plugin_id)
task.status = TaskStatus.RUNNING
task.started_at = datetime.utcnow()
self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'")
try:
# Execute plugin in a separate thread to avoid blocking the event loop
# if the plugin's execute method is synchronous and potentially CPU-bound.
# If the plugin's execute method is already async, this can be simplified.
await self.loop.run_in_executor(
self.executor,
lambda: asyncio.run(plugin.execute(task.params)) if asyncio.iscoroutinefunction(plugin.execute) else plugin.execute(task.params)
)
task.status = TaskStatus.SUCCESS
self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'")
except Exception as e:
task.status = TaskStatus.FAILED
self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__})
finally:
task.finished_at = datetime.utcnow()
# In a real system, you might notify clients via WebSocket here
def get_task(self, task_id: str) -> Optional[Task]:
"""
Retrieves a task by its ID.
"""
return self.tasks.get(task_id)
def get_all_tasks(self) -> List[Task]:
"""
Retrieves all registered tasks.
"""
return list(self.tasks.values())
def get_task_logs(self, task_id: str) -> List[LogEntry]:
"""
Retrieves logs for a specific task.
"""
task = self.tasks.get(task_id)
return task.logs if task else []
def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
"""
Adds a log entry to a task and notifies subscribers.
"""
task = self.tasks.get(task_id)
if not task:
return
log_entry = LogEntry(level=level, message=message, context=context)
task.logs.append(log_entry)
# Notify subscribers
if task_id in self.subscribers:
for queue in self.subscribers[task_id]:
self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
"""
Subscribes to real-time logs for a task.
"""
queue = asyncio.Queue()
if task_id not in self.subscribers:
self.subscribers[task_id] = []
self.subscribers[task_id].append(queue)
return queue
def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
"""
Unsubscribes from real-time logs for a task.
"""
if task_id in self.subscribers:
self.subscribers[task_id].remove(queue)
if not self.subscribers[task_id]:
del self.subscribers[task_id]

View File

@@ -0,0 +1,12 @@
# [DEF:TaskManagerPackage:Module]
# @SEMANTICS: task, manager, package, exports
# @PURPOSE: Exports the public API of the task manager package.
# @LAYER: Core
# @RELATION: Aggregates models and manager.
from .models import Task, TaskStatus, LogEntry
from .manager import TaskManager
__all__ = ["TaskManager", "Task", "TaskStatus", "LogEntry"]
# [/DEF:TaskManagerPackage:Module]

View File

@@ -0,0 +1,379 @@
# [DEF:TaskManagerModule:Module]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
# @LAYER: Core
# @RELATION: Depends on PluginLoader to get plugin instances. It is used by the API layer to create and query tasks.
# @INVARIANT: Task IDs are unique.
# @CONSTRAINT: Must use belief_scope for logging.
# [SECTION: IMPORTS]
import asyncio
from datetime import datetime
from typing import Dict, Any, List, Optional
from concurrent.futures import ThreadPoolExecutor
from .models import Task, TaskStatus, LogEntry
from .persistence import TaskPersistenceService
from ..logger import logger, belief_scope
# [/SECTION]
# [DEF:TaskManager:Class]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
class TaskManager:
"""
Manages the lifecycle of tasks, including their creation, execution, and state tracking.
"""
# [DEF:TaskManager.__init__:Function]
# @PURPOSE: Initialize the TaskManager with dependencies.
# @PRE: plugin_loader is initialized.
# @POST: TaskManager is ready to accept tasks.
# @PARAM: plugin_loader - The plugin loader instance.
def __init__(self, plugin_loader):
with belief_scope("TaskManager.__init__"):
self.plugin_loader = plugin_loader
self.tasks: Dict[str, Task] = {}
self.subscribers: Dict[str, List[asyncio.Queue]] = {}
self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution
self.persistence_service = TaskPersistenceService()
try:
self.loop = asyncio.get_running_loop()
except RuntimeError:
self.loop = asyncio.get_event_loop()
self.task_futures: Dict[str, asyncio.Future] = {}
# Load persisted tasks on startup
self.load_persisted_tasks()
# [/DEF:TaskManager.__init__:Function]
# [DEF:TaskManager.create_task:Function]
# @PURPOSE: Creates and queues a new task for execution.
# @PRE: Plugin with plugin_id exists. Params are valid.
# @POST: Task is created, added to registry, and scheduled for execution.
# @PARAM: plugin_id (str) - The ID of the plugin to run.
# @PARAM: params (Dict[str, Any]) - Parameters for the plugin.
# @PARAM: user_id (Optional[str]) - ID of the user requesting the task.
# @RETURN: Task - The created task instance.
# @THROWS: ValueError if plugin not found or params invalid.
async def create_task(self, plugin_id: str, params: Dict[str, Any], user_id: Optional[str] = None) -> Task:
with belief_scope("TaskManager.create_task", f"plugin_id={plugin_id}"):
if not self.plugin_loader.has_plugin(plugin_id):
logger.error(f"Plugin with ID '{plugin_id}' not found.")
raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
plugin = self.plugin_loader.get_plugin(plugin_id)
if not isinstance(params, dict):
logger.error("Task parameters must be a dictionary.")
raise ValueError("Task parameters must be a dictionary.")
task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
self.tasks[task.id] = task
logger.info(f"Task {task.id} created and scheduled for execution")
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
return task
# [/DEF:TaskManager.create_task:Function]
# [DEF:TaskManager._run_task:Function]
# @PURPOSE: Internal method to execute a task.
# @PRE: Task exists in registry.
# @POST: Task is executed, status updated to SUCCESS or FAILED.
# @PARAM: task_id (str) - The ID of the task to run.
async def _run_task(self, task_id: str):
with belief_scope("TaskManager._run_task", f"task_id={task_id}"):
task = self.tasks[task_id]
plugin = self.plugin_loader.get_plugin(task.plugin_id)
logger.info(f"Starting execution of task {task_id} for plugin '{plugin.name}'")
task.status = TaskStatus.RUNNING
task.started_at = datetime.utcnow()
self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'")
try:
# Execute plugin
params = {**task.params, "_task_id": task_id}
if asyncio.iscoroutinefunction(plugin.execute):
await plugin.execute(params)
else:
await self.loop.run_in_executor(
self.executor,
plugin.execute,
params
)
logger.info(f"Task {task_id} completed successfully")
task.status = TaskStatus.SUCCESS
self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'")
except Exception as e:
logger.error(f"Task {task_id} failed: {e}")
task.status = TaskStatus.FAILED
self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__})
finally:
task.finished_at = datetime.utcnow()
logger.info(f"Task {task_id} execution finished with status: {task.status}")
# [/DEF:TaskManager._run_task:Function]
# [DEF:TaskManager.resolve_task:Function]
# @PURPOSE: Resumes a task that is awaiting mapping.
# @PRE: Task exists and is in AWAITING_MAPPING state.
# @POST: Task status updated to RUNNING, params updated, execution resumed.
# @PARAM: task_id (str) - The ID of the task.
# @PARAM: resolution_params (Dict[str, Any]) - Params to resolve the wait.
# @THROWS: ValueError if task not found or not awaiting mapping.
async def resolve_task(self, task_id: str, resolution_params: Dict[str, Any]):
with belief_scope("TaskManager.resolve_task", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task or task.status != TaskStatus.AWAITING_MAPPING:
raise ValueError("Task is not awaiting mapping.")
# Update task params with resolution
task.params.update(resolution_params)
task.status = TaskStatus.RUNNING
self._add_log(task_id, "INFO", "Task resumed after mapping resolution.")
# Signal the future to continue
if task_id in self.task_futures:
self.task_futures[task_id].set_result(True)
# [/DEF:TaskManager.resolve_task:Function]
# [DEF:TaskManager.wait_for_resolution:Function]
# @PURPOSE: Pauses execution and waits for a resolution signal.
# @PRE: Task exists.
# @POST: Execution pauses until future is set.
# @PARAM: task_id (str) - The ID of the task.
async def wait_for_resolution(self, task_id: str):
with belief_scope("TaskManager.wait_for_resolution", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task: return
task.status = TaskStatus.AWAITING_MAPPING
self.task_futures[task_id] = self.loop.create_future()
try:
await self.task_futures[task_id]
finally:
if task_id in self.task_futures:
del self.task_futures[task_id]
# [/DEF:TaskManager.wait_for_resolution:Function]
# [DEF:TaskManager.wait_for_input:Function]
# @PURPOSE: Pauses execution and waits for user input.
# @PRE: Task exists.
# @POST: Execution pauses until future is set via resume_task_with_password.
# @PARAM: task_id (str) - The ID of the task.
async def wait_for_input(self, task_id: str):
with belief_scope("TaskManager.wait_for_input", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task: return
# Status is already set to AWAITING_INPUT by await_input()
self.task_futures[task_id] = self.loop.create_future()
try:
await self.task_futures[task_id]
finally:
if task_id in self.task_futures:
del self.task_futures[task_id]
# [/DEF:TaskManager.wait_for_input:Function]
# [DEF:TaskManager.get_task:Function]
# @PURPOSE: Retrieves a task by its ID.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: Optional[Task] - The task or None.
def get_task(self, task_id: str) -> Optional[Task]:
return self.tasks.get(task_id)
# [/DEF:TaskManager.get_task:Function]
# [DEF:TaskManager.get_all_tasks:Function]
# @PURPOSE: Retrieves all registered tasks.
# @RETURN: List[Task] - All tasks.
def get_all_tasks(self) -> List[Task]:
return list(self.tasks.values())
# [/DEF:TaskManager.get_all_tasks:Function]
# [DEF:TaskManager.get_tasks:Function]
# @PURPOSE: Retrieves tasks with pagination and optional status filter.
# @PRE: limit and offset are non-negative integers.
# @POST: Returns a list of tasks sorted by start_time descending.
# @PARAM: limit (int) - Maximum number of tasks to return.
# @PARAM: offset (int) - Number of tasks to skip.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @RETURN: List[Task] - List of tasks matching criteria.
def get_tasks(self, limit: int = 10, offset: int = 0, status: Optional[TaskStatus] = None) -> List[Task]:
tasks = list(self.tasks.values())
if status:
tasks = [t for t in tasks if t.status == status]
# Sort by start_time descending (most recent first)
tasks.sort(key=lambda t: t.started_at or datetime.min, reverse=True)
return tasks[offset:offset + limit]
# [/DEF:TaskManager.get_tasks:Function]
# [DEF:TaskManager.get_task_logs:Function]
# @PURPOSE: Retrieves logs for a specific task.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: List[LogEntry] - List of log entries.
def get_task_logs(self, task_id: str) -> List[LogEntry]:
task = self.tasks.get(task_id)
return task.logs if task else []
# [/DEF:TaskManager.get_task_logs:Function]
# [DEF:TaskManager._add_log:Function]
# @PURPOSE: Adds a log entry to a task and notifies subscribers.
# @PRE: Task exists.
# @POST: Log added to task and pushed to queues.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: level (str) - Log level.
# @PARAM: message (str) - Log message.
# @PARAM: context (Optional[Dict]) - Log context.
def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
task = self.tasks.get(task_id)
if not task:
return
log_entry = LogEntry(level=level, message=message, context=context)
task.logs.append(log_entry)
# Notify subscribers
if task_id in self.subscribers:
for queue in self.subscribers[task_id]:
self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
# [/DEF:TaskManager._add_log:Function]
# [DEF:TaskManager.subscribe_logs:Function]
# @PURPOSE: Subscribes to real-time logs for a task.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: asyncio.Queue - Queue for log entries.
async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
queue = asyncio.Queue()
if task_id not in self.subscribers:
self.subscribers[task_id] = []
self.subscribers[task_id].append(queue)
return queue
# [/DEF:TaskManager.subscribe_logs:Function]
# [DEF:TaskManager.unsubscribe_logs:Function]
# @PURPOSE: Unsubscribes from real-time logs for a task.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: queue (asyncio.Queue) - Queue to remove.
def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
if task_id in self.subscribers:
if queue in self.subscribers[task_id]:
self.subscribers[task_id].remove(queue)
if not self.subscribers[task_id]:
del self.subscribers[task_id]
# [/DEF:TaskManager.unsubscribe_logs:Function]
# [DEF:TaskManager.persist_awaiting_input_tasks:Function]
# @PURPOSE: Persist tasks in AWAITING_INPUT state using persistence service.
def persist_awaiting_input_tasks(self) -> None:
self.persistence_service.persist_tasks(list(self.tasks.values()))
# [/DEF:TaskManager.persist_awaiting_input_tasks:Function]
# [DEF:TaskManager.load_persisted_tasks:Function]
# @PURPOSE: Load persisted tasks using persistence service.
def load_persisted_tasks(self) -> None:
loaded_tasks = self.persistence_service.load_tasks()
for task in loaded_tasks:
if task.id not in self.tasks:
self.tasks[task.id] = task
# [/DEF:TaskManager.load_persisted_tasks:Function]
# [DEF:TaskManager.await_input:Function]
# @PURPOSE: Transition a task to AWAITING_INPUT state with input request.
# @PRE: Task exists and is in RUNNING state.
# @POST: Task status changed to AWAITING_INPUT, input_request set, persisted.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: input_request (Dict) - Details about required input.
# @THROWS: ValueError if task not found or not RUNNING.
def await_input(self, task_id: str, input_request: Dict[str, Any]) -> None:
with belief_scope("TaskManager.await_input", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task:
raise ValueError(f"Task {task_id} not found")
if task.status != TaskStatus.RUNNING:
raise ValueError(f"Task {task_id} is not RUNNING (current: {task.status})")
task.status = TaskStatus.AWAITING_INPUT
task.input_required = True
task.input_request = input_request
self._add_log(task_id, "INFO", "Task paused for user input", {"input_request": input_request})
self.persist_awaiting_input_tasks()
# [/DEF:TaskManager.await_input:Function]
# [DEF:TaskManager.resume_task_with_password:Function]
# @PURPOSE: Resume a task that is awaiting input with provided passwords.
# @PRE: Task exists and is in AWAITING_INPUT state.
# @POST: Task status changed to RUNNING, passwords injected, task resumed.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: passwords (Dict[str, str]) - Mapping of database name to password.
# @THROWS: ValueError if task not found, not awaiting input, or passwords invalid.
def resume_task_with_password(self, task_id: str, passwords: Dict[str, str]) -> None:
with belief_scope("TaskManager.resume_task_with_password", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task:
raise ValueError(f"Task {task_id} not found")
if task.status != TaskStatus.AWAITING_INPUT:
raise ValueError(f"Task {task_id} is not AWAITING_INPUT (current: {task.status})")
if not isinstance(passwords, dict) or not passwords:
raise ValueError("Passwords must be a non-empty dictionary")
task.params["passwords"] = passwords
task.input_required = False
task.input_request = None
task.status = TaskStatus.RUNNING
self._add_log(task_id, "INFO", "Task resumed with passwords", {"databases": list(passwords.keys())})
if task_id in self.task_futures:
self.task_futures[task_id].set_result(True)
# Remove from persistence as it's no longer awaiting input
self.persistence_service.delete_tasks([task_id])
# [/DEF:TaskManager.resume_task_with_password:Function]
# [DEF:TaskManager.clear_tasks:Function]
# @PURPOSE: Clears tasks based on status filter.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @RETURN: int - Number of tasks cleared.
def clear_tasks(self, status: Optional[TaskStatus] = None) -> int:
with belief_scope("TaskManager.clear_tasks"):
tasks_to_remove = []
for task_id, task in list(self.tasks.items()):
# If a status filter is provided, remove only tasks with that status.
# Otherwise remove every non-active task: RUNNING tasks are kept, while
# AWAITING_INPUT and AWAITING_MAPPING are distinct TaskStatus values and are cleared.
should_remove = False
if status:
if task.status == status:
should_remove = True
else:
# Clear all non-active tasks
if task.status not in [TaskStatus.RUNNING]:
should_remove = True
if should_remove:
tasks_to_remove.append(task_id)
for tid in tasks_to_remove:
# Cancel future if exists (e.g. for AWAITING_INPUT/MAPPING)
if tid in self.task_futures:
self.task_futures[tid].cancel()
del self.task_futures[tid]
del self.tasks[tid]
# Remove from persistence
self.persistence_service.delete_tasks(tasks_to_remove)
logger.info(f"Cleared {len(tasks_to_remove)} tasks.")
return len(tasks_to_remove)
# [/DEF:TaskManager.clear_tasks:Function]
# [/DEF:TaskManager:Class]
# [/DEF:TaskManagerModule:Module]
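Illustrative usage (not part of the diff): a sketch of the task lifecycle from async code, assuming the package path backend.src.core.task_manager and an already-initialized plugin_loader. The plugin id, params, and password are placeholders.

import asyncio
from backend.src.core.task_manager import TaskManager, TaskStatus

async def run_migration(plugin_loader):
    tm = TaskManager(plugin_loader)
    task = await tm.create_task("migration", {"source_env_id": "env-a", "target_env_id": "env-b"})
    await asyncio.sleep(1)  # give the plugin a moment to start
    if tm.get_task(task.id).status == TaskStatus.AWAITING_INPUT:
        tm.resume_task_with_password(task.id, {"PostgreSQL": "secret"})
    for entry in tm.get_task_logs(task.id):
        print(entry.level, entry.message)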

View File

@@ -0,0 +1,67 @@
# [DEF:TaskManagerModels:Module]
# @SEMANTICS: task, models, pydantic, enum, state
# @PURPOSE: Defines the data models and enumerations used by the Task Manager.
# @LAYER: Core
# @RELATION: Used by TaskManager and API routes.
# @INVARIANT: Task IDs are immutable once created.
# @CONSTRAINT: Must use Pydantic for data validation.
# [SECTION: IMPORTS]
import uuid
from datetime import datetime
from enum import Enum
from typing import Dict, Any, List, Optional
from pydantic import BaseModel, Field
# [/SECTION]
# [DEF:TaskStatus:Enum]
# @SEMANTICS: task, status, state, enum
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
class TaskStatus(str, Enum):
PENDING = "PENDING"
RUNNING = "RUNNING"
SUCCESS = "SUCCESS"
FAILED = "FAILED"
AWAITING_MAPPING = "AWAITING_MAPPING"
AWAITING_INPUT = "AWAITING_INPUT"
# [/DEF:TaskStatus:Enum]
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
class LogEntry(BaseModel):
timestamp: datetime = Field(default_factory=datetime.utcnow)
level: str
message: str
context: Optional[Dict[str, Any]] = None
# [/DEF:LogEntry:Class]
# [DEF:Task:Class]
# @SEMANTICS: task, job, execution, state, pydantic
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
class Task(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
plugin_id: str
status: TaskStatus = TaskStatus.PENDING
started_at: Optional[datetime] = None
finished_at: Optional[datetime] = None
user_id: Optional[str] = None
logs: List[LogEntry] = Field(default_factory=list)
params: Dict[str, Any] = Field(default_factory=dict)
input_required: bool = False
input_request: Optional[Dict[str, Any]] = None
# [DEF:Task.__init__:Function]
# @PURPOSE: Initializes the Task model and validates input_request for AWAITING_INPUT status.
# @PRE: If status is AWAITING_INPUT, input_request must be provided.
# @POST: Task instance is created or ValueError is raised.
# @PARAM: **data - Keyword arguments for model initialization.
def __init__(self, **data):
super().__init__(**data)
if self.status == TaskStatus.AWAITING_INPUT and not self.input_request:
raise ValueError("input_request is required when status is AWAITING_INPUT")
# [/DEF:Task.__init__:Function]
# [/DEF:Task:Class]
# [/DEF:TaskManagerModels:Module]
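Illustrative usage (not part of the diff): a sketch of the AWAITING_INPUT validation enforced in Task.__init__, assuming the module path backend.src.core.task_manager.models.

from backend.src.core.task_manager.models import Task, TaskStatus

paused = Task(
    plugin_id="migration",
    status=TaskStatus.AWAITING_INPUT,
    input_request={"type": "database_password", "databases": ["PostgreSQL"]},
)
try:
    Task(plugin_id="migration", status=TaskStatus.AWAITING_INPUT)  # missing input_request
except ValueError as exc:
    print(exc)  # input_request is required when status is AWAITING_INPUT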

View File

@@ -0,0 +1,142 @@
# [DEF:TaskPersistenceModule:Module]
# @SEMANTICS: persistence, sqlite, task, storage
# @PURPOSE: Handles the persistence of tasks, specifically those awaiting user input, to a SQLite database.
# @LAYER: Core
# @RELATION: Used by TaskManager to save and load tasks.
# @INVARIANT: Database schema must match the Task model structure.
# @CONSTRAINT: Uses synchronous SQLite operations (blocking), should be used carefully.
# [SECTION: IMPORTS]
import sqlite3
import json
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Any
from .models import Task, TaskStatus
from ..logger import logger, belief_scope
# [/SECTION]
# [DEF:TaskPersistenceService:Class]
# @SEMANTICS: persistence, service, database
# @PURPOSE: Provides methods to save and load tasks from a local SQLite database.
class TaskPersistenceService:
def __init__(self, db_path: Optional[Path] = None):
if db_path is None:
self.db_path = Path(__file__).parent.parent.parent.parent / "migrations.db"
else:
self.db_path = db_path
self._ensure_db_exists()
# [DEF:TaskPersistenceService._ensure_db_exists:Function]
# @PURPOSE: Ensures the database directory and table exist.
# @PRE: None.
# @POST: Database file and table are created if they didn't exist.
def _ensure_db_exists(self) -> None:
with belief_scope("TaskPersistenceService._ensure_db_exists"):
self.db_path.parent.mkdir(parents=True, exist_ok=True)
conn = sqlite3.connect(str(self.db_path))
cursor = conn.cursor()
cursor.execute("""
CREATE TABLE IF NOT EXISTS persistent_tasks (
id TEXT PRIMARY KEY,
status TEXT NOT NULL,
created_at TEXT NOT NULL,
updated_at TEXT NOT NULL,
input_request JSON,
context JSON
)
""")
conn.commit()
conn.close()
# [/DEF:TaskPersistenceService._ensure_db_exists:Function]
# [DEF:TaskPersistenceService.persist_tasks:Function]
# @PURPOSE: Persists a list of tasks to the database.
# @PRE: Tasks list contains valid Task objects.
# @POST: Tasks matching the criteria (AWAITING_INPUT) are saved/updated in the DB.
# @PARAM: tasks (List[Task]) - The list of tasks to check and persist.
def persist_tasks(self, tasks: List[Task]) -> None:
with belief_scope("TaskPersistenceService.persist_tasks"):
conn = sqlite3.connect(str(self.db_path))
cursor = conn.cursor()
count = 0
for task in tasks:
if task.status == TaskStatus.AWAITING_INPUT:
cursor.execute("""
INSERT OR REPLACE INTO persistent_tasks
(id, status, created_at, updated_at, input_request, context)
VALUES (?, ?, ?, ?, ?, ?)
""", (
task.id,
task.status.value,
task.started_at.isoformat() if task.started_at else datetime.utcnow().isoformat(),
datetime.utcnow().isoformat(),
json.dumps(task.input_request) if task.input_request else None,
json.dumps(task.params)
))
count += 1
conn.commit()
conn.close()
logger.info(f"Persisted {count} tasks awaiting input.")
# [/DEF:TaskPersistenceService.persist_tasks:Function]
# [DEF:TaskPersistenceService.load_tasks:Function]
# @PURPOSE: Loads persisted tasks from the database.
# @PRE: Database exists.
# @POST: Returns a list of Task objects reconstructed from the DB.
# @RETURN: List[Task] - The loaded tasks.
def load_tasks(self) -> List[Task]:
with belief_scope("TaskPersistenceService.load_tasks"):
if not self.db_path.exists():
return []
conn = sqlite3.connect(str(self.db_path))
cursor = conn.cursor()
cursor.execute("SELECT id, status, created_at, input_request, context FROM persistent_tasks")
rows = cursor.fetchall()
loaded_tasks = []
for row in rows:
task_id, status, created_at, input_request_json, context_json = row
try:
task = Task(
id=task_id,
plugin_id="migration", # Default, assumes migration context for now
status=TaskStatus(status),
started_at=datetime.fromisoformat(created_at),
input_required=True,
input_request=json.loads(input_request_json) if input_request_json else None,
params=json.loads(context_json) if context_json else {}
)
loaded_tasks.append(task)
except Exception as e:
logger.error(f"Failed to load task {task_id}: {e}")
conn.close()
return loaded_tasks
# [/DEF:TaskPersistenceService.load_tasks:Function]
# [DEF:TaskPersistenceService.delete_tasks:Function]
# @PURPOSE: Deletes specific tasks from the database.
# @PARAM: task_ids (List[str]) - List of task IDs to delete.
def delete_tasks(self, task_ids: List[str]) -> None:
if not task_ids:
return
with belief_scope("TaskPersistenceService.delete_tasks"):
conn = sqlite3.connect(str(self.db_path))
cursor = conn.cursor()
placeholders = ', '.join('?' for _ in task_ids)
cursor.execute(f"DELETE FROM persistent_tasks WHERE id IN ({placeholders})", task_ids)
conn.commit()
conn.close()
# [/DEF:TaskPersistenceService.delete_tasks:Function]
# [/DEF:TaskPersistenceService:Class]
# [/DEF:TaskPersistenceModule:Module]
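Illustrative usage (not part of the diff): a round-trip sketch against a throwaway database path; the task contents are placeholders.

from pathlib import Path
from backend.src.core.task_manager.models import Task, TaskStatus
from backend.src.core.task_manager.persistence import TaskPersistenceService

svc = TaskPersistenceService(db_path=Path("/tmp/demo_tasks.db"))
waiting = Task(
    plugin_id="migration",
    status=TaskStatus.AWAITING_INPUT,
    input_request={"type": "database_password", "databases": ["PostgreSQL"]},
)
svc.persist_tasks([waiting])        # only AWAITING_INPUT tasks are written
print([t.id for t in svc.load_tasks()])
svc.delete_tasks([waiting.id])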

View File

@@ -0,0 +1,53 @@
# [DEF:backend.src.core.utils.matching:Module]
#
# @SEMANTICS: fuzzy, matching, rapidfuzz, database, mapping
# @PURPOSE: Provides utility functions for fuzzy matching database names.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> rapidfuzz
#
# @INVARIANT: Confidence scores are returned as floats between 0.0 and 1.0.
# [SECTION: IMPORTS]
from rapidfuzz import fuzz, process
from typing import List, Dict
# [/SECTION]
# [DEF:suggest_mappings:Function]
# @PURPOSE: Suggests mappings between source and target databases using fuzzy matching.
# @PRE: source_databases and target_databases are lists of dictionaries with 'uuid' and 'database_name'.
# @POST: Returns a list of suggested mappings with confidence scores.
# @PARAM: source_databases (List[Dict]) - Databases from the source environment.
# @PARAM: target_databases (List[Dict]) - Databases from the target environment.
# @PARAM: threshold (int) - Minimum confidence score (0-100).
# @RETURN: List[Dict] - Suggested mappings.
def suggest_mappings(source_databases: List[Dict], target_databases: List[Dict], threshold: int = 60) -> List[Dict]:
"""
Suggest mappings between source and target databases using fuzzy matching.
"""
suggestions = []
if not target_databases:
return suggestions
target_names = [db['database_name'] for db in target_databases]
for s_db in source_databases:
# Use token_sort_ratio as decided in research.md
match = process.extractOne(
s_db['database_name'],
target_names,
scorer=fuzz.token_sort_ratio
)
if match:
name, score, index = match
if score >= threshold:
suggestions.append({
"source_db_uuid": s_db['uuid'],
"target_db_uuid": target_databases[index]['uuid'],
"confidence": score / 100.0
})
return suggestions
# [/DEF:suggest_mappings]
# [/DEF:backend.src.core.utils.matching]
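Illustrative usage (not part of the diff): sample inputs shaped like get_databases_summary() output; the names and UUIDs are made up.

from backend.src.core.utils.matching import suggest_mappings

source = [{"uuid": "src-1", "database_name": "analytics_prod"}]
target = [
    {"uuid": "tgt-1", "database_name": "Analytics Prod"},
    {"uuid": "tgt-2", "database_name": "staging"},
]
for s in suggest_mappings(source, target, threshold=60):
    print(s["source_db_uuid"], "->", s["target_db_uuid"], round(s["confidence"], 2))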

View File

@@ -21,7 +21,12 @@ def get_config_manager() -> ConfigManager:
plugin_dir = Path(__file__).parent / "plugins"
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
from .core.logger import logger
logger.info(f"PluginLoader initialized with directory: {plugin_dir}")
logger.info(f"Available plugins: {[config.name for config in plugin_loader.get_all_plugin_configs()]}")
task_manager = TaskManager(plugin_loader)
logger.info("TaskManager initialized")
def get_plugin_loader() -> PluginLoader:
"""Dependency injector for the PluginLoader."""

View File

@@ -0,0 +1,28 @@
# [DEF:backend.src.models.dashboard:Module]
# @SEMANTICS: dashboard, model, metadata, migration
# @PURPOSE: Defines data models for dashboard metadata and selection.
# @LAYER: Model
# @RELATION: USED_BY -> backend.src.api.routes.migration
from pydantic import BaseModel
from typing import List
# [DEF:DashboardMetadata:Class]
# @PURPOSE: Represents a dashboard available for migration.
class DashboardMetadata(BaseModel):
id: int
title: str
last_modified: str
status: str
# [/DEF:DashboardMetadata]
# [DEF:DashboardSelection:Class]
# @PURPOSE: Represents the user's selection of dashboards to migrate.
class DashboardSelection(BaseModel):
selected_ids: List[int]
source_env_id: str
target_env_id: str
replace_db_config: bool = False
# [/DEF:DashboardSelection]
# [/DEF:backend.src.models.dashboard]

View File

@@ -0,0 +1,70 @@
# [DEF:backend.src.models.mapping:Module]
#
# @SEMANTICS: database, mapping, environment, migration, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for environment metadata and database mappings using SQLAlchemy.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> sqlalchemy
#
# @INVARIANT: All primary keys are UUID strings.
# @CONSTRAINT: source_env_id and target_env_id must be valid environment IDs.
# [SECTION: IMPORTS]
from sqlalchemy import Column, String, Boolean, DateTime, ForeignKey, Enum as SQLEnum
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import func
import uuid
import enum
# [/SECTION]
Base = declarative_base()
# [DEF:MigrationStatus:Class]
# @PURPOSE: Enumeration of possible migration job statuses.
class MigrationStatus(enum.Enum):
PENDING = "PENDING"
RUNNING = "RUNNING"
COMPLETED = "COMPLETED"
FAILED = "FAILED"
AWAITING_MAPPING = "AWAITING_MAPPING"
# [/DEF:MigrationStatus]
# [DEF:Environment:Class]
# @PURPOSE: Represents a Superset instance environment.
class Environment(Base):
__tablename__ = "environments"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
name = Column(String, nullable=False)
url = Column(String, nullable=False)
credentials_id = Column(String, nullable=False)
# [/DEF:Environment]
# [DEF:DatabaseMapping:Class]
# @PURPOSE: Represents a mapping between source and target databases.
class DatabaseMapping(Base):
__tablename__ = "database_mappings"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
source_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
target_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
source_db_uuid = Column(String, nullable=False)
target_db_uuid = Column(String, nullable=False)
source_db_name = Column(String, nullable=False)
target_db_name = Column(String, nullable=False)
engine = Column(String, nullable=True)
# [/DEF:DatabaseMapping]
# [DEF:MigrationJob:Class]
# @PURPOSE: Represents a single migration execution job.
class MigrationJob(Base):
__tablename__ = "migration_jobs"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
source_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
target_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
status = Column(SQLEnum(MigrationStatus), default=MigrationStatus.PENDING)
replace_db = Column(Boolean, default=False)
created_at = Column(DateTime(timezone=True), server_default=func.now())
# [/DEF:MigrationJob]
# [/DEF:backend.src.models.mapping]
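Illustrative usage (not part of the diff): a standalone sketch of creating the schema and storing one mapping. The application's own engine/session setup lives in backend.src.core.database; the URL, names, and UUIDs here are placeholders.

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from backend.src.models.mapping import Base, Environment, DatabaseMapping

db_engine = create_engine("sqlite:///backend/mappings.db")
Base.metadata.create_all(db_engine)
Session = sessionmaker(bind=db_engine)

with Session() as session:
    dev = Environment(name="dev", url="https://dev.example.com", credentials_id="dev-creds")
    prod = Environment(name="prod", url="https://prod.example.com", credentials_id="prod-creds")
    session.add_all([dev, prod])
    session.flush()  # populate generated ids
    session.add(DatabaseMapping(
        source_env_id=dev.id, target_env_id=prod.id,
        source_db_uuid="src-uuid", target_db_uuid="tgt-uuid",
        source_db_name="analytics", target_db_name="analytics",
        engine="postgresql",
    ))
    session.commit()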

View File

@@ -17,6 +17,9 @@ from superset_tool.utils.init_clients import setup_clients
from superset_tool.utils.fileio import create_temp_file, update_yamls, create_dashboard_export
from ..dependencies import get_config_manager
from superset_tool.utils.logger import SupersetLogger
from ..core.migration_engine import MigrationEngine
from ..core.database import SessionLocal
from ..models.mapping import DatabaseMapping, Environment
class MigrationPlugin(PluginBase):
"""
@@ -84,48 +87,118 @@ class MigrationPlugin(PluginBase):
}
async def execute(self, params: Dict[str, Any]):
from_env = params["from_env"]
to_env = params["to_env"]
dashboard_regex = params["dashboard_regex"]
source_env_id = params.get("source_env_id")
target_env_id = params.get("target_env_id")
selected_ids = params.get("selected_ids")
# Legacy support or alternative params
from_env_name = params.get("from_env")
to_env_name = params.get("to_env")
dashboard_regex = params.get("dashboard_regex")
replace_db_config = params.get("replace_db_config", False)
from_db_id = params.get("from_db_id")
to_db_id = params.get("to_db_id")
logger = SupersetLogger(log_dir=Path.cwd() / "logs", console=True)
logger.info(f"[MigrationPlugin][Entry] Starting migration from {from_env} to {to_env}.")
# [DEF:MigrationPlugin.execute:Action]
# @PURPOSE: Execute the migration logic with proper task logging.
task_id = params.get("_task_id")
from ..dependencies import get_task_manager
tm = get_task_manager()
class TaskLoggerProxy(SupersetLogger):
def __init__(self):
# Initialize parent with dummy values since we override methods
super().__init__(console=False)
def debug(self, msg, *args, extra=None, **kwargs):
if task_id: tm._add_log(task_id, "DEBUG", msg, extra or {})
def info(self, msg, *args, extra=None, **kwargs):
if task_id: tm._add_log(task_id, "INFO", msg, extra or {})
def warning(self, msg, *args, extra=None, **kwargs):
if task_id: tm._add_log(task_id, "WARNING", msg, extra or {})
def error(self, msg, *args, extra=None, **kwargs):
if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
def critical(self, msg, *args, extra=None, **kwargs):
if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
def exception(self, msg, *args, **kwargs):
if task_id: tm._add_log(task_id, "ERROR", msg, {"exception": True})
logger = TaskLoggerProxy()
logger.info(f"[MigrationPlugin][Entry] Starting migration task.")
logger.info(f"[MigrationPlugin][Action] Params: {params}")
try:
config_manager = get_config_manager()
all_clients = setup_clients(logger, custom_envs=config_manager.get_environments())
from_c = all_clients.get(from_env)
to_c = all_clients.get(to_env)
environments = config_manager.get_environments()
# Resolve environments
src_env = None
tgt_env = None
if source_env_id:
src_env = next((e for e in environments if e.id == source_env_id), None)
elif from_env_name:
src_env = next((e for e in environments if e.name == from_env_name), None)
if target_env_id:
tgt_env = next((e for e in environments if e.id == target_env_id), None)
elif to_env_name:
tgt_env = next((e for e in environments if e.name == to_env_name), None)
if not src_env or not tgt_env:
raise ValueError(f"Could not resolve source or target environment. Source: {source_env_id or from_env_name}, Target: {target_env_id or to_env_name}")
from_env_name = src_env.name
to_env_name = tgt_env.name
logger.info(f"[MigrationPlugin][State] Resolved environments: {from_env_name} -> {to_env_name}")
all_clients = setup_clients(logger, custom_envs=environments)
from_c = all_clients.get(from_env_name)
to_c = all_clients.get(to_env_name)
if not from_c or not to_c:
raise ValueError(f"One or both environments ('{from_env}', '{to_env}') not found in configuration.")
raise ValueError(f"Clients not initialized for environments: {from_env_name}, {to_env_name}")
_, all_dashboards = from_c.get_dashboards()
dashboards_to_migrate = []
if selected_ids:
dashboards_to_migrate = [d for d in all_dashboards if d["id"] in selected_ids]
elif dashboard_regex:
regex_str = str(dashboard_regex)
dashboards_to_migrate = [
d for d in all_dashboards if re.search(regex_str, d["dashboard_title"], re.IGNORECASE)
]
else:
if not dashboards_to_migrate:
logger.warning("[MigrationPlugin][State] No selection criteria provided (selected_ids or dashboard_regex).")
logger.warning("[MigrationPlugin][State] No dashboards found matching the regex.")
return
db_config_replacement = None
if not dashboards_to_migrate:
logger.warning("[MigrationPlugin][State] No dashboards found matching criteria.")
return
# Fetch mappings from database
db_mapping = {}
if replace_db_config:
if from_db_id is None or to_db_id is None:
raise ValueError("Source and target database IDs are required when replacing database configuration.")
from_db = from_c.get_database(int(from_db_id))
to_db = to_c.get_database(int(to_db_id))
old_result = from_db.get("result", {})
new_result = to_db.get("result", {})
db_config_replacement = {
"old": {"database_name": old_result.get("database_name"), "uuid": old_result.get("uuid"), "id": str(from_db.get("id"))},
"new": {"database_name": new_result.get("database_name"), "uuid": new_result.get("uuid"), "id": str(to_db.get("id"))}
}
db = SessionLocal()
try:
# Find environment IDs by name
src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
if src_env and tgt_env:
mappings = db.query(DatabaseMapping).filter(
DatabaseMapping.source_env_id == src_env.id,
DatabaseMapping.target_env_id == tgt_env.id
).all()
db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
logger.info(f"[MigrationPlugin][State] Loaded {len(db_mapping)} database mappings.")
finally:
db.close()
engine = MigrationEngine()
for dash in dashboards_to_migrate:
dash_id, dash_slug, title = dash["id"], dash.get("slug"), dash["dashboard_title"]
@@ -133,21 +206,86 @@ class MigrationPlugin(PluginBase):
try:
exported_content, _ = from_c.export_dashboard(dash_id)
with create_temp_file(content=exported_content, dry_run=True, suffix=".zip", logger=logger) as tmp_zip_path:
# Always transform to strip databases to avoid password errors
if not db_config_replacement:
to_c.import_dashboard(file_name=tmp_zip_path, dash_id=dash_id, dash_slug=dash_slug)
else:
with create_temp_file(suffix=".dir", logger=logger) as tmp_unpack_dir:
with zipfile.ZipFile(tmp_zip_path, "r") as zip_ref:
zip_ref.extractall(tmp_unpack_dir)
update_yamls(db_configs=[db_config_replacement], path=str(tmp_unpack_dir))
with create_temp_file(suffix=".zip", dry_run=True, logger=logger) as tmp_new_zip:
create_dashboard_export(zip_path=tmp_new_zip, source_paths=[str(p) for p in Path(tmp_unpack_dir).glob("**/*")])
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
if not success and replace_db_config:
# Signal missing mapping and wait (only if we care about mappings)
if task_id:
logger.info(f"[MigrationPlugin][Action] Pausing for missing mapping in task {task_id}")
# In a real scenario, we'd pass the missing DB info to the frontend
# For this task, we'll just simulate the wait
await tm.wait_for_resolution(task_id)
# After resolution, retry transformation with updated mappings
# (Mappings would be updated in task.params by resolve_task)
db = SessionLocal()
try:
src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
mappings = db.query(DatabaseMapping).filter(
DatabaseMapping.source_env_id == src_env.id,
DatabaseMapping.target_env_id == tgt_env.id
).all()
db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
finally:
db.close()
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
if success:
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
else:
logger.error(f"[MigrationPlugin][Failure] Failed to transform ZIP for dashboard {title}")
logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported.") logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported.")
except Exception as exc: except Exception as exc:
# Check for password error
error_msg = str(exc)
# The error message from Superset is often a JSON string inside a string.
# We need to robustly detect the password requirement.
# Typical error: "Error importing dashboard: databases/PostgreSQL.yaml: {'_schema': ['Must provide a password for the database']}"
if "Must provide a password for the database" in error_msg:
# Extract database name
# Try to find "databases/DBNAME.yaml" pattern
import re
db_name = "unknown"
match = re.search(r"databases/([^.]+)\.yaml", error_msg)
if match:
db_name = match.group(1)
else:
# Fallback: try to find 'database 'NAME'' pattern
match_alt = re.search(r"database '([^']+)'", error_msg)
if match_alt:
db_name = match_alt.group(1)
logger.warning(f"[MigrationPlugin][Action] Detected missing password for database: {db_name}")
if task_id:
input_request = {
"type": "database_password",
"databases": [db_name],
"error_message": error_msg
}
tm.await_input(task_id, input_request)
# Wait for user input
await tm.wait_for_input(task_id)
# Resume with passwords
task = tm.get_task(task_id)
passwords = task.params.get("passwords", {})
# Retry import with password
if passwords:
logger.info(f"[MigrationPlugin][Action] Retrying import for {title} with provided passwords.")
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug, passwords=passwords)
logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported after password injection.")
# Clear passwords from params after use for security
if "passwords" in task.params:
del task.params["passwords"]
continue
logger.error(f"[MigrationPlugin][Failure] Failed to migrate dashboard {title}: {exc}", exc_info=True) logger.error(f"[MigrationPlugin][Failure] Failed to migrate dashboard {title}: {exc}", exc_info=True)
logger.info("[MigrationPlugin][Exit] Migration finished.") logger.info("[MigrationPlugin][Exit] Migration finished.")

View File

@@ -0,0 +1,66 @@
# [DEF:backend.src.services.mapping_service:Module]
#
# @SEMANTICS: service, mapping, fuzzy-matching, superset
# @PURPOSE: Orchestrates database fetching and fuzzy matching suggestions.
# @LAYER: Service
# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
# @RELATION: DEPENDS_ON -> backend.src.core.utils.matching
#
# @INVARIANT: Suggestions are based on database names.
# [SECTION: IMPORTS]
from typing import List, Dict
from backend.src.core.superset_client import SupersetClient
from backend.src.core.utils.matching import suggest_mappings
from superset_tool.models import SupersetConfig
# [/SECTION]
# [DEF:MappingService:Class]
# @PURPOSE: Service for handling database mapping logic.
class MappingService:
# [DEF:MappingService.__init__:Function]
def __init__(self, config_manager):
self.config_manager = config_manager
# [DEF:MappingService._get_client:Function]
# @PURPOSE: Helper to get an initialized SupersetClient for an environment.
def _get_client(self, env_id: str) -> SupersetClient:
envs = self.config_manager.get_environments()
env = next((e for e in envs if e.id == env_id), None)
if not env:
raise ValueError(f"Environment {env_id} not found")
superset_config = SupersetConfig(
env=env.name,
base_url=env.url,
auth={
"provider": "db",
"username": env.username,
"password": env.password,
"refresh": "false"
}
)
return SupersetClient(superset_config)
# [DEF:MappingService.get_suggestions:Function]
# @PURPOSE: Fetches databases from both environments and returns fuzzy matching suggestions.
# @PARAM: source_env_id (str) - Source environment ID.
# @PARAM: target_env_id (str) - Target environment ID.
# @RETURN: List[Dict] - Suggested mappings.
async def get_suggestions(self, source_env_id: str, target_env_id: str) -> List[Dict]:
"""
Get suggested mappings between two environments.
"""
source_client = self._get_client(source_env_id)
target_client = self._get_client(target_env_id)
source_dbs = source_client.get_databases_summary()
target_dbs = target_client.get_databases_summary()
return suggest_mappings(source_dbs, target_dbs)
# [/DEF:MappingService.get_suggestions]
# [/DEF:MappingService]
# [/DEF:backend.src.services.mapping_service]
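The fuzzy matcher itself is not part of this changeset. A minimal sketch of what `backend.src.core.utils.matching.suggest_mappings` might look like is given below, assuming RapidFuzz (named in docs/migration_mapping.md in this changeset) and database summaries shaped like `{"uuid": ..., "database_name": ...}`; the real implementation may differ.

```python
# Minimal sketch of suggest_mappings, assuming RapidFuzz and database summaries
# shaped like {"uuid": ..., "database_name": ...}. The real implementation in
# backend.src.core.utils.matching may differ.
from typing import Dict, List
from rapidfuzz import fuzz

def suggest_mappings(source_dbs: List[Dict], target_dbs: List[Dict],
                     threshold: float = 0.6) -> List[Dict]:
    suggestions = []
    for src in source_dbs:
        best, best_score = None, 0.0
        for tgt in target_dbs:
            # token_sort_ratio returns 0..100; normalise to 0..1
            score = fuzz.token_sort_ratio(src["database_name"], tgt["database_name"]) / 100.0
            if score > best_score:
                best, best_score = tgt, score
        if best and best_score >= threshold:
            suggestions.append({
                "source_db_uuid": src["uuid"],
                "target_db_uuid": best["uuid"],
                "confidence": best_score,
            })
    return suggestions
```

Keeping `confidence` in the 0 to 1 range matches how MappingTable.svelte (later in this changeset) renders it as a percentage.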

View File

@@ -0,0 +1,44 @@
import pytest
from backend.src.core.logger import belief_scope, logger
def test_belief_scope_logs_entry_action_exit(caplog):
"""Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs."""
caplog.set_level("INFO")
with belief_scope("TestFunction"):
logger.info("Doing something important")
# Check that the logs contain the expected patterns
log_messages = [record.message for record in caplog.records]
assert any("[TestFunction][Entry]" in msg for msg in log_messages), "Entry log not found"
assert any("[TestFunction][Action] Doing something important" in msg for msg in log_messages), "Action log not found"
assert any("[TestFunction][Exit]" in msg for msg in log_messages), "Exit log not found"
def test_belief_scope_error_handling(caplog):
"""Test that belief_scope logs Coherence:Failed on exception."""
caplog.set_level("INFO")
with pytest.raises(ValueError):
with belief_scope("FailingFunction"):
raise ValueError("Something went wrong")
log_messages = [record.message for record in caplog.records]
assert any("[FailingFunction][Entry]" in msg for msg in log_messages), "Entry log not found"
assert any("[FailingFunction][Coherence:Failed]" in msg for msg in log_messages), "Failed coherence log not found"
# Exit should not be logged on failure
def test_belief_scope_success_coherence(caplog):
"""Test that belief_scope logs Coherence:OK on success."""
caplog.set_level("INFO")
with belief_scope("SuccessFunction"):
pass
log_messages = [record.message for record in caplog.records]
assert any("[SuccessFunction][Coherence:OK]" in msg for msg in log_messages), "Success coherence log not found"

42
docs/migration_mapping.md Normal file
View File

@@ -0,0 +1,42 @@
# Database Mapping in Migration
This document describes how to use the database mapping feature during Superset dashboard migrations.
## Overview
When migrating dashboards between different Superset environments (e.g., from Dev to Prod), the underlying databases often have different UUIDs even if they represent the same data source. The Database Mapping feature allows you to define these relationships so that migrated assets automatically point to the correct database in the target environment.
## How it Works
1. **Fuzzy Matching**: The system automatically suggests mappings by comparing database names between environments using the RapidFuzz library.
2. **Persistence**: Mappings are stored in a local SQLite database (`mappings.db`) and are reused for future migrations between the same environment pair.
3. **Asset Interception**: During migration, the system intercepts the Superset export ZIP archive, modifies the `database_uuid` in the dataset YAML files, and re-packages the archive before importing it to the target.
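A minimal sketch of step 3 follows, assuming the standard Superset export layout (dataset YAML files under a `datasets/` folder, each carrying a `database_uuid` field); the actual interception code in the MigrationPlugin may differ.

```python
# Sketch of the asset-interception step: rewrite database_uuid in dataset YAML
# files inside a Superset export ZIP. Paths and field names follow the standard
# Superset export format; the real plugin code may differ.
import io
import zipfile
import yaml

def apply_mappings(zip_bytes: bytes, uuid_map: dict) -> bytes:
    src = zipfile.ZipFile(io.BytesIO(zip_bytes))
    out_buf = io.BytesIO()
    with zipfile.ZipFile(out_buf, "w", zipfile.ZIP_DEFLATED) as dst:
        for item in src.infolist():
            data = src.read(item.filename)
            if "/datasets/" in item.filename and item.filename.endswith(".yaml"):
                doc = yaml.safe_load(data)
                old_uuid = doc.get("database_uuid")
                if old_uuid in uuid_map:
                    doc["database_uuid"] = uuid_map[old_uuid]
                    data = yaml.safe_dump(doc, sort_keys=False).encode("utf-8")
            dst.writestr(item, data)
    return out_buf.getvalue()
```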
## Usage Instructions
### 1. Define Mappings
1. Navigate to the **Database Mapping** tab in the application.
2. Select your **Source** and **Target** environments.
3. Click **Fetch Databases & Suggestions**.
4. Review the suggested mappings (highlighted in green).
5. If a suggestion is incorrect or missing, use the dropdown in the "Target Database" column to select the correct one.
6. Mappings are saved automatically when you select a target database.
### 2. Run Migration with Database Replacement
1. Go to the **Migration** dashboard.
2. Select the **Source** and **Target** environments.
3. Select the dashboards or datasets you want to migrate.
4. Enable the **Replace Database (Apply Mappings)** toggle.
5. Click **Start Migration**.
### 3. Handling Missing Mappings
If the migration engine encounters a database that has no defined mapping, the process will pause, and a modal will appear prompting you to select a target database on-the-fly. Once selected, the mapping is saved, and the migration continues.
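Behind the scenes the modal issues two API calls (visible in TaskRunner.svelte later in this changeset): one to persist the new mapping and one to resolve the paused task. A rough Python equivalent is sketched below; the base URL and the use of `requests` are assumptions for illustration.

```python
# Rough equivalent of what the missing-mapping modal does: save the mapping,
# then resolve the paused task. Endpoints and payloads mirror TaskRunner.svelte
# in this changeset; base URL and `requests` usage are assumptions.
import requests

BASE = "http://localhost:8000"

def resolve_missing_mapping(task_id, source_env_id, target_env_id,
                            source_db_uuid, target_db_uuid,
                            source_db_name, target_db_name):
    # 1. Persist the mapping so future migrations reuse it.
    requests.post(f"{BASE}/api/mappings", json={
        "source_env_id": source_env_id,
        "target_env_id": target_env_id,
        "source_db_uuid": source_db_uuid,
        "target_db_uuid": target_db_uuid,
        "source_db_name": source_db_name,
        "target_db_name": target_db_name,
    }, timeout=30).raise_for_status()
    # 2. Tell the paused task which mapping to use so it can continue.
    requests.post(f"{BASE}/api/tasks/{task_id}/resolve", json={
        "resolution_params": {"resolved_mapping": {source_db_uuid: target_db_uuid}}
    }, timeout=30).raise_for_status()
```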
## Troubleshooting
- **Mapping not applied**: Ensure the "Replace Database" toggle is enabled.
- **Wrong database in target**: Check the mapping table for the specific environment pair and correct any errors.
- **Connection errors**: Ensure both Superset environments are reachable and credentials are correct in Settings.

View File

@@ -26,57 +26,85 @@
* ``` * ```
*/ */
declare module '$env/static/private' { declare module '$env/static/private' {
export const LESSOPEN: string;
export const USER: string; export const USER: string;
export const npm_config_user_agent: string; export const npm_config_user_agent: string;
export const XDG_SESSION_TYPE: string;
export const npm_node_execpath: string; export const npm_node_execpath: string;
export const SHLVL: string; export const SHLVL: string;
export const npm_config_noproxy: string; export const npm_config_noproxy: string;
export const LESS: string;
export const HOME: string; export const HOME: string;
export const OLDPWD: string; export const OLDPWD: string;
export const DESKTOP_SESSION: string;
export const npm_package_json: string; export const npm_package_json: string;
export const LSCOLORS: string;
export const ZSH: string;
export const GNOME_SHELL_SESSION_MODE: string;
export const GTK_MODULES: string;
export const PAGER: string;
export const PS1: string; export const PS1: string;
export const npm_config_userconfig: string; export const npm_config_userconfig: string;
export const npm_config_local_prefix: string; export const npm_config_local_prefix: string;
export const SYSTEMD_EXEC_PID: string;
export const DBUS_SESSION_BUS_ADDRESS: string; export const DBUS_SESSION_BUS_ADDRESS: string;
export const WSL_DISTRO_NAME: string; export const COLORTERM: string;
export const COLOR: string; export const COLOR: string;
export const npm_config_metrics_registry: string;
export const WAYLAND_DISPLAY: string; export const WAYLAND_DISPLAY: string;
export const LOGNAME: string; export const LOGNAME: string;
export const NAME: string; export const SDKMAN_CANDIDATES_API: string;
export const WSL_INTEROP: string;
export const PULSE_SERVER: string;
export const _: string; export const _: string;
export const npm_config_prefix: string; export const npm_config_prefix: string;
export const npm_config_npm_version: string; export const MEMORY_PRESSURE_WATCH: string;
export const XDG_SESSION_CLASS: string;
export const USERNAME: string;
export const TERM: string; export const TERM: string;
export const npm_config_cache: string; export const npm_config_cache: string;
export const GNOME_DESKTOP_SESSION_ID: string;
export const npm_config_node_gyp: string; export const npm_config_node_gyp: string;
export const PATH: string; export const PATH: string;
export const SDKMAN_CANDIDATES_DIR: string;
export const NODE: string; export const NODE: string;
export const npm_package_name: string; export const npm_package_name: string;
export const XDG_MENU_PREFIX: string;
export const SDKMAN_BROKER_API: string;
export const GNOME_TERMINAL_SCREEN: string;
export const GNOME_SETUP_DISPLAY: string;
export const XDG_RUNTIME_DIR: string; export const XDG_RUNTIME_DIR: string;
export const DISPLAY: string; export const DISPLAY: string;
export const LANG: string; export const LANG: string;
export const XDG_CURRENT_DESKTOP: string;
export const VIRTUAL_ENV_PROMPT: string; export const VIRTUAL_ENV_PROMPT: string;
export const XMODIFIERS: string;
export const XDG_SESSION_DESKTOP: string;
export const XAUTHORITY: string;
export const LS_COLORS: string; export const LS_COLORS: string;
export const GNOME_TERMINAL_SERVICE: string;
export const SDKMAN_DIR: string;
export const SDKMAN_PLATFORM: string;
export const npm_lifecycle_script: string; export const npm_lifecycle_script: string;
export const SSH_AUTH_SOCK: string;
export const SHELL: string; export const SHELL: string;
export const npm_package_version: string; export const npm_package_version: string;
export const npm_lifecycle_event: string; export const npm_lifecycle_event: string;
export const QT_ACCESSIBILITY: string;
export const GDMSESSION: string;
export const GOOGLE_CLOUD_PROJECT: string; export const GOOGLE_CLOUD_PROJECT: string;
export const LESSCLOSE: string; export const GPG_AGENT_INFO: string;
export const VIRTUAL_ENV: string; export const VIRTUAL_ENV: string;
export const QT_IM_MODULE: string;
export const npm_config_globalconfig: string; export const npm_config_globalconfig: string;
export const npm_config_init_module: string; export const npm_config_init_module: string;
export const JAVA_HOME: string;
export const PWD: string; export const PWD: string;
export const npm_config_globalignorefile: string;
export const npm_execpath: string; export const npm_execpath: string;
export const XDG_DATA_DIRS: string; export const XDG_DATA_DIRS: string;
export const npm_config_global_prefix: string; export const npm_config_global_prefix: string;
export const npm_command: string; export const npm_command: string;
export const WSL2_GUI_APPS_ENABLED: string; export const QT_IM_MODULES: string;
export const HOSTTYPE: string; export const MEMORY_PRESSURE_WRITE: string;
export const WSLENV: string; export const VTE_VERSION: string;
export const INIT_CWD: string; export const INIT_CWD: string;
export const EDITOR: string; export const EDITOR: string;
export const NODE_ENV: string; export const NODE_ENV: string;
@@ -109,57 +137,85 @@ declare module '$env/static/public' {
*/ */
declare module '$env/dynamic/private' { declare module '$env/dynamic/private' {
export const env: { export const env: {
LESSOPEN: string;
USER: string; USER: string;
npm_config_user_agent: string; npm_config_user_agent: string;
XDG_SESSION_TYPE: string;
npm_node_execpath: string; npm_node_execpath: string;
SHLVL: string; SHLVL: string;
npm_config_noproxy: string; npm_config_noproxy: string;
LESS: string;
HOME: string; HOME: string;
OLDPWD: string; OLDPWD: string;
DESKTOP_SESSION: string;
npm_package_json: string; npm_package_json: string;
LSCOLORS: string;
ZSH: string;
GNOME_SHELL_SESSION_MODE: string;
GTK_MODULES: string;
PAGER: string;
PS1: string; PS1: string;
npm_config_userconfig: string; npm_config_userconfig: string;
npm_config_local_prefix: string; npm_config_local_prefix: string;
SYSTEMD_EXEC_PID: string;
DBUS_SESSION_BUS_ADDRESS: string; DBUS_SESSION_BUS_ADDRESS: string;
WSL_DISTRO_NAME: string; COLORTERM: string;
COLOR: string; COLOR: string;
npm_config_metrics_registry: string;
WAYLAND_DISPLAY: string; WAYLAND_DISPLAY: string;
LOGNAME: string; LOGNAME: string;
NAME: string; SDKMAN_CANDIDATES_API: string;
WSL_INTEROP: string;
PULSE_SERVER: string;
_: string; _: string;
npm_config_prefix: string; npm_config_prefix: string;
npm_config_npm_version: string; MEMORY_PRESSURE_WATCH: string;
XDG_SESSION_CLASS: string;
USERNAME: string;
TERM: string; TERM: string;
npm_config_cache: string; npm_config_cache: string;
GNOME_DESKTOP_SESSION_ID: string;
npm_config_node_gyp: string; npm_config_node_gyp: string;
PATH: string; PATH: string;
SDKMAN_CANDIDATES_DIR: string;
NODE: string; NODE: string;
npm_package_name: string; npm_package_name: string;
XDG_MENU_PREFIX: string;
SDKMAN_BROKER_API: string;
GNOME_TERMINAL_SCREEN: string;
GNOME_SETUP_DISPLAY: string;
XDG_RUNTIME_DIR: string; XDG_RUNTIME_DIR: string;
DISPLAY: string; DISPLAY: string;
LANG: string; LANG: string;
XDG_CURRENT_DESKTOP: string;
VIRTUAL_ENV_PROMPT: string; VIRTUAL_ENV_PROMPT: string;
XMODIFIERS: string;
XDG_SESSION_DESKTOP: string;
XAUTHORITY: string;
LS_COLORS: string; LS_COLORS: string;
GNOME_TERMINAL_SERVICE: string;
SDKMAN_DIR: string;
SDKMAN_PLATFORM: string;
npm_lifecycle_script: string; npm_lifecycle_script: string;
SSH_AUTH_SOCK: string;
SHELL: string; SHELL: string;
npm_package_version: string; npm_package_version: string;
npm_lifecycle_event: string; npm_lifecycle_event: string;
QT_ACCESSIBILITY: string;
GDMSESSION: string;
GOOGLE_CLOUD_PROJECT: string; GOOGLE_CLOUD_PROJECT: string;
LESSCLOSE: string; GPG_AGENT_INFO: string;
VIRTUAL_ENV: string; VIRTUAL_ENV: string;
QT_IM_MODULE: string;
npm_config_globalconfig: string; npm_config_globalconfig: string;
npm_config_init_module: string; npm_config_init_module: string;
JAVA_HOME: string;
PWD: string; PWD: string;
npm_config_globalignorefile: string;
npm_execpath: string; npm_execpath: string;
XDG_DATA_DIRS: string; XDG_DATA_DIRS: string;
npm_config_global_prefix: string; npm_config_global_prefix: string;
npm_command: string; npm_command: string;
WSL2_GUI_APPS_ENABLED: string; QT_IM_MODULES: string;
HOSTTYPE: string; MEMORY_PRESSURE_WRITE: string;
WSLENV: string; VTE_VERSION: string;
INIT_CWD: string; INIT_CWD: string;
EDITOR: string; EDITOR: string;
NODE_ENV: string; NODE_ENV: string;

View File

@@ -4,14 +4,18 @@ export const nodes = [
() => import('./nodes/0'),
() => import('./nodes/1'),
() => import('./nodes/2'),
- () => import('./nodes/3')
+ () => import('./nodes/3'),
() => import('./nodes/4'),
() => import('./nodes/5')
];
export const server_loads = [];
export const dictionary = {
"/": [2],
- "/settings": [3]
+ "/migration": [3],
"/migration/mappings": [4],
"/settings": [5]
};
export const hooks = {
View File

@@ -1,3 +1 @@
- import * as universal from "../../../../src/routes/settings/+page.ts";
- export { universal };
- export { default as component } from "../../../../src/routes/settings/+page.svelte";
+ export { default as component } from "../../../../src/routes/migration/+page.svelte";

View File

@@ -24,7 +24,7 @@ export const options = {
app: ({ head, body, assets, nonce, env }) => "<!DOCTYPE html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<link rel=\"icon\" href=\"" + assets + "/favicon.png\" />\n\t\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\t\t" + head + "\n\t</head>\n\t<body data-sveltekit-preload-data=\"hover\">\n\t\t<div style=\"display: contents\">" + body + "</div>\n\t</body>\n</html>\n", app: ({ head, body, assets, nonce, env }) => "<!DOCTYPE html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<link rel=\"icon\" href=\"" + assets + "/favicon.png\" />\n\t\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\t\t" + head + "\n\t</head>\n\t<body data-sveltekit-preload-data=\"hover\">\n\t\t<div style=\"display: contents\">" + body + "</div>\n\t</body>\n</html>\n",
error: ({ status, message }) => "<!doctype html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<title>" + message + "</title>\n\n\t\t<style>\n\t\t\tbody {\n\t\t\t\t--bg: white;\n\t\t\t\t--fg: #222;\n\t\t\t\t--divider: #ccc;\n\t\t\t\tbackground: var(--bg);\n\t\t\t\tcolor: var(--fg);\n\t\t\t\tfont-family:\n\t\t\t\t\tsystem-ui,\n\t\t\t\t\t-apple-system,\n\t\t\t\t\tBlinkMacSystemFont,\n\t\t\t\t\t'Segoe UI',\n\t\t\t\t\tRoboto,\n\t\t\t\t\tOxygen,\n\t\t\t\t\tUbuntu,\n\t\t\t\t\tCantarell,\n\t\t\t\t\t'Open Sans',\n\t\t\t\t\t'Helvetica Neue',\n\t\t\t\t\tsans-serif;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tjustify-content: center;\n\t\t\t\theight: 100vh;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t.error {\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tmax-width: 32rem;\n\t\t\t\tmargin: 0 1rem;\n\t\t\t}\n\n\t\t\t.status {\n\t\t\t\tfont-weight: 200;\n\t\t\t\tfont-size: 3rem;\n\t\t\t\tline-height: 1;\n\t\t\t\tposition: relative;\n\t\t\t\ttop: -0.05rem;\n\t\t\t}\n\n\t\t\t.message {\n\t\t\t\tborder-left: 1px solid var(--divider);\n\t\t\t\tpadding: 0 0 0 1rem;\n\t\t\t\tmargin: 0 0 0 1rem;\n\t\t\t\tmin-height: 2.5rem;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t}\n\n\t\t\t.message h1 {\n\t\t\t\tfont-weight: 400;\n\t\t\t\tfont-size: 1em;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t@media (prefers-color-scheme: dark) {\n\t\t\t\tbody {\n\t\t\t\t\t--bg: #222;\n\t\t\t\t\t--fg: #ddd;\n\t\t\t\t\t--divider: #666;\n\t\t\t\t}\n\t\t\t}\n\t\t</style>\n\t</head>\n\t<body>\n\t\t<div class=\"error\">\n\t\t\t<span class=\"status\">" + status + "</span>\n\t\t\t<div class=\"message\">\n\t\t\t\t<h1>" + message + "</h1>\n\t\t\t</div>\n\t\t</div>\n\t</body>\n</html>\n" error: ({ status, message }) => "<!doctype html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<title>" + message + "</title>\n\n\t\t<style>\n\t\t\tbody {\n\t\t\t\t--bg: white;\n\t\t\t\t--fg: #222;\n\t\t\t\t--divider: #ccc;\n\t\t\t\tbackground: var(--bg);\n\t\t\t\tcolor: var(--fg);\n\t\t\t\tfont-family:\n\t\t\t\t\tsystem-ui,\n\t\t\t\t\t-apple-system,\n\t\t\t\t\tBlinkMacSystemFont,\n\t\t\t\t\t'Segoe UI',\n\t\t\t\t\tRoboto,\n\t\t\t\t\tOxygen,\n\t\t\t\t\tUbuntu,\n\t\t\t\t\tCantarell,\n\t\t\t\t\t'Open Sans',\n\t\t\t\t\t'Helvetica Neue',\n\t\t\t\t\tsans-serif;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tjustify-content: center;\n\t\t\t\theight: 100vh;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t.error {\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tmax-width: 32rem;\n\t\t\t\tmargin: 0 1rem;\n\t\t\t}\n\n\t\t\t.status {\n\t\t\t\tfont-weight: 200;\n\t\t\t\tfont-size: 3rem;\n\t\t\t\tline-height: 1;\n\t\t\t\tposition: relative;\n\t\t\t\ttop: -0.05rem;\n\t\t\t}\n\n\t\t\t.message {\n\t\t\t\tborder-left: 1px solid var(--divider);\n\t\t\t\tpadding: 0 0 0 1rem;\n\t\t\t\tmargin: 0 0 0 1rem;\n\t\t\t\tmin-height: 2.5rem;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t}\n\n\t\t\t.message h1 {\n\t\t\t\tfont-weight: 400;\n\t\t\t\tfont-size: 1em;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t@media (prefers-color-scheme: dark) {\n\t\t\t\tbody {\n\t\t\t\t\t--bg: #222;\n\t\t\t\t\t--fg: #ddd;\n\t\t\t\t\t--divider: #666;\n\t\t\t\t}\n\t\t\t}\n\t\t</style>\n\t</head>\n\t<body>\n\t\t<div class=\"error\">\n\t\t\t<span class=\"status\">" + status + "</span>\n\t\t\t<div class=\"message\">\n\t\t\t\t<h1>" + message + "</h1>\n\t\t\t</div>\n\t\t</div>\n\t</body>\n</html>\n"
}, },
version_hash: "1eogxsl" version_hash: "1v1g3pu"
}; };
export async function get_hooks() { export async function get_hooks() {

View File

@@ -27,15 +27,17 @@ export {};
declare module "$app/types" { declare module "$app/types" {
export interface AppTypes { export interface AppTypes {
RouteId(): "/" | "/settings"; RouteId(): "/" | "/migration" | "/migration/mappings" | "/settings";
RouteParams(): { RouteParams(): {
}; };
LayoutParams(): { LayoutParams(): {
"/": Record<string, never>; "/": Record<string, never>;
"/migration": Record<string, never>;
"/migration/mappings": Record<string, never>;
"/settings": Record<string, never> "/settings": Record<string, never>
}; };
Pathname(): "/" | "/settings" | "/settings/"; Pathname(): "/" | "/migration" | "/migration/" | "/migration/mappings" | "/migration/mappings/" | "/settings" | "/settings/";
ResolvedPathname(): `${"" | `/${string}`}${ReturnType<AppTypes['Pathname']>}`; ResolvedPathname(): `${"" | `/${string}`}${ReturnType<AppTypes['Pathname']>}`;
Asset(): string & {}; Asset(): string & {};
} }

View File

@@ -0,0 +1,205 @@
<!-- [DEF:DashboardGrid:Component] -->
<!--
@SEMANTICS: dashboard, grid, selection, pagination
@PURPOSE: Displays a grid of dashboards with selection and pagination.
@LAYER: Component
@RELATION: USED_BY -> frontend/src/routes/migration/+page.svelte
@INVARIANT: Selected IDs must be a subset of available dashboards.
-->
<script lang="ts">
// [SECTION: IMPORTS]
import { createEventDispatcher } from 'svelte';
import type { DashboardMetadata } from '../types/dashboard';
// [/SECTION]
// [SECTION: PROPS]
export let dashboards: DashboardMetadata[] = [];
export let selectedIds: number[] = [];
// [/SECTION]
// [SECTION: STATE]
let filterText = "";
let currentPage = 0;
let pageSize = 20;
let sortColumn: keyof DashboardMetadata = "title";
let sortDirection: "asc" | "desc" = "asc";
// [/SECTION]
// [SECTION: DERIVED]
$: filteredDashboards = dashboards.filter(d =>
d.title.toLowerCase().includes(filterText.toLowerCase())
);
$: sortedDashboards = [...filteredDashboards].sort((a, b) => {
let aVal = a[sortColumn];
let bVal = b[sortColumn];
if (sortColumn === "id") {
aVal = Number(aVal);
bVal = Number(bVal);
}
if (aVal < bVal) return sortDirection === "asc" ? -1 : 1;
if (aVal > bVal) return sortDirection === "asc" ? 1 : -1;
return 0;
});
$: paginatedDashboards = sortedDashboards.slice(
currentPage * pageSize,
(currentPage + 1) * pageSize
);
$: totalPages = Math.ceil(sortedDashboards.length / pageSize);
$: allSelected = paginatedDashboards.length > 0 && paginatedDashboards.every(d => selectedIds.includes(d.id));
$: someSelected = paginatedDashboards.some(d => selectedIds.includes(d.id));
// [/SECTION]
// [SECTION: EVENTS]
const dispatch = createEventDispatcher<{ selectionChanged: number[] }>();
// [/SECTION]
// [DEF:handleSort:Function]
// @PURPOSE: Toggles sort direction or changes sort column.
function handleSort(column: keyof DashboardMetadata) {
if (sortColumn === column) {
sortDirection = sortDirection === "asc" ? "desc" : "asc";
} else {
sortColumn = column;
sortDirection = "asc";
}
}
// [/DEF:handleSort]
// [DEF:handleSelectionChange:Function]
// @PURPOSE: Handles individual checkbox changes.
function handleSelectionChange(id: number, checked: boolean) {
let newSelected = [...selectedIds];
if (checked) {
if (!newSelected.includes(id)) newSelected.push(id);
} else {
newSelected = newSelected.filter(sid => sid !== id);
}
selectedIds = newSelected;
dispatch('selectionChanged', newSelected);
}
// [/DEF:handleSelectionChange]
// [DEF:handleSelectAll:Function]
// @PURPOSE: Handles select all checkbox.
function handleSelectAll(checked: boolean) {
let newSelected = [...selectedIds];
if (checked) {
paginatedDashboards.forEach(d => {
if (!newSelected.includes(d.id)) newSelected.push(d.id);
});
} else {
paginatedDashboards.forEach(d => {
newSelected = newSelected.filter(sid => sid !== d.id);
});
}
selectedIds = newSelected;
dispatch('selectionChanged', newSelected);
}
// [/DEF:handleSelectAll]
// [DEF:goToPage:Function]
// @PURPOSE: Changes current page.
function goToPage(page: number) {
if (page >= 0 && page < totalPages) {
currentPage = page;
}
}
// [/DEF:goToPage]
</script>
<!-- [SECTION: TEMPLATE] -->
<div class="dashboard-grid">
<!-- Filter Input -->
<div class="mb-4">
<input
type="text"
bind:value={filterText}
placeholder="Search dashboards..."
class="w-full px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
/>
</div>
<!-- Grid/Table -->
<div class="overflow-x-auto">
<table class="min-w-full bg-white border border-gray-300">
<thead class="bg-gray-50">
<tr>
<th class="px-4 py-2 border-b">
<input
type="checkbox"
checked={allSelected}
indeterminate={someSelected && !allSelected}
on:change={(e) => handleSelectAll((e.target as HTMLInputElement).checked)}
/>
</th>
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('title')}>
Title {sortColumn === 'title' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
</th>
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('last_modified')}>
Last Modified {sortColumn === 'last_modified' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
</th>
<th class="px-4 py-2 border-b cursor-pointer" on:click={() => handleSort('status')}>
Status {sortColumn === 'status' ? (sortDirection === 'asc' ? '↑' : '↓') : ''}
</th>
</tr>
</thead>
<tbody>
{#each paginatedDashboards as dashboard (dashboard.id)}
<tr class="hover:bg-gray-50">
<td class="px-4 py-2 border-b">
<input
type="checkbox"
checked={selectedIds.includes(dashboard.id)}
on:change={(e) => handleSelectionChange(dashboard.id, (e.target as HTMLInputElement).checked)}
/>
</td>
<td class="px-4 py-2 border-b">{dashboard.title}</td>
<td class="px-4 py-2 border-b">{new Date(dashboard.last_modified).toLocaleDateString()}</td>
<td class="px-4 py-2 border-b">
<span class="px-2 py-1 text-xs font-medium rounded-full {dashboard.status === 'published' ? 'bg-green-100 text-green-800' : 'bg-gray-100 text-gray-800'}">
{dashboard.status}
</span>
</td>
</tr>
{/each}
</tbody>
</table>
</div>
<!-- Pagination Controls -->
<div class="flex items-center justify-between mt-4">
<div class="text-sm text-gray-700">
Showing {currentPage * pageSize + 1} to {Math.min((currentPage + 1) * pageSize, sortedDashboards.length)} of {sortedDashboards.length} dashboards
</div>
<div class="flex space-x-2">
<button
class="px-3 py-1 text-sm border border-gray-300 rounded-md hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
disabled={currentPage === 0}
on:click={() => goToPage(currentPage - 1)}
>
Previous
</button>
<button
class="px-3 py-1 text-sm border border-gray-300 rounded-md hover:bg-gray-50 disabled:opacity-50 disabled:cursor-not-allowed"
disabled={currentPage >= totalPages - 1}
on:click={() => goToPage(currentPage + 1)}
>
Next
</button>
</div>
</div>
</div>
<!-- [/SECTION] -->
<style>
/* Component styles */
</style>
<!-- [/DEF:DashboardGrid] -->

View File

@@ -0,0 +1,58 @@
<!-- [DEF:EnvSelector:Component] -->
<!--
@SEMANTICS: environment, selector, dropdown, migration
@PURPOSE: Provides a UI component for selecting source and target environments.
@LAYER: Feature
@RELATION: BINDS_TO -> environments store
@INVARIANT: Source and target environments must be selectable from the list of configured environments.
-->
<script lang="ts">
// [SECTION: IMPORTS]
import { onMount, createEventDispatcher } from 'svelte';
// [/SECTION]
// [SECTION: PROPS]
export let label: string = "Select Environment";
export let selectedId: string = "";
export let environments: Array<{id: string, name: string, url: string}> = [];
// [/SECTION]
const dispatch = createEventDispatcher();
// [DEF:handleSelect:Function]
/**
* @purpose Dispatches the selection change event.
* @param {Event} event - The change event from the select element.
*/
function handleSelect(event: Event) {
const target = event.target as HTMLSelectElement;
selectedId = target.value;
dispatch('change', { id: selectedId });
}
// [/DEF:handleSelect]
</script>
<!-- [SECTION: TEMPLATE] -->
<div class="flex flex-col space-y-1">
<label for="env-select" class="text-sm font-medium text-gray-700">{label}</label>
<select
id="env-select"
class="block w-full pl-3 pr-10 py-2 text-base border-gray-300 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm rounded-md"
value={selectedId}
on:change={handleSelect}
>
<option value="" disabled>-- Choose an environment --</option>
{#each environments as env}
<option value={env.id}>{env.name} ({env.url})</option>
{/each}
</select>
</div>
<!-- [/SECTION] -->
<style>
/* Component specific styles */
</style>
<!-- [/DEF:EnvSelector] -->

View File

@@ -0,0 +1,94 @@
<!-- [DEF:MappingTable:Component] -->
<!--
@SEMANTICS: mapping, table, database, editor
@PURPOSE: Displays and allows editing of database mappings.
@LAYER: Feature
@RELATION: BINDS_TO -> mappings state
@INVARIANT: Each source database can be mapped to one target database.
-->
<script lang="ts">
// [SECTION: IMPORTS]
import { createEventDispatcher } from 'svelte';
// [/SECTION]
// [SECTION: PROPS]
export let sourceDatabases: Array<{uuid: string, database_name: string}> = [];
export let targetDatabases: Array<{uuid: string, database_name: string}> = [];
export let mappings: Array<{source_db_uuid: string, target_db_uuid: string}> = [];
export let suggestions: Array<{source_db_uuid: string, target_db_uuid: string, confidence: number}> = [];
// [/SECTION]
const dispatch = createEventDispatcher();
// [DEF:updateMapping:Function]
/**
* @purpose Updates a mapping for a specific source database.
*/
function updateMapping(sourceUuid: string, targetUuid: string) {
dispatch('update', { sourceUuid, targetUuid });
}
// [/DEF:updateMapping]
// [DEF:getSuggestion:Function]
/**
* @purpose Finds a suggestion for a source database.
*/
function getSuggestion(sourceUuid: string) {
return suggestions.find(s => s.source_db_uuid === sourceUuid);
}
// [/DEF:getSuggestion]
</script>
<!-- [SECTION: TEMPLATE] -->
<div class="overflow-x-auto">
<table class="min-w-full divide-y divide-gray-200">
<thead class="bg-gray-50">
<tr>
<th class="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Source Database</th>
<th class="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Target Database</th>
<th class="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Status</th>
</tr>
</thead>
<tbody class="bg-white divide-y divide-gray-200">
{#each sourceDatabases as sDb}
{@const mapping = mappings.find(m => m.source_db_uuid === sDb.uuid)}
{@const suggestion = getSuggestion(sDb.uuid)}
<tr class={suggestion && !mapping ? 'bg-green-50' : ''}>
<td class="px-6 py-4 whitespace-nowrap text-sm font-medium text-gray-900">
{sDb.database_name}
</td>
<td class="px-6 py-4 whitespace-nowrap text-sm text-gray-500">
<select
class="block w-full pl-3 pr-10 py-2 text-base border-gray-300 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm rounded-md"
value={mapping?.target_db_uuid || suggestion?.target_db_uuid || ""}
on:change={(e) => updateMapping(sDb.uuid, (e.target as HTMLSelectElement).value)}
>
<option value="">-- Select Target --</option>
{#each targetDatabases as tDb}
<option value={tDb.uuid}>{tDb.database_name}</option>
{/each}
</select>
</td>
<td class="px-6 py-4 whitespace-nowrap text-sm text-gray-500">
{#if mapping}
<span class="text-blue-600 font-semibold">Saved</span>
{:else if suggestion}
<span class="text-green-600 font-semibold">Suggested ({Math.round(suggestion.confidence * 100)}%)</span>
{:else}
<span class="text-red-600">Unmapped</span>
{/if}
</td>
</tr>
{/each}
</tbody>
</table>
</div>
<!-- [/SECTION] -->
<style>
/* Component specific styles */
</style>
<!-- [/DEF:MappingTable] -->

View File

@@ -0,0 +1,112 @@
<!-- [DEF:MissingMappingModal:Component] -->
<!--
@SEMANTICS: modal, mapping, prompt, migration
@PURPOSE: Prompts the user to provide a database mapping when one is missing during migration.
@LAYER: Feature
@RELATION: DISPATCHES -> resolve
@INVARIANT: Modal blocks migration progress until resolved or cancelled.
-->
<script lang="ts">
// [SECTION: IMPORTS]
import { createEventDispatcher } from 'svelte';
// [/SECTION]
// [SECTION: PROPS]
export let show: boolean = false;
export let sourceDbName: string = "";
export let sourceDbUuid: string = "";
export let targetDatabases: Array<{uuid: string, database_name: string}> = [];
// [/SECTION]
let selectedTargetUuid = "";
const dispatch = createEventDispatcher();
// [DEF:resolve:Function]
function resolve() {
if (!selectedTargetUuid) return;
dispatch('resolve', {
sourceDbUuid,
targetDbUuid: selectedTargetUuid,
targetDbName: targetDatabases.find(d => d.uuid === selectedTargetUuid)?.database_name
});
show = false;
}
// [/DEF:resolve]
// [DEF:cancel:Function]
function cancel() {
dispatch('cancel');
show = false;
}
// [/DEF:cancel]
</script>
<!-- [SECTION: TEMPLATE] -->
{#if show}
<div class="fixed z-10 inset-0 overflow-y-auto" aria-labelledby="modal-title" role="dialog" aria-modal="true">
<div class="flex items-end justify-center min-h-screen pt-4 px-4 pb-20 text-center sm:block sm:p-0">
<div class="fixed inset-0 bg-gray-500 bg-opacity-75 transition-opacity" aria-hidden="true"></div>
<span class="hidden sm:inline-block sm:align-middle sm:h-screen" aria-hidden="true">&#8203;</span>
<div class="inline-block align-bottom bg-white rounded-lg px-4 pt-5 pb-4 text-left overflow-hidden shadow-xl transform transition-all sm:my-8 sm:align-middle sm:max-w-lg sm:w-full sm:p-6">
<div>
<div class="mx-auto flex items-center justify-center h-12 w-12 rounded-full bg-yellow-100">
<svg class="h-6 w-6 text-yellow-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
</svg>
</div>
<div class="mt-3 text-center sm:mt-5">
<h3 class="text-lg leading-6 font-medium text-gray-900" id="modal-title">
Missing Database Mapping
</h3>
<div class="mt-2">
<p class="text-sm text-gray-500">
The database <strong>{sourceDbName}</strong> is used in the assets being migrated but has no mapping to the target environment. Please select a target database.
</p>
</div>
</div>
</div>
<div class="mt-5 sm:mt-6">
<select
class="block w-full pl-3 pr-10 py-2 text-base border-gray-300 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm rounded-md"
bind:value={selectedTargetUuid}
>
<option value="" disabled>-- Select Target Database --</option>
{#each targetDatabases as tDb}
<option value={tDb.uuid}>{tDb.database_name}</option>
{/each}
</select>
</div>
<div class="mt-5 sm:mt-6 sm:grid sm:grid-cols-2 sm:gap-3 sm:grid-flow-row-dense">
<button
type="button"
on:click={resolve}
disabled={!selectedTargetUuid}
class="w-full inline-flex justify-center rounded-md border border-transparent shadow-sm px-4 py-2 bg-indigo-600 text-base font-medium text-white hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 sm:col-start-2 sm:text-sm disabled:bg-gray-400"
>
Apply & Continue
</button>
<button
type="button"
on:click={cancel}
class="mt-3 w-full inline-flex justify-center rounded-md border border-gray-300 shadow-sm px-4 py-2 bg-white text-base font-medium text-gray-700 hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 sm:mt-0 sm:col-start-1 sm:text-sm"
>
Cancel Migration
</button>
</div>
</div>
</div>
</div>
{/if}
<!-- [/SECTION] -->
<style>
/* Modal specific styles */
</style>
<!-- [/DEF:MissingMappingModal] -->

View File

@@ -16,6 +16,12 @@
>
Dashboard
</a>
<a
href="/migration"
class="text-gray-600 hover:text-blue-600 font-medium {$page.url.pathname.startsWith('/migration') ? 'text-blue-600 border-b-2 border-blue-600' : ''}"
>
Migration
</a>
<a
href="/settings"
class="text-gray-600 hover:text-blue-600 font-medium {$page.url.pathname === '/settings' ? 'text-blue-600 border-b-2 border-blue-600' : ''}"

View File

@@ -0,0 +1,123 @@
<!-- [DEF:PasswordPrompt:Component] -->
<!--
@SEMANTICS: password, prompt, modal, input, security
@PURPOSE: A modal component to prompt the user for database passwords when a migration task is paused.
@LAYER: UI
@RELATION: USES -> frontend/src/lib/api.js (inferred)
@RELATION: EMITS -> resume, cancel
-->
<script>
import { createEventDispatcher } from 'svelte';
export let show = false;
export let databases = []; // List of database names requiring passwords
export let errorMessage = "";
const dispatch = createEventDispatcher();
let passwords = {};
let submitting = false;
function handleSubmit() {
if (submitting) return;
// Validate all passwords entered
const missing = databases.filter(db => !passwords[db]);
if (missing.length > 0) {
alert(`Please enter passwords for: ${missing.join(', ')}`);
return;
}
submitting = true;
dispatch('resume', { passwords });
// Reset submitting state is handled by parent or on close
}
function handleCancel() {
dispatch('cancel');
show = false;
}
// Reset passwords when modal opens/closes
$: if (!show) {
passwords = {};
submitting = false;
}
</script>
{#if show}
<div class="fixed inset-0 z-50 overflow-y-auto" aria-labelledby="modal-title" role="dialog" aria-modal="true">
<div class="flex items-end justify-center min-h-screen pt-4 px-4 pb-20 text-center sm:block sm:p-0">
<!-- Background overlay -->
<div class="fixed inset-0 bg-gray-500 bg-opacity-75 transition-opacity" aria-hidden="true" on:click={handleCancel}></div>
<span class="hidden sm:inline-block sm:align-middle sm:h-screen" aria-hidden="true">&#8203;</span>
<div class="inline-block align-bottom bg-white rounded-lg text-left overflow-hidden shadow-xl transform transition-all sm:my-8 sm:align-middle sm:max-w-lg sm:w-full">
<div class="bg-white px-4 pt-5 pb-4 sm:p-6 sm:pb-4">
<div class="sm:flex sm:items-start">
<div class="mx-auto flex-shrink-0 flex items-center justify-center h-12 w-12 rounded-full bg-red-100 sm:mx-0 sm:h-10 sm:w-10">
<!-- Heroicon name: outline/lock-closed -->
<svg class="h-6 w-6 text-red-600" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor" aria-hidden="true">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 15v2m-6 4h12a2 2 0 002-2v-6a2 2 0 00-2-2H6a2 2 0 00-2 2v6a2 2 0 002 2zm10-10V7a4 4 0 00-8 0v4h8z" />
</svg>
</div>
<div class="mt-3 text-center sm:mt-0 sm:ml-4 sm:text-left w-full">
<h3 class="text-lg leading-6 font-medium text-gray-900" id="modal-title">
Database Password Required
</h3>
<div class="mt-2">
<p class="text-sm text-gray-500 mb-4">
The migration process requires passwords for the following databases to proceed.
</p>
{#if errorMessage}
<div class="mb-4 p-2 bg-red-50 text-red-700 text-xs rounded border border-red-200">
Error: {errorMessage}
</div>
{/if}
<form on:submit|preventDefault={handleSubmit} class="space-y-4">
{#each databases as dbName}
<div>
<label for="password-{dbName}" class="block text-sm font-medium text-gray-700">
Password for {dbName}
</label>
<input
type="password"
id="password-{dbName}"
bind:value={passwords[dbName]}
class="mt-1 block w-full border-gray-300 rounded-md shadow-sm focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm p-2 border"
placeholder="Enter password"
required
/>
</div>
{/each}
</form>
</div>
</div>
</div>
</div>
<div class="bg-gray-50 px-4 py-3 sm:px-6 sm:flex sm:flex-row-reverse">
<button
type="button"
class="w-full inline-flex justify-center rounded-md border border-transparent shadow-sm px-4 py-2 bg-indigo-600 text-base font-medium text-white hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 sm:ml-3 sm:w-auto sm:text-sm disabled:opacity-50"
on:click={handleSubmit}
disabled={submitting}
>
{submitting ? 'Resuming...' : 'Resume Migration'}
</button>
<button
type="button"
class="mt-3 w-full inline-flex justify-center rounded-md border border-gray-300 shadow-sm px-4 py-2 bg-white text-base font-medium text-gray-700 hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 sm:mt-0 sm:ml-3 sm:w-auto sm:text-sm"
on:click={handleCancel}
disabled={submitting}
>
Cancel
</button>
</div>
</div>
</div>
</div>
{/if}
<!-- [/DEF:PasswordPrompt] -->

View File

@@ -0,0 +1,179 @@
<!-- [DEF:TaskHistory:Component] -->
<!--
@SEMANTICS: task, history, list, status, monitoring
@PURPOSE: Displays a list of recent tasks with their status and allows selecting them for viewing logs.
@LAYER: UI
@RELATION: USES -> frontend/src/lib/stores.js
@RELATION: USES -> frontend/src/lib/api.js (inferred)
-->
<script>
import { onMount, onDestroy } from 'svelte';
import { selectedTask } from '../lib/stores.js';
let tasks = [];
let loading = true;
let error = "";
let interval;
async function fetchTasks() {
try {
const res = await fetch('/api/tasks?limit=10');
if (!res.ok) throw new Error('Failed to fetch tasks');
tasks = await res.json();
// [DEBUG] Check for tasks requiring attention
tasks.forEach(t => {
if (t.status === 'AWAITING_MAPPING' || t.status === 'AWAITING_INPUT') {
console.log(`[TaskHistory] Task ${t.id} is in state ${t.status}. Input required: ${t.input_required}`);
}
});
// Update selected task if it exists in the list (for status updates)
if ($selectedTask) {
const updatedTask = tasks.find(t => t.id === $selectedTask.id);
if (updatedTask && updatedTask.status !== $selectedTask.status) {
selectedTask.set(updatedTask);
}
}
} catch (e) {
error = e.message;
} finally {
loading = false;
}
}
async function clearTasks(status = null) {
if (!confirm('Are you sure you want to clear tasks?')) return;
try {
let url = '/api/tasks';
const params = new URLSearchParams();
if (status) params.append('status', status);
const res = await fetch(`${url}?${params.toString()}`, { method: 'DELETE' });
if (!res.ok) throw new Error('Failed to clear tasks');
await fetchTasks();
} catch (e) {
error = e.message;
}
}
async function selectTask(task) {
try {
// Fetch the full task details (including logs) before setting it as selected
const res = await fetch(`/api/tasks/${task.id}`);
if (res.ok) {
const fullTask = await res.json();
selectedTask.set(fullTask);
} else {
// Fallback to the list version if fetch fails
selectedTask.set(task);
}
} catch (e) {
console.error("Failed to fetch full task details:", e);
selectedTask.set(task);
}
}
function getStatusColor(status) {
switch (status) {
case 'SUCCESS': return 'bg-green-100 text-green-800';
case 'FAILED': return 'bg-red-100 text-red-800';
case 'RUNNING': return 'bg-blue-100 text-blue-800';
case 'AWAITING_INPUT': return 'bg-orange-100 text-orange-800';
case 'AWAITING_MAPPING': return 'bg-yellow-100 text-yellow-800';
default: return 'bg-gray-100 text-gray-800';
}
}
onMount(() => {
fetchTasks();
interval = setInterval(fetchTasks, 5000); // Poll every 5s
});
onDestroy(() => {
clearInterval(interval);
});
</script>
<div class="bg-white shadow overflow-hidden sm:rounded-lg mb-8">
<div class="px-4 py-5 sm:px-6 flex justify-between items-center">
<h3 class="text-lg leading-6 font-medium text-gray-900">
Recent Tasks
</h3>
<div class="flex space-x-4 items-center">
<div class="relative inline-block text-left group">
<button class="text-sm text-red-600 hover:text-red-900 focus:outline-none flex items-center py-2">
Clear Tasks
<svg class="ml-1 h-4 w-4" fill="none" stroke="currentColor" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 9l-7 7-7-7"></path></svg>
</button>
<!-- Added a transparent bridge to prevent menu closing when moving cursor -->
<div class="absolute h-2 w-full top-full left-0"></div>
<div class="origin-top-right absolute right-0 mt-2 w-48 rounded-md shadow-lg bg-white ring-1 ring-black ring-opacity-5 focus:outline-none hidden group-hover:block z-50">
<div class="py-1">
<button on:click={() => clearTasks()} class="block w-full text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100">Clear All Non-Running</button>
<button on:click={() => clearTasks('FAILED')} class="block w-full text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100">Clear Failed</button>
<button on:click={() => clearTasks('AWAITING_INPUT')} class="block w-full text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100">Clear Awaiting Input</button>
</div>
</div>
</div>
<button
on:click={fetchTasks}
class="text-sm text-indigo-600 hover:text-indigo-900 focus:outline-none"
>
Refresh
</button>
</div>
</div>
{#if loading && tasks.length === 0}
<div class="p-4 text-center text-gray-500">Loading tasks...</div>
{:else if error}
<div class="p-4 text-center text-red-500">{error}</div>
{:else if tasks.length === 0}
<div class="p-4 text-center text-gray-500">No recent tasks found.</div>
{:else}
<ul class="divide-y divide-gray-200">
{#each tasks as task}
<li>
<button
class="w-full text-left block hover:bg-gray-50 focus:outline-none focus:bg-gray-50 transition duration-150 ease-in-out"
class:bg-indigo-50={$selectedTask && $selectedTask.id === task.id}
on:click={() => selectTask(task)}
>
<div class="px-4 py-4 sm:px-6">
<div class="flex items-center justify-between">
<p class="text-sm font-medium text-indigo-600 truncate">
{task.plugin_id}
<span class="text-gray-500 text-xs ml-2">({task.id.slice(0, 8)})</span>
</p>
<div class="ml-2 flex-shrink-0 flex">
<p class="px-2 inline-flex text-xs leading-5 font-semibold rounded-full {getStatusColor(task.status)}">
{task.status}
</p>
</div>
</div>
<div class="mt-2 sm:flex sm:justify-between">
<div class="sm:flex">
<p class="flex items-center text-sm text-gray-500">
{#if task.params.from_env && task.params.to_env}
{task.params.from_env} &rarr; {task.params.to_env}
{:else}
Params: {Object.keys(task.params).length} keys
{/if}
</p>
</div>
<div class="mt-2 flex items-center text-sm text-gray-500 sm:mt-0">
<p>
Started: {new Date(task.started_at || task.created_at || Date.now()).toLocaleString()}
</p>
</div>
</div>
</div>
</button>
</li>
{/each}
</ul>
{/if}
</div>
<!-- [/DEF:TaskHistory] -->

View File

@@ -0,0 +1,153 @@
<!-- [DEF:TaskLogViewer:Component] -->
<!--
@SEMANTICS: task, log, viewer, modal
@PURPOSE: Displays detailed logs for a specific task in a modal.
@LAYER: UI
@RELATION: USES -> frontend/src/lib/api.js (inferred)
-->
<script>
import { createEventDispatcher, onMount, onDestroy } from 'svelte';
import { getTaskLogs } from '../services/taskService.js';
export let show = false;
export let taskId = null;
export let taskStatus = null; // To know if we should poll
const dispatch = createEventDispatcher();
let logs = [];
let loading = false;
let error = "";
let interval;
let autoScroll = true;
let logContainer;
async function fetchLogs() {
if (!taskId) return;
try {
logs = await getTaskLogs(taskId);
if (autoScroll) {
scrollToBottom();
}
} catch (e) {
error = e.message;
} finally {
loading = false;
}
}
function scrollToBottom() {
if (logContainer) {
setTimeout(() => {
logContainer.scrollTop = logContainer.scrollHeight;
}, 0);
}
}
function handleScroll() {
if (!logContainer) return;
// If user scrolls up, disable auto-scroll
const { scrollTop, scrollHeight, clientHeight } = logContainer;
const atBottom = scrollHeight - scrollTop - clientHeight < 50;
autoScroll = atBottom;
}
function close() {
dispatch('close');
show = false;
}
function getLogLevelColor(level) {
switch (level) {
case 'INFO': return 'text-blue-600';
case 'WARNING': return 'text-yellow-600';
case 'ERROR': return 'text-red-600';
case 'DEBUG': return 'text-gray-500';
default: return 'text-gray-800';
}
}
// React to changes in show/taskId
$: if (show && taskId) {
logs = [];
loading = true;
error = "";
fetchLogs();
// Poll if task is running
if (taskStatus === 'RUNNING' || taskStatus === 'AWAITING_INPUT' || taskStatus === 'AWAITING_MAPPING') {
interval = setInterval(fetchLogs, 3000);
}
} else {
if (interval) clearInterval(interval);
}
onDestroy(() => {
if (interval) clearInterval(interval);
});
</script>
{#if show}
<div class="fixed inset-0 z-50 overflow-y-auto" aria-labelledby="modal-title" role="dialog" aria-modal="true">
<div class="flex items-end justify-center min-h-screen pt-4 px-4 pb-20 text-center sm:block sm:p-0">
<!-- Background overlay -->
<div class="fixed inset-0 bg-gray-500 bg-opacity-75 transition-opacity" aria-hidden="true" on:click={close}></div>
<span class="hidden sm:inline-block sm:align-middle sm:h-screen" aria-hidden="true">&#8203;</span>
<div class="inline-block align-bottom bg-white rounded-lg text-left overflow-hidden shadow-xl transform transition-all sm:my-8 sm:align-middle sm:max-w-4xl sm:w-full">
<div class="bg-white px-4 pt-5 pb-4 sm:p-6 sm:pb-4">
<div class="sm:flex sm:items-start">
<div class="mt-3 text-center sm:mt-0 sm:ml-4 sm:text-left w-full">
<h3 class="text-lg leading-6 font-medium text-gray-900 flex justify-between items-center" id="modal-title">
<span>Task Logs <span class="text-sm text-gray-500 font-normal">({taskId})</span></span>
<button on:click={fetchLogs} class="text-sm text-indigo-600 hover:text-indigo-900">Refresh</button>
</h3>
<div class="mt-4 border rounded-md bg-gray-50 p-4 h-96 overflow-y-auto font-mono text-sm"
bind:this={logContainer}
on:scroll={handleScroll}>
{#if loading && logs.length === 0}
<p class="text-gray-500 text-center">Loading logs...</p>
{:else if error}
<p class="text-red-500 text-center">{error}</p>
{:else if logs.length === 0}
<p class="text-gray-500 text-center">No logs available.</p>
{:else}
{#each logs as log}
<div class="mb-1 hover:bg-gray-100 p-1 rounded">
<span class="text-gray-400 text-xs mr-2">
{new Date(log.timestamp).toLocaleTimeString()}
</span>
<span class="font-bold text-xs mr-2 w-16 inline-block {getLogLevelColor(log.level)}">
[{log.level}]
</span>
<span class="text-gray-800 break-words">
{log.message}
</span>
{#if log.context}
<div class="ml-24 text-xs text-gray-500 mt-1 bg-gray-100 p-1 rounded overflow-x-auto">
<pre>{JSON.stringify(log.context, null, 2)}</pre>
</div>
{/if}
</div>
{/each}
{/if}
</div>
</div>
</div>
</div>
<div class="bg-gray-50 px-4 py-3 sm:px-6 sm:flex sm:flex-row-reverse">
<button
type="button"
class="mt-3 w-full inline-flex justify-center rounded-md border border-gray-300 shadow-sm px-4 py-2 bg-white text-base font-medium text-gray-700 hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 sm:mt-0 sm:ml-3 sm:w-auto sm:text-sm"
on:click={close}
>
Close
</button>
</div>
</div>
</div>
</div>
{/if}
<!-- [/DEF:TaskLogViewer] -->

View File

@@ -15,6 +15,8 @@
import { selectedTask, taskLogs } from '../lib/stores.js';
import { getWsUrl } from '../lib/api.js';
import { addToast } from '../lib/toasts.js';
import MissingMappingModal from './MissingMappingModal.svelte';
import PasswordPrompt from './PasswordPrompt.svelte';
// [/SECTION]
let ws;
@@ -25,7 +27,13 @@
let reconnectTimeout;
let waitingForData = false;
let dataTimeout;
- let connectionStatus = 'disconnected'; // 'connecting', 'connected', 'disconnected', 'waiting', 'completed'
+ let connectionStatus = 'disconnected'; // 'connecting', 'connected', 'disconnected', 'waiting', 'completed', 'awaiting_mapping', 'awaiting_input'
let showMappingModal = false;
let missingDbInfo = { name: '', uuid: '' };
let targetDatabases = [];
let showPasswordPrompt = false;
let passwordPromptData = { databases: [], errorMessage: '' };
// [DEF:connect:Function]
/**
@@ -58,7 +66,43 @@
connectionStatus = 'completed';
ws.close();
}
// Check for missing mapping signal
if (logEntry.message && logEntry.message.includes('Missing mapping for database UUID')) {
const uuidMatch = logEntry.message.match(/UUID: ([\w-]+)/);
if (uuidMatch) {
missingDbInfo = { name: 'Unknown', uuid: uuidMatch[1] };
connectionStatus = 'awaiting_mapping';
fetchTargetDatabases();
showMappingModal = true;
}
}
// Check for password request via log context or message
// Note: The backend logs "Task paused for user input" with context
if (logEntry.message && logEntry.message.includes('Task paused for user input') && logEntry.context && logEntry.context.input_request) {
const request = logEntry.context.input_request;
if (request.type === 'database_password') {
connectionStatus = 'awaiting_input';
passwordPromptData = {
databases: request.databases || [],
errorMessage: request.error_message || ''
};
showPasswordPrompt = true;
}
}
};
// Check if task is already awaiting input (e.g. when re-selecting task)
// We use the 'task' variable from the outer scope (connect function)
if (task && task.status === 'AWAITING_INPUT' && task.input_request && task.input_request.type === 'database_password') {
connectionStatus = 'awaiting_input';
passwordPromptData = {
databases: task.input_request.databases || [],
errorMessage: task.input_request.error_message || ''
};
showPasswordPrompt = true;
}
ws.onerror = (error) => {
console.error('[TaskRunner][Coherence:Failed] WebSocket error:', error);
@@ -85,6 +129,84 @@
}
// [/DEF:connect]
async function fetchTargetDatabases() {
const task = get(selectedTask);
if (!task || !task.params.to_env) return;
try {
// We need to find the environment ID by name first
const envsRes = await fetch('/api/environments');
const envs = await envsRes.json();
const targetEnv = envs.find(e => e.name === task.params.to_env);
if (targetEnv) {
const res = await fetch(`/api/environments/${targetEnv.id}/databases`);
targetDatabases = await res.json();
}
} catch (e) {
console.error('Failed to fetch target databases', e);
}
}
async function handleMappingResolve(event) {
const task = get(selectedTask);
const { sourceDbUuid, targetDbUuid, targetDbName } = event.detail;
try {
// 1. Save mapping to backend
const envsRes = await fetch('/api/environments');
const envs = await envsRes.json();
const srcEnv = envs.find(e => e.name === task.params.from_env);
const tgtEnv = envs.find(e => e.name === task.params.to_env);
await fetch('/api/mappings', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
source_env_id: srcEnv.id,
target_env_id: tgtEnv.id,
source_db_uuid: sourceDbUuid,
target_db_uuid: targetDbUuid,
source_db_name: missingDbInfo.name,
target_db_name: targetDbName
})
});
// 2. Resolve task
await fetch(`/api/tasks/${task.id}/resolve`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
resolution_params: { resolved_mapping: { [sourceDbUuid]: targetDbUuid } }
})
});
connectionStatus = 'connected';
addToast('Mapping resolved, migration continuing...', 'success');
} catch (e) {
addToast('Failed to resolve mapping: ' + e.message, 'error');
}
}
async function handlePasswordResume(event) {
const task = get(selectedTask);
const { passwords } = event.detail;
try {
await fetch(`/api/tasks/${task.id}/resume`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ passwords })
});
showPasswordPrompt = false;
connectionStatus = 'connected';
addToast('Passwords submitted, resuming migration...', 'success');
} catch (e) {
addToast('Failed to resume task: ' + e.message, 'error');
}
}
function startDataTimeout() {
waitingForData = false;
dataTimeout = setTimeout(() => {
@@ -110,7 +232,15 @@
clearTimeout(reconnectTimeout);
reconnectAttempts = 0;
connectionStatus = 'disconnected';
// Initialize logs from the task object if available
if (task.logs && Array.isArray(task.logs)) {
console.log(`[TaskRunner] Loaded ${task.logs.length} existing logs.`);
taskLogs.set(task.logs);
} else {
taskLogs.set([]);
}
connect();
}
});
@@ -151,6 +281,12 @@
{:else if connectionStatus === 'completed'}
<span class="h-3 w-3 rounded-full bg-blue-500"></span>
<span class="text-xs text-gray-500">Completed</span>
{:else if connectionStatus === 'awaiting_mapping'}
<span class="h-3 w-3 rounded-full bg-orange-500 animate-pulse"></span>
<span class="text-xs text-gray-500">Awaiting Mapping</span>
{:else if connectionStatus === 'awaiting_input'}
<span class="h-3 w-3 rounded-full bg-orange-500 animate-pulse"></span>
<span class="text-xs text-gray-500">Awaiting Input</span>
{:else}
<span class="h-3 w-3 rounded-full bg-red-500"></span>
<span class="text-xs text-gray-500">Disconnected</span>
@@ -158,18 +294,46 @@
</div>
</div>
- <div class="bg-gray-900 text-white font-mono text-sm p-4 rounded-md h-96 overflow-y-auto relative">
+ <!-- Task Info Section -->
<div class="mb-4 bg-gray-50 p-3 rounded text-sm border border-gray-200">
<details open>
<summary class="cursor-pointer font-medium text-gray-700 focus:outline-none hover:text-indigo-600">Task Details & Parameters</summary>
<div class="mt-2 pl-2 border-l-2 border-indigo-200">
<div class="grid grid-cols-2 gap-2 mb-2">
<div><span class="font-semibold">ID:</span> <span class="text-gray-600">{$selectedTask.id}</span></div>
<div><span class="font-semibold">Status:</span> <span class="text-gray-600">{$selectedTask.status}</span></div>
<div><span class="font-semibold">Started:</span> <span class="text-gray-600">{new Date($selectedTask.started_at || $selectedTask.created_at || Date.now()).toLocaleString()}</span></div>
<div><span class="font-semibold">Plugin:</span> <span class="text-gray-600">{$selectedTask.plugin_id}</span></div>
</div>
<div class="mt-1">
<span class="font-semibold">Parameters:</span>
<pre class="text-xs bg-gray-100 p-2 rounded mt-1 overflow-x-auto border border-gray-200">{JSON.stringify($selectedTask.params, null, 2)}</pre>
</div>
</div>
</details>
</div>
<div class="bg-gray-900 text-white font-mono text-sm p-4 rounded-md h-96 overflow-y-auto relative shadow-inner">
{#if $taskLogs.length === 0}
<div class="text-gray-500 italic text-center mt-10">No logs available for this task.</div>
{/if}
{#each $taskLogs as log}
- <div>
+ <div class="hover:bg-gray-800 px-1 rounded">
- <span class="text-gray-400">{new Date(log.timestamp).toLocaleTimeString()}</span>
+ <span class="text-gray-500 select-none text-xs w-20 inline-block">{new Date(log.timestamp).toLocaleTimeString()}</span>
- <span class="{log.level === 'ERROR' ? 'text-red-500' : 'text-green-400'}">[{log.level}]</span>
+ <span class="{log.level === 'ERROR' ? 'text-red-500 font-bold' : log.level === 'WARNING' ? 'text-yellow-400' : 'text-green-400'} w-16 inline-block">[{log.level}]</span>
<span>{log.message}</span>
{#if log.context}
<details class="ml-24">
<summary class="text-xs text-gray-500 cursor-pointer hover:text-gray-300">Context</summary>
<pre class="text-xs text-gray-400 pl-2 border-l border-gray-700 mt-1">{JSON.stringify(log.context, null, 2)}</pre>
</details>
{/if}
</div> </div>
{/each} {/each}
{#if waitingForData} {#if waitingForData && connectionStatus === 'connected'}
<div class="text-gray-500 italic mt-2 animate-pulse"> <div class="text-gray-500 italic mt-2 animate-pulse border-t border-gray-800 pt-2">
Waiting for data... Waiting for new logs...
</div> </div>
{/if} {/if}
</div> </div>
@@ -177,6 +341,23 @@
<p>No task selected.</p> <p>No task selected.</p>
{/if} {/if}
</div> </div>
<MissingMappingModal
bind:show={showMappingModal}
sourceDbName={missingDbInfo.name}
sourceDbUuid={missingDbInfo.uuid}
{targetDatabases}
on:resolve={handleMappingResolve}
on:cancel={() => { connectionStatus = 'disconnected'; ws.close(); }}
/>
<PasswordPrompt
bind:show={showPasswordPrompt}
databases={passwordPromptData.databases}
errorMessage={passwordPromptData.errorMessage}
on:resume={handlePasswordResume}
on:cancel={() => { showPasswordPrompt = false; }}
/>
<!-- [/SECTION] --> <!-- [/SECTION] -->
<!-- [/DEF:TaskRunner] --> <!-- [/DEF:TaskRunner] -->

View File

@@ -21,7 +21,14 @@
environments: [], environments: [],
settings: { settings: {
backup_path: '', backup_path: '',
default_environment_id: null default_environment_id: null,
logging: {
level: 'INFO',
file_path: 'logs/app.log',
max_bytes: 10485760,
backup_count: 5,
enable_belief_state: true
}
} }
}; };
@@ -180,10 +187,43 @@
<label for="backup_path" class="block text-sm font-medium text-gray-700">Backup Storage Path</label> <label for="backup_path" class="block text-sm font-medium text-gray-700">Backup Storage Path</label>
<input type="text" id="backup_path" bind:value={settings.settings.backup_path} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" /> <input type="text" id="backup_path" bind:value={settings.settings.backup_path} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div> </div>
<button on:click={handleSaveGlobal} class="bg-blue-500 text-white px-4 py-2 rounded hover:bg-blue-600 w-max"> </div>
<h3 class="text-lg font-medium mb-4 mt-6">Logging Configuration</h3>
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div>
<label for="log_level" class="block text-sm font-medium text-gray-700">Log Level</label>
<select id="log_level" bind:value={settings.settings.logging.level} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2">
<option value="DEBUG">DEBUG</option>
<option value="INFO">INFO</option>
<option value="WARNING">WARNING</option>
<option value="ERROR">ERROR</option>
<option value="CRITICAL">CRITICAL</option>
</select>
</div>
<div>
<label for="log_file_path" class="block text-sm font-medium text-gray-700">Log File Path</label>
<input type="text" id="log_file_path" bind:value={settings.settings.logging.file_path} placeholder="logs/app.log" class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div>
<label for="log_max_bytes" class="block text-sm font-medium text-gray-700">Max File Size (MB)</label>
<input type="number" id="log_max_bytes" bind:value={settings.settings.logging.max_bytes} min="1" step="1" class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div>
<label for="log_backup_count" class="block text-sm font-medium text-gray-700">Backup Count</label>
<input type="number" id="log_backup_count" bind:value={settings.settings.logging.backup_count} min="1" step="1" class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div class="md:col-span-2">
<label class="flex items-center">
<input type="checkbox" id="enable_belief_state" bind:checked={settings.settings.logging.enable_belief_state} class="h-4 w-4 text-blue-600 border-gray-300 rounded" />
<span class="ml-2 block text-sm text-gray-900">Enable Belief State Logging</span>
</label>
</div>
</div>
<button on:click={handleSaveGlobal} class="bg-blue-500 text-white px-4 py-2 rounded hover:bg-blue-600 w-max mt-4">
Save Global Settings Save Global Settings
</button> </button>
</div>
</section> </section>
<section class="mb-8 bg-white p-6 rounded shadow"> <section class="mb-8 bg-white p-6 rounded shadow">

View File

@@ -4,6 +4,7 @@
import DynamicForm from '../components/DynamicForm.svelte'; import DynamicForm from '../components/DynamicForm.svelte';
import { api } from '../lib/api.js'; import { api } from '../lib/api.js';
import { get } from 'svelte/store'; import { get } from 'svelte/store';
import { goto } from '$app/navigation';
/** @type {import('./$types').PageData} */ /** @type {import('./$types').PageData} */
export let data; export let data;
@@ -15,8 +16,12 @@
function selectPlugin(plugin) { function selectPlugin(plugin) {
console.log(`[Dashboard][Action] Selecting plugin: ${plugin.id}`); console.log(`[Dashboard][Action] Selecting plugin: ${plugin.id}`);
if (plugin.id === 'superset-migration') {
goto('/migration');
} else {
selectedPlugin.set(plugin); selectedPlugin.set(plugin);
} }
}
async function handleFormSubmit(event) { async function handleFormSubmit(event) {
console.log("[App.handleFormSubmit][Action] Handling form submission for task creation."); console.log("[App.handleFormSubmit][Action] Handling form submission for task creation.");

View File

@@ -0,0 +1,399 @@
<!-- [DEF:MigrationDashboard:Component] -->
<!--
@SEMANTICS: migration, dashboard, environment, selection, database-replacement
@PURPOSE: Main dashboard for configuring and starting migrations.
@LAYER: Page
@RELATION: USES -> EnvSelector
@INVARIANT: Migration cannot start without source and target environments.
-->
<script lang="ts">
// [SECTION: IMPORTS]
import { onMount } from 'svelte';
import EnvSelector from '../../components/EnvSelector.svelte';
import DashboardGrid from '../../components/DashboardGrid.svelte';
import MappingTable from '../../components/MappingTable.svelte';
import TaskRunner from '../../components/TaskRunner.svelte';
import TaskHistory from '../../components/TaskHistory.svelte';
import TaskLogViewer from '../../components/TaskLogViewer.svelte';
import PasswordPrompt from '../../components/PasswordPrompt.svelte';
import { selectedTask } from '../../lib/stores.js';
import { resumeTask } from '../../services/taskService.js';
import type { DashboardMetadata, DashboardSelection } from '../../types/dashboard';
// [/SECTION]
// [SECTION: STATE]
let environments: any[] = [];
let sourceEnvId = "";
let targetEnvId = "";
let replaceDb = false;
let loading = true;
let error = "";
let dashboards: DashboardMetadata[] = [];
let selectedDashboardIds: number[] = [];
let sourceDatabases: any[] = [];
let targetDatabases: any[] = [];
let mappings: any[] = [];
let suggestions: any[] = [];
let fetchingDbs = false;
// UI State for Modals
let showLogViewer = false;
let logViewerTaskId: string | null = null;
let logViewerTaskStatus: string | null = null;
let showPasswordPrompt = false;
let passwordPromptDatabases: string[] = [];
let passwordPromptErrorMessage = "";
// [/SECTION]
// [DEF:fetchEnvironments:Function]
/**
* @purpose Fetches the list of environments from the API.
* @post environments state is updated.
*/
async function fetchEnvironments() {
try {
const response = await fetch('/api/environments');
if (!response.ok) throw new Error('Failed to fetch environments');
environments = await response.json();
} catch (e) {
error = e.message;
} finally {
loading = false;
}
}
// [/DEF:fetchEnvironments]
// [DEF:fetchDashboards:Function]
/**
* @purpose Fetches dashboards for the selected source environment.
* @param envId The environment ID.
* @post dashboards state is updated.
*/
async function fetchDashboards(envId: string) {
try {
const response = await fetch(`/api/environments/${envId}/dashboards`);
if (!response.ok) throw new Error('Failed to fetch dashboards');
dashboards = await response.json();
selectedDashboardIds = []; // Reset selection when env changes
} catch (e) {
error = e.message;
dashboards = [];
}
}
// [/DEF:fetchDashboards]
onMount(fetchEnvironments);
// Reactive: fetch dashboards when source env changes
$: if (sourceEnvId) fetchDashboards(sourceEnvId);
// [DEF:fetchDatabases:Function]
/**
* @purpose Fetches databases from both environments and gets suggestions.
*/
async function fetchDatabases() {
if (!sourceEnvId || !targetEnvId) return;
fetchingDbs = true;
error = "";
try {
const [srcRes, tgtRes, mapRes, sugRes] = await Promise.all([
fetch(`/api/environments/${sourceEnvId}/databases`),
fetch(`/api/environments/${targetEnvId}/databases`),
fetch(`/api/mappings?source_env_id=${sourceEnvId}&target_env_id=${targetEnvId}`),
fetch(`/api/mappings/suggest`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ source_env_id: sourceEnvId, target_env_id: targetEnvId })
})
]);
if (!srcRes.ok || !tgtRes.ok) throw new Error('Failed to fetch databases from environments');
sourceDatabases = await srcRes.json();
targetDatabases = await tgtRes.json();
mappings = await mapRes.json();
suggestions = await sugRes.json();
} catch (e) {
error = e.message;
} finally {
fetchingDbs = false;
}
}
// [/DEF:fetchDatabases]
// [DEF:handleMappingUpdate:Function]
/**
* @purpose Saves a mapping to the backend.
*/
async function handleMappingUpdate(event: CustomEvent) {
const { sourceUuid, targetUuid } = event.detail;
const sDb = sourceDatabases.find(d => d.uuid === sourceUuid);
const tDb = targetDatabases.find(d => d.uuid === targetUuid);
if (!sDb || !tDb) return;
try {
const response = await fetch('/api/mappings', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
source_env_id: sourceEnvId,
target_env_id: targetEnvId,
source_db_uuid: sourceUuid,
target_db_uuid: targetUuid,
source_db_name: sDb.database_name,
target_db_name: tDb.database_name
})
});
if (!response.ok) throw new Error('Failed to save mapping');
const savedMapping = await response.json();
mappings = [...mappings.filter(m => m.source_db_uuid !== sourceUuid), savedMapping];
} catch (e) {
error = e.message;
}
}
// [/DEF:handleMappingUpdate]
// [DEF:handleViewLogs:Function]
function handleViewLogs(event: CustomEvent) {
const task = event.detail;
logViewerTaskId = task.id;
logViewerTaskStatus = task.status;
showLogViewer = true;
}
// [/DEF:handleViewLogs]
// [DEF:handlePasswordPrompt:Function]
// Triggered when a task needs input. Ideally TaskRunner or TaskHistory would emit an
// explicit event; for now we reactively watch selectedTask, which is updated via the
// WebSocket or a manual refresh.
$: if ($selectedTask && $selectedTask.status === 'AWAITING_INPUT' && $selectedTask.input_request) {
const req = $selectedTask.input_request;
if (req.type === 'database_password') {
passwordPromptDatabases = req.databases || [];
passwordPromptErrorMessage = req.error_message || "";
showPasswordPrompt = true;
}
} else if (!$selectedTask || $selectedTask.status !== 'AWAITING_INPUT') {
// Intentionally do not auto-close the prompt when the task stops waiting (e.g. after a
// resume); the user or the resume success handler is responsible for closing it.
}
async function handleResumeMigration(event: CustomEvent) {
if (!$selectedTask) return;
const { passwords } = event.detail;
try {
await resumeTask($selectedTask.id, passwords);
showPasswordPrompt = false;
// Task status update will be handled by store/websocket
} catch (e) {
console.error("Failed to resume task:", e);
passwordPromptErrorMessage = e.message;
// Keep prompt open
}
}
// [DEF:startMigration:Function]
/**
* @purpose Starts the migration process.
* @pre sourceEnvId and targetEnvId must be set and different.
*/
async function startMigration() {
if (!sourceEnvId || !targetEnvId) {
error = "Please select both source and target environments.";
return;
}
if (sourceEnvId === targetEnvId) {
error = "Source and target environments must be different.";
return;
}
if (selectedDashboardIds.length === 0) {
error = "Please select at least one dashboard to migrate.";
return;
}
error = "";
try {
const selection: DashboardSelection = {
selected_ids: selectedDashboardIds,
source_env_id: sourceEnvId,
target_env_id: targetEnvId,
replace_db_config: replaceDb
};
console.log(`[MigrationDashboard][Action] Starting migration with selection:`, selection);
const response = await fetch('/api/migration/execute', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(selection)
});
console.log(`[MigrationDashboard][Action] API response status: ${response.status}`);
if (!response.ok) throw new Error(`Failed to start migration: ${response.status} ${response.statusText}`);
const result = await response.json();
console.log(`[MigrationDashboard][Action] Migration started: ${result.task_id} - ${result.message}`);
// Wait briefly so the backend has finished registering the task and it can be fetched
await new Promise(r => setTimeout(r, 500));
// Fetch full task details and switch to TaskRunner view
try {
const taskRes = await fetch(`/api/tasks/${result.task_id}`);
if (taskRes.ok) {
const task = await taskRes.json();
selectedTask.set(task);
} else {
// Fallback: create a temporary task object to switch view immediately
console.warn("Could not fetch task details immediately, using placeholder.");
selectedTask.set({
id: result.task_id,
plugin_id: 'superset-migration',
status: 'RUNNING',
logs: [],
params: {}
});
}
} catch (fetchErr) {
console.error("Failed to fetch new task details:", fetchErr);
}
} catch (e) {
console.error(`[MigrationDashboard][Failure] Migration failed:`, e);
error = e.message;
}
}
// [/DEF:startMigration]
</script>
<!-- [SECTION: TEMPLATE] -->
<div class="max-w-4xl mx-auto p-6">
<h1 class="text-2xl font-bold mb-6">Migration Dashboard</h1>
<TaskHistory on:viewLogs={handleViewLogs} />
{#if $selectedTask}
<div class="mt-6">
<TaskRunner />
<button
on:click={() => selectedTask.set(null)}
class="mt-4 inline-flex items-center px-4 py-2 border border-gray-300 shadow-sm text-sm font-medium rounded-md text-gray-700 bg-white hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500"
>
Back to New Migration
</button>
</div>
{:else}
{#if loading}
<p>Loading environments...</p>
{:else if error}
<div class="bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded mb-4">
{error}
</div>
{/if}
<div class="grid grid-cols-1 md:grid-cols-2 gap-6 mb-8">
<EnvSelector
label="Source Environment"
bind:selectedId={sourceEnvId}
{environments}
/>
<EnvSelector
label="Target Environment"
bind:selectedId={targetEnvId}
{environments}
/>
</div>
<!-- [DEF:DashboardSelectionSection] -->
<div class="mb-8">
<h2 class="text-lg font-medium mb-4">Select Dashboards</h2>
{#if sourceEnvId}
<DashboardGrid
{dashboards}
bind:selectedIds={selectedDashboardIds}
/>
{:else}
<p class="text-gray-500 italic">Select a source environment to view dashboards.</p>
{/if}
</div>
<!-- [/DEF:DashboardSelectionSection] -->
<div class="flex items-center mb-4">
<input
id="replace-db"
type="checkbox"
bind:checked={replaceDb}
on:change={() => { if (replaceDb && sourceDatabases.length === 0) fetchDatabases(); }}
class="h-4 w-4 text-indigo-600 focus:ring-indigo-500 border-gray-300 rounded"
/>
<label for="replace-db" class="ml-2 block text-sm text-gray-900">
Replace Database (Apply Mappings)
</label>
</div>
{#if replaceDb}
<div class="mb-8 p-4 border rounded-md bg-gray-50">
<h3 class="text-md font-medium mb-4">Database Mappings</h3>
{#if fetchingDbs}
<p>Loading databases and suggestions...</p>
{:else if sourceDatabases.length > 0}
<MappingTable
{sourceDatabases}
{targetDatabases}
{mappings}
{suggestions}
on:update={handleMappingUpdate}
/>
{:else if sourceEnvId && targetEnvId}
<button
on:click={fetchDatabases}
class="text-indigo-600 hover:text-indigo-500 text-sm font-medium"
>
Refresh Databases & Suggestions
</button>
{/if}
</div>
{/if}
<button
on:click={startMigration}
disabled={!sourceEnvId || !targetEnvId || sourceEnvId === targetEnvId || selectedDashboardIds.length === 0}
class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md shadow-sm text-white bg-indigo-600 hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 disabled:bg-gray-400"
>
Start Migration
</button>
{/if}
</div>
<!-- Modals -->
<TaskLogViewer
bind:show={showLogViewer}
taskId={logViewerTaskId}
taskStatus={logViewerTaskStatus}
on:close={() => showLogViewer = false}
/>
<PasswordPrompt
bind:show={showPasswordPrompt}
databases={passwordPromptDatabases}
errorMessage={passwordPromptErrorMessage}
on:resume={handleResumeMigration}
on:cancel={() => showPasswordPrompt = false}
/>
<!-- [/SECTION] -->
<style>
/* Page specific styles */
</style>
<!-- [/DEF:MigrationDashboard] -->

View File

@@ -0,0 +1,183 @@
<!-- [DEF:MappingManagement:Component] -->
<!--
@SEMANTICS: mapping, management, database, fuzzy-matching
@PURPOSE: Page for managing database mappings between environments.
@LAYER: Page
@RELATION: USES -> EnvSelector
@RELATION: USES -> MappingTable
@INVARIANT: Mappings are saved to the backend for persistence.
-->
<script lang="ts">
// [SECTION: IMPORTS]
import { onMount } from 'svelte';
import EnvSelector from '../../../components/EnvSelector.svelte';
import MappingTable from '../../../components/MappingTable.svelte';
// [/SECTION]
// [SECTION: STATE]
let environments = [];
let sourceEnvId = "";
let targetEnvId = "";
let sourceDatabases = [];
let targetDatabases = [];
let mappings = [];
let suggestions = [];
let loading = true;
let fetchingDbs = false;
let error = "";
let success = "";
// [/SECTION]
async function fetchEnvironments() {
try {
const response = await fetch('/api/environments');
if (!response.ok) throw new Error('Failed to fetch environments');
environments = await response.json();
} catch (e) {
error = e.message;
} finally {
loading = false;
}
}
onMount(fetchEnvironments);
// [DEF:fetchDatabases:Function]
/**
* @purpose Fetches databases from both environments and gets suggestions.
*/
async function fetchDatabases() {
if (!sourceEnvId || !targetEnvId) return;
fetchingDbs = true;
error = "";
success = "";
try {
const [srcRes, tgtRes, mapRes, sugRes] = await Promise.all([
fetch(`/api/environments/${sourceEnvId}/databases`),
fetch(`/api/environments/${targetEnvId}/databases`),
fetch(`/api/mappings?source_env_id=${sourceEnvId}&target_env_id=${targetEnvId}`),
fetch(`/api/mappings/suggest`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ source_env_id: sourceEnvId, target_env_id: targetEnvId })
})
]);
if (!srcRes.ok || !tgtRes.ok) throw new Error('Failed to fetch databases from environments');
sourceDatabases = await srcRes.json();
targetDatabases = await tgtRes.json();
mappings = await mapRes.json();
suggestions = await sugRes.json();
} catch (e) {
error = e.message;
} finally {
fetchingDbs = false;
}
}
// [/DEF:fetchDatabases]
// [DEF:handleUpdate:Function]
/**
* @purpose Saves a mapping to the backend.
*/
async function handleUpdate(event: CustomEvent) {
const { sourceUuid, targetUuid } = event.detail;
const sDb = sourceDatabases.find(d => d.uuid === sourceUuid);
const tDb = targetDatabases.find(d => d.uuid === targetUuid);
if (!sDb || !tDb) return;
try {
const response = await fetch('/api/mappings', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
source_env_id: sourceEnvId,
target_env_id: targetEnvId,
source_db_uuid: sourceUuid,
target_db_uuid: targetUuid,
source_db_name: sDb.database_name,
target_db_name: tDb.database_name
})
});
if (!response.ok) throw new Error('Failed to save mapping');
const savedMapping = await response.json();
mappings = [...mappings.filter(m => m.source_db_uuid !== sourceUuid), savedMapping];
success = "Mapping saved successfully";
} catch (e) {
error = e.message;
}
}
// [/DEF:handleUpdate]
</script>
<!-- [SECTION: TEMPLATE] -->
<div class="max-w-6xl mx-auto p-6">
<h1 class="text-2xl font-bold mb-6">Database Mapping Management</h1>
{#if loading}
<p>Loading environments...</p>
{:else}
<div class="grid grid-cols-1 md:grid-cols-2 gap-6 mb-8">
<EnvSelector
label="Source Environment"
bind:selectedId={sourceEnvId}
{environments}
on:change={() => { sourceDatabases = []; mappings = []; suggestions = []; }}
/>
<EnvSelector
label="Target Environment"
bind:selectedId={targetEnvId}
{environments}
on:change={() => { targetDatabases = []; mappings = []; suggestions = []; }}
/>
</div>
<div class="mb-8">
<button
on:click={fetchDatabases}
disabled={!sourceEnvId || !targetEnvId || sourceEnvId === targetEnvId || fetchingDbs}
class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-md shadow-sm text-white bg-indigo-600 hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 disabled:bg-gray-400"
>
{fetchingDbs ? 'Fetching...' : 'Fetch Databases & Suggestions'}
</button>
</div>
{#if error}
<div class="bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded mb-4">
{error}
</div>
{/if}
{#if success}
<div class="bg-green-100 border border-green-400 text-green-700 px-4 py-3 rounded mb-4">
{success}
</div>
{/if}
{#if sourceDatabases.length > 0}
<MappingTable
{sourceDatabases}
{targetDatabases}
{mappings}
{suggestions}
on:update={handleUpdate}
/>
{:else if !fetchingDbs && sourceEnvId && targetEnvId}
<p class="text-gray-500 italic">Select environments and click "Fetch Databases" to start mapping.</p>
{/if}
{/if}
</div>
<!-- [/SECTION] -->
<style>
/* Page specific styles */
</style>
<!-- [/DEF:MappingManagement] -->

View File

@@ -0,0 +1,120 @@
/**
* Service for interacting with the Task Management API.
*/
const API_BASE = '/api/tasks';
/**
* Fetch a list of tasks with pagination and optional status filter.
* @param {number} limit - Maximum number of tasks to return.
* @param {number} offset - Number of tasks to skip.
* @param {string|null} status - Filter by task status (optional).
* @returns {Promise<Array>} List of tasks.
*/
export async function getTasks(limit = 10, offset = 0, status = null) {
const params = new URLSearchParams({
limit: limit.toString(),
offset: offset.toString()
});
if (status) {
params.append('status', status);
}
const response = await fetch(`${API_BASE}?${params.toString()}`);
if (!response.ok) {
throw new Error(`Failed to fetch tasks: ${response.statusText}`);
}
return await response.json();
}
/**
* Fetch details for a specific task.
* @param {string} taskId - The ID of the task.
* @returns {Promise<Object>} Task details.
*/
export async function getTask(taskId) {
const response = await fetch(`${API_BASE}/${taskId}`);
if (!response.ok) {
throw new Error(`Failed to fetch task ${taskId}: ${response.statusText}`);
}
return await response.json();
}
/**
* Fetch logs for a specific task.
* @param {string} taskId - The ID of the task.
* @returns {Promise<Array>} List of log entries.
*/
export async function getTaskLogs(taskId) {
// Logs are currently embedded in the task object; the backend (tasks.py) does not yet
// expose a dedicated GET /api/tasks/{task_id}/logs endpoint (T017, planned for Phase 3).
// Until then, fetch the task and return its logs.
const task = await getTask(taskId);
return task.logs || [];
}
/**
* Resume a task that is awaiting input (e.g., passwords).
* @param {string} taskId - The ID of the task.
* @param {Object} passwords - Map of database names to passwords.
* @returns {Promise<Object>} Updated task object.
*/
export async function resumeTask(taskId, passwords) {
const response = await fetch(`${API_BASE}/${taskId}/resume`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ passwords })
});
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || `Failed to resume task: ${response.statusText}`);
}
return await response.json();
}
/**
* Resolve a task that is awaiting mapping.
* @param {string} taskId - The ID of the task.
* @param {Object} resolutionParams - Resolution parameters.
* @returns {Promise<Object>} Updated task object.
*/
export async function resolveTask(taskId, resolutionParams) {
const response = await fetch(`${API_BASE}/${taskId}/resolve`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ resolution_params: resolutionParams })
});
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || `Failed to resolve task: ${response.statusText}`);
}
return await response.json();
}
/**
* Clear tasks based on status.
* @param {string|null} status - Filter by task status (optional).
*/
export async function clearTasks(status = null) {
const params = new URLSearchParams();
if (status) {
params.append('status', status);
}
const response = await fetch(`${API_BASE}?${params.toString()}`, {
method: 'DELETE'
});
if (!response.ok) {
throw new Error(`Failed to clear tasks: ${response.statusText}`);
}
}

View File

@@ -0,0 +1,13 @@
export interface DashboardMetadata {
id: number;
title: string;
last_modified: string;
status: string;
}
export interface DashboardSelection {
selected_ids: number[];
source_env_id: string;
target_env_id: string;
replace_db_config?: boolean;
}

298
logs/лог.md Executable file
View File

@@ -0,0 +1,298 @@
PS H:\dev\ss-tools> & C:/ProgramData/anaconda3/python.exe h:/dev/ss-tools/migration_script.py
2025-12-16 11:50:28,192 - INFO - [run][Entry] Запуск скрипта миграции.
=== Поведение при ошибке импорта ===
Если импорт завершится ошибкой, удалить существующий дашборд и попытаться импортировать заново? (y/n): n
2025-12-16 11:50:33,363 - INFO - [ask_delete_on_failure][State] Delete-on-failure = False
2025-12-16 11:50:33,368 - INFO - [select_environments][Entry] Шаг 1/5: Выбор окружений.
2025-12-16 11:50:33,374 - INFO - [setup_clients][Enter] Starting Superset clients initialization.
2025-12-16 11:50:33,730 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,734 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,739 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,742 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,746 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,750 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,754 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,758 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,761 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,764 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,769 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,772 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,776 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,779 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,782 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,786 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,790 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,794 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,799 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,805 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,808 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,811 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,815 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,820 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,823 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,827 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,831 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,834 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,838 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,840 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,847 - INFO - [setup_clients][Exit] All clients (dev, prod, sbx, preprod, uatta, dev5) initialized successfully.
=== Выбор окружения ===
Исходное окружение:
1) dev
2) prod
3) sbx
4) preprod
5) uatta
6) dev5
Введите номер (0 отмена): 4
2025-12-16 11:50:42,379 - INFO - [select_environments][State] from = preprod
=== Выбор окружения ===
Целевое окружение:
1) dev
2) prod
3) sbx
4) uatta
5) dev5
Введите номер (0 отмена): 5
2025-12-16 11:50:45,176 - INFO - [select_environments][State] to = dev5
2025-12-16 11:50:45,182 - INFO - [select_environments][Exit] Шаг 1 завершён.
2025-12-16 11:50:45,186 - INFO - [select_dashboards][Entry] Шаг 2/5: Выбор дашбордов.
2025-12-16 11:50:45,190 - INFO - [get_dashboards][Enter] Fetching dashboards.
2025-12-16 11:50:45,197 - INFO - [authenticate][Enter] Authenticating to https://preprodta.bi.dwh.rusal.com/api/v1
2025-12-16 11:50:45,880 - INFO - [authenticate][Exit] Authenticated successfully.
2025-12-16 11:50:46,025 - INFO - [get_dashboards][Exit] Found 95 dashboards.
=== Поиск ===
Введите регулярное выражение для поиска дашбордов:
fi
=== Выбор дашбордов ===
Отметьте нужные дашборды (введите номера):
1) [ALL] Все дашборды
2) [185] FI-0060 Финансы. Налоги. Данные по налогам. Старый
3) [184] FI-0083 Статистика по ДЗ/ПДЗ
4) [187] FI-0081 ПДЗ Казначейство
5) [122] FI-0080 Финансы. Оборотный Капитал ДЗ/КЗ
6) [208] FI-0020 Просроченная дебиторская и кредиторская задолженность в динамике
7) [126] FI-0022 Кредиторская задолженность для казначейства
8) [196] FI-0023 Дебиторская задолженность для казначейства
9) [113] FI-0060 Финансы. Налоги. Данные по налогам.
10) [173] FI-0040 Оборотно-сальдовая ведомость (ОСВ) по контрагентам
11) [174] FI-0021 Дебиторская и кредиторская задолженность по документам
12) [172] FI-0030 Дебиторская задолженность по штрафам
13) [170] FI-0050 Налог на прибыль (ОНА и ОНО)
14) [159] FI-0070 Досье контрагента
Введите номера через запятую (пустой ввод → отказ): 2
2025-12-16 11:50:52,235 - INFO - [select_dashboards][State] Выбрано 1 дашбордов.
2025-12-16 11:50:52,242 - INFO - [select_dashboards][Exit] Шаг 2 завершён.
=== Замена БД ===
Заменить конфигурацию БД в YAMLфайлах? (y/n): y
2025-12-16 11:50:53,808 - INFO - [_select_databases][Entry] Selecting databases from both environments.
2025-12-16 11:50:53,816 - INFO - [get_databases][Enter] Fetching databases.
2025-12-16 11:50:53,918 - INFO - [get_databases][Exit] Found 12 databases.
2025-12-16 11:50:53,923 - INFO - [get_databases][Enter] Fetching databases.
2025-12-16 11:50:53,926 - INFO - [authenticate][Enter] Authenticating to https://dev.bi.dwh.rusal.com/api/v1
2025-12-16 11:50:54,450 - INFO - [authenticate][Exit] Authenticated successfully.
2025-12-16 11:50:54,551 - INFO - [get_databases][Exit] Found 4 databases.
=== Выбор исходной БД ===
Выберите исходную БД:
1) DEV datalab (ID: 9)
2) Prod Greenplum (ID: 7)
3) DEV Clickhouse New (OLD) (ID: 16)
4) Preprod Clickhouse New (ID: 15)
5) DEV Greenplum (ID: 1)
6) Prod Clickhouse Node 1 (ID: 11)
7) Preprod Postgre Superset Internal (ID: 5)
8) Prod Postgre Superset Internal (ID: 28)
9) Prod Clickhouse (ID: 10)
10) Dev Clickhouse (correct) (ID: 14)
11) DEV ClickHouse New (ID: 23)
12) Sandbox Postgre Superset Internal (ID: 12)
Введите номер (0 отмена): 9
2025-12-16 11:51:11,008 - INFO - [get_database][Enter] Fetching database 10.
2025-12-16 11:51:11,038 - INFO - [get_database][Exit] Got database 10.
=== Выбор целевой БД ===
Выберите целевую БД:
1) DEV Greenplum (ID: 2)
2) DEV Clickhouse (ID: 3)
3) DEV ClickHouse New (ID: 4)
4) Dev Postgre Superset Internal (ID: 1)
Введите номер (0 отмена): 2
2025-12-16 11:51:15,559 - INFO - [get_database][Enter] Fetching database 3.
2025-12-16 11:51:15,586 - INFO - [get_database][Exit] Got database 3.
2025-12-16 11:51:15,589 - INFO - [_select_databases][Exit] Selected databases: Без имени -> Без имени
old_db: {'id': 10, 'result': {'allow_ctas': False, 'allow_cvas': False, 'allow_dml': True, 'allow_file_upload': False, 'allow_run_async': False, 'backen
d': 'clickhousedb', 'cache_timeout': None, 'configuration_method': 'sqlalchemy_form', 'database_name': 'Prod Clickhouse', 'driver': 'connect', 'engine_i
nformation': {'disable_ssh_tunneling': False, 'supports_file_upload': False}, 'expose_in_sqllab': True, 'force_ctas_schema': None, 'id': 10, 'impersonat
e_user': False, 'is_managed_externally': False, 'uuid': '97aced68-326a-4094-b381-27980560efa9'}}
2025-12-16 11:51:15,591 - INFO - [confirm_db_config_replacement][State] Replacement set: {'old': {'database_name': None, 'uuid': None, 'id': '10'}, 'new
': {'database_name': None, 'uuid': None, 'id': '3'}}
2025-12-16 11:51:15,594 - INFO - [execute_migration][Entry] Starting migration of 1 dashboards.
=== Миграция... ===
Миграция: FI-0060 Финансы. Налоги. Данные по налогам. Старый (1/1) 0%2025-12-16 11:51:15,598 - INFO - [export_dashboard][Enter] Exporting dashboard 185.
2025-12-16 11:51:16,142 - INFO - [export_dashboard][Exit] Exported dashboard 185 to dashboard_export_20251216T085115.zip.
2025-12-16 11:51:16,205 - INFO - [update_yamls][Enter] Starting YAML configuration update.
2025-12-16 11:51:16,208 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\metadata.yaml
2025-12-16 11:51:16,209 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-01_2787.yaml
2025-12-16 11:51:16,210 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_2_4030.yaml
2025-12-16 11:51:16,212 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_4029.yaml
2025-12-16 11:51:16,213 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_TOTAL2_4036.yaml
2025-12-16 11:51:16,215 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_TOTAL2_4037.yaml
2025-12-16 11:51:16,216 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_TOTAL_4028.yaml
2025-12-16 11:51:16,217 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_ZNODE_ROOT2_4024.yaml
2025-12-16 11:51:16,218 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_ZNODE_ROOT_4033.yaml
2025-12-16 11:51:16,220 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-02_ZFUND-BD2_4021.yaml
2025-12-16 11:51:16,221 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-02_ZFUND_4027.yaml
2025-12-16 11:51:16,222 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-02_ZFUND_4034.yaml
2025-12-16 11:51:16,224 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02_ZTAX_4022.yaml
2025-12-16 11:51:16,226 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02_ZTAX_4035.yaml
2025-12-16 11:51:16,227 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-04-2_4031.yaml
2025-12-16 11:51:16,228 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-05-01_4026.yaml
2025-12-16 11:51:16,230 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-05-01_4032.yaml
2025-12-16 11:51:16,231 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-06_1_4023.yaml
2025-12-16 11:51:16,233 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-06_2_4020.yaml
2025-12-16 11:51:16,234 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060_4025.yaml
2025-12-16 11:51:16,236 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\dashboards\FI-0060_185.yaml
2025-12-16 11:51:16,238 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\databases\Prod_Clickhouse_10.yaml
2025-12-16 11:51:16,240 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0000_-_685.yaml
2025-12-16 11:51:16,241 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-01-2_zfund_reciever_-_861.yaml
2025-12-16 11:51:16,242 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-01_zfund_reciever_click_689.yaml
2025-12-16 11:51:16,244 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-02_680.yaml
2025-12-16 11:51:16,245 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-03_ztax_862.yaml
2025-12-16 11:51:16,246 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-04_zpbe_681.yaml
2025-12-16 11:51:16,247 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-05_ZTAXZFUND_679.yaml
2025-12-16 11:51:16,249 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-06_860.yaml
2025-12-16 11:51:16,250 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-08_682.yaml
2025-12-16 11:51:16,251 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-10_zpbe_688.yaml
2025-12-16 11:51:16,253 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-11_ZTAX_NAME_863.yaml
2025-12-16 11:51:16,254 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_683.yaml
2025-12-16 11:51:16,255 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_684.yaml
2025-12-16 11:51:16,256 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_686.yaml
2025-12-16 11:51:16,258 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_690.yaml
2025-12-16 11:51:16,259 - INFO - [create_dashboard_export][Enter] Packing dashboard: ['C:\\Users\\LO54FB~1\\Temp\\tmpuidfegpd.dir'] -> C:\Users\LO54FB~1
\Temp\tmps7cuv2ti.zip
2025-12-16 11:51:16,347 - INFO - [create_dashboard_export][Exit] Archive created: C:\Users\LO54FB~1\Temp\tmps7cuv2ti.zip
2025-12-16 11:51:16,372 - ERROR - [import_dashboard][Failure] First import attempt failed: [API_FAILURE] API error during upload: {"errors": [{"message"
: "Expecting value: line 1 column 1 (char 0)", "error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "messag
e": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448
\u0438\u0431\u043a\u0430."}]}}]} | Context: {'type': 'api_call'}
Traceback (most recent call last):
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 186, in _perform_upload
response.raise_for_status()
File "C:\ProgramData\anaconda3\Lib\site-packages\requests\models.py", line 1021, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 500 Server Error: INTERNAL SERVER ERROR for url: https://dev.bi.dwh.rusal.com/api/v1/dashboard/import/
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "h:\dev\ss-tools\superset_tool\client.py", line 141, in import_dashboard
return self._do_import(file_path)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\client.py", line 197, in _do_import
return self.network.upload_file(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 172, in upload_file
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 196, in _perform_upload
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
superset_tool.exceptions.SupersetAPIError: [API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 column 1 (char 0)", "
error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437
\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]}}]} | Context: {'ty
pe': 'api_call'}
2025-12-16 11:51:16,511 - ERROR - [execute_migration][Failure] [API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 c
olumn 1 (char 0)", "error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u04
40\u043e\u0438\u0437\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]
}}]} | Context: {'type': 'api_call'}
Traceback (most recent call last):
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 186, in _perform_upload
response.raise_for_status()
File "C:\ProgramData\anaconda3\Lib\site-packages\requests\models.py", line 1021, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 500 Server Error: INTERNAL SERVER ERROR for url: https://dev.bi.dwh.rusal.com/api/v1/dashboard/import/
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "h:\dev\ss-tools\migration_script.py", line 366, in execute_migration
self.to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
File "h:\dev\ss-tools\superset_tool\client.py", line 141, in import_dashboard
return self._do_import(file_path)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\client.py", line 197, in _do_import
return self.network.upload_file(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 172, in upload_file
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 196, in _perform_upload
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
superset_tool.exceptions.SupersetAPIError: [API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 column 1 (char 0)", "
error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437
\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]}}]} | Context: {'ty
pe': 'api_call'}
=== Ошибка ===
Не удалось мигрировать дашборд FI-0060 Финансы. Налоги. Данные по налогам. Старый.
[API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 column 1 (char 0)", "error_type": "GENERIC_BACKEND_ERROR", "leve
l": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437\u043e\u0448\u043b\u0430 \u043d\u0435\u0438
\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]}}]} | Context: {'type': 'api_call'}
100%
2025-12-16 11:51:16,598 - INFO - [execute_migration][Exit] Migration finished.
=== Информация ===
Миграция завершена!
2025-12-16 11:51:16,605 - INFO - [run][Exit] Скрипт миграции завершён.

BIN
mappings.db Normal file

Binary file not shown.

View File

@@ -11,8 +11,9 @@ This protocol standardizes the "Semantic Bridge" between the two languages using
## I. CORE REQUIREMENTS ## I. CORE REQUIREMENTS
1. **Causal Validity:** Semantic definitions (Contracts) must ALWAYS precede implementation code. 1. **Causal Validity:** Semantic definitions (Contracts) must ALWAYS precede implementation code.
2. **Immutability:** Architectural decisions defined in the Module/Component Header are treated as immutable constraints. 2. **Immutability:** Architectural decisions defined in the Module/Component Header are treated as immutable constraints.
3. **Format Compliance:** Output must strictly follow the `[DEF]` / `[/DEF]` anchor syntax for structure. 3. **Format Compliance:** Output must strictly follow the `[DEF:..:...]` / `[/DEF:...:...]` anchor syntax for structure.
4. **Logic over Assertion:** Contracts define the *logic flow*. Do not generate explicit `assert` statements unless requested. The code logic itself must inherently satisfy the Pre/Post conditions (e.g., via control flow, guards, or types). 4. **Logic over Assertion:** Contracts define the *logic flow*. Do not generate explicit `assert` statements unless requested. The code logic itself must inherently satisfy the Pre/Post conditions (e.g., via control flow, guards, or types).
5. **Fractal Complexity:** Modules and functions must adhere to strict size limits (~300 lines/module, ~30-50 lines/function) to maintain semantic focus.
--- ---
@@ -25,13 +26,13 @@ Used to define the boundaries of Modules, Classes, Components, and Functions.
* **Python:** * **Python:**
* Start: `# [DEF:identifier:Type]` * Start: `# [DEF:identifier:Type]`
* End: `# [/DEF:identifier]` * End: `# [/DEF:identifier:Type]`
* **Svelte (Top-level):** * **Svelte (Top-level):**
* Start: `<!-- [DEF:ComponentName:Component] -->` * Start: `<!-- [DEF:ComponentName:Component] -->`
* End: `<!-- [/DEF:ComponentName] -->` * End: `<!-- [/DEF:ComponentName:Component] -->`
* **Svelte (Script/JS/TS):** * **Svelte (Script/JS/TS):**
* Start: `// [DEF:funcName:Function]` * Start: `// [DEF:funcName:Function]`
* End: `// [/DEF:funcName]` * End: `// [/DEF:funcName:Function]`
**Types:** `Module`, `Component`, `Class`, `Function`, `Store`, `Action`. **Types:** `Module`, `Component`, `Class`, `Function`, `Store`, `Action`.
@@ -63,7 +64,7 @@ Defines high-level dependencies.
# ... IMPLEMENTATION ... # ... IMPLEMENTATION ...
# [/DEF:module_name] # [/DEF:module_name:Module]
``` ```
### 2. Svelte Component Header (`.svelte`) ### 2. Svelte Component Header (`.svelte`)
@@ -81,20 +82,20 @@ Defines high-level dependencies.
<script lang="ts"> <script lang="ts">
// [SECTION: IMPORTS] // [SECTION: IMPORTS]
// ... // ...
// [/SECTION] // [/SECTION: IMPORTS]
// ... LOGIC IMPLEMENTATION ... // ... LOGIC IMPLEMENTATION ...
</script> </script>
<!-- [SECTION: TEMPLATE] --> <!-- [SECTION: TEMPLATE] -->
... ...
<!-- [/SECTION] --> <!-- [/SECTION: TEMPLATE] -->
<style> <style>
/* ... */ /* ... */
</style> </style>
<!-- [/DEF:ComponentName] --> <!-- [/DEF:ComponentName:Component] -->
``` ```
--- ---
@@ -122,7 +123,7 @@ def calculate_total(items: List[Item]) -> Decimal:
# Logic ensuring @POST # Logic ensuring @POST
return total return total
# [/DEF:calculate_total] # [/DEF:calculate_total:Function]
``` ```
### 2. Svelte/JS Contract Style (JSDoc) ### 2. Svelte/JS Contract Style (JSDoc)
@@ -145,7 +146,7 @@ async function updateUserProfile(profileData) {
// ... // ...
} }
// [/DEF:updateUserProfile] // [/DEF:updateUserProfile:Function]
``` ```
--- ---
@@ -154,21 +155,32 @@ async function updateUserProfile(profileData) {
Logs delineate the agent's internal state. Logs delineate the agent's internal state.
* **Python:** `logger.info(f"[{ANCHOR_ID}][{STATE}] Msg")` * **Python:** MUST use a Context Manager (e.g., `with belief_scope("ANCHOR_ID"):`) to ensure state consistency and automatic handling of Entry/Exit/Error states.
* Manual logging (inside scope): `logger.info(f"[{ANCHOR_ID}][{STATE}] Msg")`
* **Svelte/JS:** `console.log(\`[${ANCHOR_ID}][${STATE}] Msg\`)` * **Svelte/JS:** `console.log(\`[${ANCHOR_ID}][${STATE}] Msg\`)`
**Required States:** **Required States:**
1. `Entry` (Start of block) 1. `Entry` (Start of block - Auto-logged by Context Manager)
2. `Action` (Key business logic) 2. `Action` (Key business logic - Manual log)
3. `Coherence:OK` (Logic successfully completed) 3. `Coherence:OK` (Logic successfully completed - Auto-logged by Context Manager)
4. `Coherence:Failed` (Error handling) 4. `Coherence:Failed` (Exception/Error - Auto-logged by Context Manager)
5. `Exit` (End of block) 5. `Exit` (End of block - Auto-logged by Context Manager)
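For illustration, a minimal `belief_scope` sketch consistent with the auto-logged states listed above; the actual helper in this codebase may differ in signature, log formatting, and configuration:

```python
# Illustrative only: a minimal belief_scope matching the states above, not the repo's helper.
import logging
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def belief_scope(anchor_id: str):
    """Auto-log Entry/Exit and Coherence states for a [DEF] block."""
    logger.info(f"[{anchor_id}][Entry] Entering scope")
    try:
        yield
        logger.info(f"[{anchor_id}][Coherence:OK] Logic completed")
    except Exception as exc:
        logger.error(f"[{anchor_id}][Coherence:Failed] {exc}")
        raise
    finally:
        logger.info(f"[{anchor_id}][Exit] Leaving scope")

# Usage: manual Action logs go inside the scope.
with belief_scope("calculate_total"):
    logger.info("[calculate_total][Action] Summing items")
```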
--- ---
## VI. GENERATION WORKFLOW ## VI. FRACTAL COMPLEXITY LIMIT
To maintain semantic coherence and avoid "Attention Sink" issues:
* **Module Size:** If a Module body exceeds ~300 lines (or logical complexity), it MUST be refactored into sub-modules or a package structure.
* **Function Size:** Functions should fit within a standard attention "chunk" (approx. 30-50 lines). If larger, logic MUST be decomposed into helper functions with their own contracts.
This ensures every vector embedding remains sharp and focused.
---
## VII. GENERATION WORKFLOW
1. **Context Analysis:** Identify language (Python vs Svelte) and Architecture Layer. 1. **Context Analysis:** Identify language (Python vs Svelte) and Architecture Layer.
2. **Scaffolding:** Generate the `[DEF]` Anchors and Header/Contract **before** writing any logic. 2. **Scaffolding:** Generate the `[DEF:...:...]` Anchors and Header/Contract **before** writing any logic.
3. **Implementation:** Write the code. Ensure the code logic handles the `@PRE` conditions (e.g., via `if/return` or guards) and satisfies `@POST` conditions naturally. **Do not write explicit `assert` statements unless debugging mode is requested.** 3. **Implementation:** Write the code. Ensure the code logic handles the `@PRE` conditions (e.g., via `if/return` or guards) and satisfies `@POST` conditions naturally. **Do not write explicit `assert` statements unless debugging mode is requested.**
4. **Closure:** Ensure every `[DEF]` is closed with `[/DEF]` to accumulate semantic context. 4. **Closure:** Ensure every `[DEF:...:...]` is closed with `[/DEF:...:...]` to accumulate semantic context.

View File

@@ -0,0 +1,34 @@
# Specification Quality Checklist: Migration Process and UI Redesign
**Purpose**: Validate specification completeness and quality before proceeding to planning
**Created**: 2025-12-20
**Feature**: [specs/001-migration-ui-redesign/spec.md](specs/001-migration-ui-redesign/spec.md)
## Content Quality
- [x] No implementation details (languages, frameworks, APIs)
- [x] Focused on user value and business needs
- [x] Written for non-technical stakeholders
- [x] All mandatory sections completed
## Requirement Completeness
- [x] No [NEEDS CLARIFICATION] markers remain
- [x] Requirements are testable and unambiguous
- [x] Success criteria are measurable
- [x] Success criteria are technology-agnostic (no implementation details)
- [x] All acceptance scenarios are defined
- [x] Edge cases are identified
- [x] Scope is clearly bounded
- [x] Dependencies and assumptions identified
## Feature Readiness
- [x] All functional requirements have clear acceptance criteria
- [x] User scenarios cover primary flows
- [x] Feature meets measurable outcomes defined in Success Criteria
- [x] No implementation details leak into specification
## Notes
- Items marked incomplete require spec updates before `/speckit.clarify` or `/speckit.plan`

View File

@@ -0,0 +1,115 @@
# API Contracts: Migration Process and UI Redesign
## Environment Management
### GET /api/environments
List all configured environments.
**Response (200 OK)**:
```json
[
{
"id": "uuid",
"name": "Development",
"url": "https://superset-dev.example.com"
}
]
```
### GET /api/environments/{id}/databases
Fetch the list of databases from a specific environment.
**Response (200 OK)**:
```json
[
{
"uuid": "db-uuid",
"database_name": "Dev Clickhouse",
"engine": "clickhouse"
}
]
```
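For illustration, a minimal FastAPI sketch of these two read endpoints; the in-memory store and the empty database list are stand-ins for the project's real environment store and Superset client, and the names are assumptions rather than the actual implementation:

```python
# Hypothetical sketch of the two read endpoints documented above; names are illustrative.
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel

router = APIRouter()

class Environment(BaseModel):
    id: str
    name: str
    url: str

class Database(BaseModel):
    uuid: str
    database_name: str
    engine: str

# Stand-in for the SQLite-backed environment store.
ENVIRONMENTS: dict[str, Environment] = {}

@router.get("/api/environments", response_model=list[Environment])
async def list_environments():
    return list(ENVIRONMENTS.values())

@router.get("/api/environments/{env_id}/databases", response_model=list[Database])
async def list_databases(env_id: str):
    env = ENVIRONMENTS.get(env_id)
    if env is None:
        raise HTTPException(status_code=404, detail="Environment not found")
    # A real implementation would query the Superset API behind env.url here.
    return []
```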
## Database Mapping
### GET /api/mappings
List all saved database mappings.
**Query Parameters**:
- `source_env_id`: Filter by source environment.
- `target_env_id`: Filter by target environment.
**Response (200 OK)**:
```json
[
{
"id": "uuid",
"source_env_id": "uuid",
"target_env_id": "uuid",
"source_db_uuid": "uuid",
"target_db_uuid": "uuid",
"source_db_name": "Dev Clickhouse",
"target_db_name": "Prod Clickhouse"
}
]
```
### POST /api/mappings
Create or update a database mapping.
**Request Body**:
```json
{
"source_env_id": "uuid",
"target_env_id": "uuid",
"source_db_uuid": "uuid",
"target_db_uuid": "uuid"
}
```
### POST /api/mappings/suggest
Get suggested mappings based on fuzzy matching.
**Request Body**:
```json
{
"source_env_id": "uuid",
"target_env_id": "uuid"
}
```
**Response (200 OK)**:
```json
[
{
"source_db_uuid": "uuid",
"target_db_uuid": "uuid",
"confidence": 0.95
}
]
```
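A minimal sketch of how the suggestion confidence could be derived from name similarity, using Python's standard `difflib`; the real matcher may use a different algorithm or threshold:

```python
# Illustrative fuzzy-matching sketch for /api/mappings/suggest; the threshold is an assumption.
from difflib import SequenceMatcher

def suggest_mappings(source_dbs: list[dict], target_dbs: list[dict],
                     min_confidence: float = 0.6) -> list[dict]:
    """Pair each source database with its best name match above a confidence threshold."""
    suggestions = []
    for src in source_dbs:
        best, best_score = None, 0.0
        for tgt in target_dbs:
            score = SequenceMatcher(
                None, src["database_name"].lower(), tgt["database_name"].lower()
            ).ratio()
            if score > best_score:
                best, best_score = tgt, score
        if best is not None and best_score >= min_confidence:
            suggestions.append({
                "source_db_uuid": src["uuid"],
                "target_db_uuid": best["uuid"],
                "confidence": round(best_score, 2),
            })
    return suggestions

# Example: "Prod Clickhouse" pairs with "DEV Clickhouse" rather than "DEV Greenplum".
print(suggest_mappings(
    [{"uuid": "s1", "database_name": "Prod Clickhouse"}],
    [{"uuid": "t1", "database_name": "DEV Clickhouse"},
     {"uuid": "t2", "database_name": "DEV Greenplum"}],
))
```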
## Migration Execution
### POST /api/migrations
Start a migration job.
**Request Body**:
```json
{
"source_env_id": "uuid",
"target_env_id": "uuid",
"assets": [
{"type": "dashboard", "id": 123}
],
"replace_db": true
}
```
**Response (202 Accepted)**:
```json
{
"job_id": "uuid",
"status": "RUNNING"
}
```
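As a usage illustration, a hypothetical client call against this endpoint; the base URL, port, and IDs are placeholders, not the project's actual configuration:

```python
# Hypothetical client call for starting a migration job; URL and IDs are placeholders.
import requests

payload = {
    "source_env_id": "<source-env-uuid>",
    "target_env_id": "<target-env-uuid>",
    "assets": [{"type": "dashboard", "id": 123}],
    "replace_db": True,
}

response = requests.post("http://localhost:8000/api/migrations", json=payload, timeout=30)
response.raise_for_status()  # expect 202 Accepted
job = response.json()
print(f"Started migration job {job['job_id']} (status: {job['status']})")
```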

View File

@@ -0,0 +1,48 @@
# Data Model: Migration Process and UI Redesign
## Entities
### Environment
Represents a Superset instance.
| Field | Type | Description |
|-------|------|-------------|
| `id` | UUID | Primary Key |
| `name` | String | Display name (e.g., "Development", "Production") |
| `url` | String | Base URL of the Superset instance |
| `credentials_id` | String | Reference to encrypted credentials in the config manager |
### DatabaseMapping
Represents a mapping between a database in the source environment and a database in the target environment.
| Field | Type | Description |
|-------|------|-------------|
| `id` | UUID | Primary Key |
| `source_env_id` | UUID | Foreign Key to Environment (Source) |
| `target_env_id` | UUID | Foreign Key to Environment (Target) |
| `source_db_uuid` | String | UUID of the database in the source environment |
| `target_db_uuid` | String | UUID of the database in the target environment |
| `source_db_name` | String | Name of the database in the source environment (for UI) |
| `target_db_name` | String | Name of the database in the target environment (for UI) |
| `engine` | String | Database engine type (e.g., "clickhouse", "postgres") |
### MigrationJob
Represents a single migration execution.
| Field | Type | Description |
|-------|------|-------------|
| `id` | UUID | Primary Key |
| `source_env_id` | UUID | Foreign Key to Environment |
| `target_env_id` | UUID | Foreign Key to Environment |
| `status` | Enum | `PENDING`, `RUNNING`, `COMPLETED`, `FAILED`, `AWAITING_MAPPING` |
| `replace_db` | Boolean | Whether to apply database mappings |
| `created_at` | DateTime | Timestamp of creation |
## Relationships
- `DatabaseMapping` belongs to a pair of `Environments`.
- `MigrationJob` references two `Environments`.
## Validation Rules
- `source_env_id` and `target_env_id` must be different.
- `source_db_uuid` and `target_db_uuid` must belong to databases with compatible engines (optional warning).
- Mappings must be unique for a given `(source_env_id, target_env_id, source_db_uuid)` triplet.
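As a rough illustration of how these entities and the uniqueness rule could be expressed with SQLAlchemy; table and column details are assumptions, and the actual models live in `backend/src/models/mapping.py`:
```python
# Illustrative only; the real mapping model may differ.
import uuid
from sqlalchemy import Column, String, UniqueConstraint
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class DatabaseMapping(Base):
    __tablename__ = "database_mappings"
    __table_args__ = (
        # Enforces the uniqueness rule above for a given environment pair.
        UniqueConstraint("source_env_id", "target_env_id", "source_db_uuid"),
    )

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    source_env_id = Column(String, nullable=False)
    target_env_id = Column(String, nullable=False)
    source_db_uuid = Column(String, nullable=False)
    target_db_uuid = Column(String, nullable=False)
    source_db_name = Column(String, nullable=False)
    target_db_name = Column(String, nullable=False)
    engine = Column(String, nullable=True)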

View File

@@ -0,0 +1,79 @@
# Implementation Plan: Migration Process and UI Redesign
**Branch**: `001-migration-ui-redesign` | **Date**: 2025-12-20 | **Spec**: [specs/001-migration-ui-redesign/spec.md](specs/001-migration-ui-redesign/spec.md)
## Summary
Redesign the migration process to support environment-based selection and automated database mapping. The technical approach involves using a SQLite database to persist mappings between source and target databases, implementing a fuzzy matching algorithm for empirical suggestions, and intercepting asset definitions during migration to apply these mappings.
## Technical Context
**Language/Version**: Python 3.9+, Node.js 18+
**Primary Dependencies**: FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLite
**Storage**: SQLite (for database mappings and environment metadata)
**Testing**: pytest (Backend), Vitest/Playwright (Frontend)
**Target Platform**: Linux server
**Project Type**: Web application (FastAPI + SvelteKit SPA)
**Performance Goals**: SC-001: Users can complete a full database mapping for 5+ databases in under 60 seconds.
**Constraints**: SPA-First Architecture (Constitution Principle I), API-Driven Communication (Constitution Principle II).
**Scale/Scope**: Support for multiple environments and hundreds of database mappings.
## Constitution Check
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
| Principle | Status | Notes |
|-----------|--------|-------|
| I. SPA-First Architecture | PASS | SvelteKit will be built as a static SPA and served by FastAPI. |
| II. API-Driven Communication | PASS | All mapping and migration actions will go through FastAPI endpoints. |
| III. Modern Stack Consistency | PASS | Using FastAPI, SvelteKit, and Tailwind CSS. |
| IV. Semantic Protocol Adherence | PASS | Code will include GRACE-Poly anchors and contracts. |
## Project Structure
### Documentation (this feature)
```text
specs/001-migration-ui-redesign/
├── plan.md # This file
├── research.md # Phase 0 output
├── data-model.md # Phase 1 output
├── quickstart.md # Phase 1 output
├── contracts/ # Phase 1 output
└── tasks.md # Phase 2 output
```
### Source Code (repository root)
```text
backend/
├── src/
│ ├── api/
│ │ └── routes/
│ │ ├── environments.py # New: Env selection
│ │ └── mappings.py # New: DB mapping management
│ ├── core/
│ │ └── migration_engine.py # Update: DB replacement logic
│ └── models/
│ └── mapping.py # New: SQLite models
└── tests/
frontend/
├── src/
│ ├── components/
│ │ ├── MappingTable.svelte # New: DB mapping UI
│ │ └── EnvSelector.svelte # New: Source/Target selection
│ └── routes/
│ └── migration/ # New: Migration dashboard
└── tests/
```
**Structure Decision**: Web application structure (Option 2) is selected to maintain separation between the FastAPI backend and SvelteKit frontend while adhering to the SPA-first principle.
## Complexity Tracking
> **Fill ONLY if Constitution Check has violations that must be justified**
| Violation | Why Needed | Simpler Alternative Rejected Because |
|-----------|------------|-------------------------------------|
| None | N/A | N/A |

View File

@@ -0,0 +1,39 @@
# Quickstart: Migration Process and UI Redesign
## Setup
1. **Install Dependencies**:
```bash
pip install rapidfuzz sqlalchemy
cd frontend && npm install
```
2. **Configure Environments**:
Ensure you have at least two Superset environments configured in the application settings.
3. **Initialize Database**:
The system will automatically create the `mappings.db` SQLite file on the first run.
## Usage
### 1. Define Mappings
1. Navigate to the **Database Mapping** tab.
2. Select your **Source** and **Target** environments.
3. Click **Fetch Databases**.
4. Review the **Suggested Mappings** (highlighted in green).
5. Manually adjust any mappings using the dropdowns.
6. Click **Save Mappings**.
### 2. Run Migration
1. Go to the **Migration** dashboard.
2. Select the **Source** and **Target** environments.
3. Select the assets (Dashboards/Datasets) you want to migrate.
4. Enable the **Replace Database** toggle.
5. Click **Start Migration**.
6. If a database is missing a mapping, a modal will appear prompting you to select a target database.
## Troubleshooting
- **Connection Error**: Ensure the backend can reach both Superset instances. Check credentials in settings.
- **Mapping Not Applied**: Verify that the "Replace Database" toggle was enabled and that the mapping exists for the specific environment pair.
- **Fuzzy Match Failure**: If names are too different, manual mapping is required. The system learns from manual overrides.

View File

@@ -0,0 +1,33 @@
# Research: Migration Process and UI Redesign
## Decision: Fuzzy Matching Algorithm
- **Choice**: `RapidFuzz` library with `fuzz.token_sort_ratio`.
- **Rationale**: `RapidFuzz` is significantly faster than `FuzzyWuzzy` and provides robust string similarity metrics. `token_sort_ratio` is ideal for database names because it ignores word order and is less sensitive to prefixes like "Dev-" or "Prod-".
- **Alternatives considered**:
- `Levenshtein`: Too sensitive to string length and prefixes.
- `Jaro-Winkler`: Good for short strings but less effective for multi-word names with different orders.
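A small sketch of how the suggestion scoring could look with `token_sort_ratio`; the prefix list, threshold, and normalization below are assumptions rather than final design:
```python
# Minimal suggestion scoring sketch using rapidfuzz.
from typing import List, Tuple
from rapidfuzz import fuzz

PREFIXES = ("dev", "prod", "stage", "test")

def normalize(name: str) -> str:
    # Strip common environment prefixes such as "Dev-" or "Prod " before scoring.
    lowered = name.lower()
    for prefix in PREFIXES:
        if lowered.startswith(prefix):
            lowered = lowered[len(prefix):].lstrip(" -_")
            break
    return lowered

def suggest_pairs(source_names: List[str], target_names: List[str],
                  threshold: float = 80.0) -> List[Tuple[str, str, float]]:
    suggestions = []
    for src in source_names:
        best = max(
            target_names,
            key=lambda tgt: fuzz.token_sort_ratio(normalize(src), normalize(tgt)),
            default=None,
        )
        if best is None:
            continue
        score = fuzz.token_sort_ratio(normalize(src), normalize(best))
        if score >= threshold:
            suggestions.append((src, best, score / 100))  # confidence in 0..1

    return suggestions

# Example: suggest_pairs(["Dev Clickhouse"], ["Prod Clickhouse"])
# -> [("Dev Clickhouse", "Prod Clickhouse", 1.0)]
```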
## Decision: Asset Interception Strategy
- **Choice**: ZIP-based transformation during migration.
- **Rationale**: Superset's native export/import format is a ZIP archive containing YAML definitions. Intercepting this archive allows for precise modification of database references (UUIDs) before they reach the target environment.
- **Implementation**:
1. Export dashboard/dataset from source (ZIP).
2. Extract ZIP to a temporary directory.
3. Iterate through `datasets/*.yaml` files.
4. Replace `database_uuid` values based on the mapping table.
5. Re-package the ZIP.
6. Import to target.
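A compact sketch of steps 2-5, done in memory rather than via a temporary directory for brevity; it assumes PyYAML and a `{source_uuid: target_uuid}` dict built from the mapping table:
```python
# Illustrative ZIP/YAML rewrite; not the actual migration_engine code.
import io
import zipfile
import yaml

def replace_database_uuids(export_zip: bytes, uuid_map: dict) -> bytes:
    out_buf = io.BytesIO()
    with zipfile.ZipFile(io.BytesIO(export_zip)) as src_zip, \
         zipfile.ZipFile(out_buf, "w", zipfile.ZIP_DEFLATED) as dst_zip:
        for item in src_zip.infolist():
            data = src_zip.read(item.filename)
            if "/datasets/" in item.filename and item.filename.endswith(".yaml"):
                doc = yaml.safe_load(data)
                # Swap the source database reference for the mapped target one.
                if doc and doc.get("database_uuid") in uuid_map:
                    doc["database_uuid"] = uuid_map[doc["database_uuid"]]
                    data = yaml.safe_dump(doc, sort_keys=False).encode("utf-8")
            dst_zip.writestr(item, data)
    return out_buf.getvalue()
```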
## Decision: Database Mapping Persistence
- **Choice**: SQLite with SQLAlchemy/SQLModel.
- **Rationale**: SQLite is lightweight, requires no separate server, and is perfect for storing local configuration and mappings. It aligns with the project's existing stack.
- **Schema**:
- `Environment`: `id`, `name`, `url`, `credentials_id`.
- `DatabaseMapping`: `id`, `source_env_id`, `target_env_id`, `source_db_uuid`, `target_db_uuid`, `source_db_name`, `target_db_name`.
## Decision: Superset API Integration
- **Choice**: Extend existing `SupersetClient`.
- **Rationale**: `SupersetClient` already handles authentication, network requests, and basic CRUD for dashboards/datasets. Adding environment-specific fetching and database listing is a natural extension.
- **New Endpoints to use**:
- `GET /api/v1/database/`: List all databases.
- `GET /api/v1/database/{id}`: Get detailed database config.

View File

@@ -0,0 +1,109 @@
# Feature Specification: Migration Process and UI Redesign
**Feature Branch**: `001-migration-ui-redesign`
**Created**: 2025-12-20
**Status**: Draft
**Input**: User description: "I want to rework the migration process and its interface. 1. There should be a dropdown list of environments (source and target), plus a simple checkbox for database replacement. 2. Database replacement should use predefined pairs; a separate tab is needed that reads the databases from the source and the target and lets them be mapped, initially suggesting pairs empirically, e.g. 'Dev Clickhouse' -> 'Prod Clickhouse'. The mapping must be saved and remain editable."
## Clarifications
### Session 2025-12-20
- Q: Scope of Database Mapping → A: Map the full configuration object obtained from the Superset API.
- Q: Persistence of mappings → A: Use a SQLite database for storing mappings.
- Q: Handling of missing mappings during migration → A: Show a modal dialog during the migration process to prompt for missing mappings.
- Q: Empirical matching algorithm details → A: Use name-based fuzzy matching (ignoring common prefixes like Dev/Prod).
- Q: Scope of "Replace Database" toggle → A: Apply replacement to all assets (Dashboards, Datasets, Charts) included in the migration.
- Q: Backend exposure of Superset databases → A: Dedicated environment database endpoints (e.g., `/api/environments/{id}/databases`).
- Q: Superset API authentication → A: Use stored environment credentials from the backend.
- Q: Error handling for unreachable environments → A: Return structured error responses (502/503) with descriptive messages.
- Q: Database list filtering → A: Return all available databases with metadata (engine type, etc.).
- Q: Handling large database lists → A: Return full list (no pagination) for simplicity.
## User Scenarios & Testing *(mandatory)*
### User Story 1 - Environment-Based Migration Setup (Priority: P1)
As a migration operator, I want to easily select the source and target environments from a list so that I can quickly define the scope of my migration without manual URL entry.
**Why this priority**: This is the core interaction for starting any migration. Using predefined environments reduces errors and improves speed.
**Independent Test**: Can be tested by opening the migration page and verifying that the "Source" and "Target" dropdowns are populated with configured environments and can be selected.
**Acceptance Scenarios**:
1. **Given** multiple environments are configured in settings, **When** I open the migration page, **Then** I should see two dropdowns for "Source" and "Target" containing these environments.
2. **Given** a source and target are selected, **When** I toggle the "Replace Database" checkbox, **Then** the system should prepare to apply database mappings during the next migration step.
---
### User Story 2 - Database Mapping Management (Priority: P1)
As an administrator, I want to define how databases in my development environment map to databases in production so that my dashboards and datasets work correctly after migration.
**Why this priority**: Migrations often fail or require manual fixups because database references point to the wrong environment. Automated mapping is critical for reliable migrations.
**Independent Test**: Can be tested by navigating to the "Database Mapping" tab, fetching databases, and verifying that mappings can be created, saved, and edited.
**Acceptance Scenarios**:
1. **Given** a source and target environment are selected, **When** I open the "Database Mapping" tab, **Then** the system should fetch and display lists of databases from both environments.
2. **Given** the database lists are loaded, **When** the system identifies similar names (e.g., "Dev Clickhouse" and "Prod Clickhouse"), **Then** it should automatically suggest these as a mapping pair.
3. **Given** suggested or manual mappings, **When** I click "Save Mappings", **Then** these pairs should be persisted and associated with the selected environment pair.
---
### User Story 3 - Migration with Automated DB Replacement (Priority: P2)
As a user, I want the migration process to automatically update database references based on my saved mappings so that I don't have to manually edit exported files or post-migration settings.
**Why this priority**: This delivers the actual value of the mapping feature by automating a tedious and error-prone task.
**Independent Test**: Can be tested by running a migration with "Replace Database" enabled and verifying that the resulting assets in the target environment point to the mapped databases.
**Acceptance Scenarios**:
1. **Given** saved mappings exist for the selected environments, **When** I start a migration with "Replace Database" enabled, **Then** the system should replace all source database IDs/names with their corresponding target values during the transfer.
2. **Given** "Replace Database" is enabled but a source database has no mapping, **When** the migration runs, **Then** the system should pause and show a modal dialog prompting the user to provide a mapping on-the-fly for the missing database.
---
### Edge Cases
- **Environment Connectivity**: If the source or target environment is unreachable, the backend MUST return a structured error (502/503), and the frontend MUST display a clear connection error with a retry option.
- **Duplicate Mappings**: How does the system handle multiple source databases mapping to the same target database? (Assumption: This is allowed, as multiple dev DBs might consolidate into one prod DB).
- **Missing Target Database**: What if a mapped target database no longer exists in the target environment? (Assumption: Validation should occur before migration starts, highlighting broken mappings).
## Requirements *(mandatory)*
### Functional Requirements
- **FR-001**: System MUST provide dropdown menus for selecting "Source Environment" and "Target Environment" on the migration screen.
- **FR-002**: System MUST provide a "Replace Database" checkbox that, when enabled, triggers the database mapping logic for all assets (Dashboards, Datasets, Charts) during migration.
- **FR-003**: System MUST include a dedicated "Database Mapping" tab or view accessible from the migration interface.
- **FR-004**: System MUST fetch available databases from both source and target environments via their respective APIs when the mapping tab is opened.
- **FR-005**: System MUST implement a name-based fuzzy matching algorithm to suggest initial mappings, ignoring common environment prefixes (e.g., "Dev", "Prod").
- **FR-006**: System MUST allow users to manually override suggested mappings and create new ones via a drag-and-drop or dropdown-based interface.
- **FR-007**: System MUST persist database mappings in a local SQLite database, keyed by the source and target environment identifiers.
- **FR-008**: System MUST provide an "Edit" capability for existing mappings, allowing users to update or delete them.
- **FR-009**: During migration, if "Replace Database" is active, the system MUST intercept asset definitions (JSON/YAML) and replace database references according to the active mapping table.
### Key Entities *(include if feature involves data)*
- **Environment**: A configured Superset instance (Name, URL, Credentials).
- **Database Mapping**: A record linking a source database configuration (including metadata like engine type) to a target database configuration for a specific `source_env` -> `target_env` pair.
- **Migration Configuration**: The set of parameters for a migration job, including selected environments and the "Replace Database" toggle state.
## Success Criteria *(mandatory)*
### Measurable Outcomes
- **SC-001**: Users can complete a full database mapping for 5+ databases in under 60 seconds using the empirical suggestions.
- **SC-002**: 100% of assets migrated with "Replace Database" enabled correctly reference the target databases as defined in the mapping table.
- **SC-003**: Mapping persistence allows users to run subsequent migrations between the same environments without re-configuring database pairs in 100% of cases.
- **SC-004**: The system successfully identifies and suggests at least 90% of matching pairs when naming follows a "Prefix + Name" pattern (e.g., "Dev-Sales" -> "Prod-Sales").
## Assumptions
- **AS-001**: Environments are already configured in the application's global settings.
- **AS-002**: The backend has access to stored credentials for both source and target environments to perform API requests.
- **AS-003**: Database names or IDs are stable enough within an environment to be used as reliable mapping keys.

View File

@@ -0,0 +1,186 @@
---
description: "Task list for Migration Process and UI Redesign implementation"
---
# Tasks: Migration Process and UI Redesign
**Input**: Design documents from `specs/001-migration-ui-redesign/`
**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, quickstart.md
**Tests**: Tests are NOT explicitly requested in the feature specification, so they are omitted from this task list.
**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story.
## Format: `[ID] [P?] [Story] Description`
- **[P]**: Can run in parallel (different files, no dependencies)
- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3)
- Include exact file paths in descriptions
## Path Conventions
- **Web app**: `backend/src/`, `frontend/src/`
---
## Phase 1: Setup (Shared Infrastructure)
**Purpose**: Project initialization and basic structure
- [ ] T001 Create project structure per implementation plan in `backend/src/` and `frontend/src/`
- [ ] T002 [P] Install backend dependencies (rapidfuzz, sqlalchemy) in `backend/requirements.txt`
- [ ] T003 [P] Install frontend dependencies (if any new) in `frontend/package.json`
---
## Phase 2: Foundational (Blocking Prerequisites)
**Purpose**: Core infrastructure that MUST be complete before ANY user story can be implemented
**⚠️ CRITICAL**: No user story work can begin until this phase is complete
- [ ] T004 Setup SQLite database schema and SQLAlchemy models in `backend/src/models/mapping.py`
- [ ] T005 [P] Implement fuzzy matching utility using RapidFuzz in `backend/src/core/utils/matching.py`
- [ ] T006 [P] Extend SupersetClient to support database listing and metadata fetching in `backend/src/core/superset_client.py`
- [ ] T007 Configure database mapping persistence layer in `backend/src/core/database.py`
**Checkpoint**: Foundation ready - user story implementation can now begin in parallel
---
## Phase 3: User Story 1 - Environment-Based Migration Setup (Priority: P1) 🎯 MVP
**Goal**: Enable selection of source and target environments and toggle database replacement.
**Independent Test**: Open the migration page and verify that the "Source" and "Target" dropdowns are populated with configured environments and can be selected.
### Implementation for User Story 1
- [ ] T008 [P] [US1] Implement environment selection API endpoints in `backend/src/api/routes/environments.py`
- [ ] T009 [P] [US1] Create `EnvSelector.svelte` component for source/target selection in `frontend/src/components/EnvSelector.svelte`
- [ ] T010 [US1] Integrate `EnvSelector` and "Replace Database" toggle into migration dashboard in `frontend/src/routes/migration/+page.svelte`
- [ ] T011 [US1] Add validation to ensure source and target environments are different in `frontend/src/routes/migration/+page.svelte`
**Checkpoint**: At this point, User Story 1 should be fully functional and testable independently.
---
## Phase 4: User Story 2 - Database Mapping Management (Priority: P1)
**Goal**: Fetch databases from environments, suggest mappings using fuzzy matching, and allow manual overrides/persistence.
**Independent Test**: Navigate to the "Database Mapping" tab, fetch databases, and verify that mappings can be created, saved, and edited.
### Implementation for User Story 2
- [ ] T012 [P] [US2] Implement database mapping CRUD API endpoints in `backend/src/api/routes/mappings.py`
- [ ] T013 [US2] Implement mapping service with fuzzy matching logic in `backend/src/services/mapping_service.py`
- [ ] T014 [P] [US2] Create `MappingTable.svelte` component for displaying and editing pairs in `frontend/src/components/MappingTable.svelte`
- [ ] T015 [US2] Create database mapping management view in `frontend/src/routes/migration/mappings/+page.svelte`
- [ ] T016 [US2] Implement "Fetch Databases" action and suggestion highlighting in `frontend/src/routes/migration/mappings/+page.svelte`
**Checkpoint**: At this point, User Stories 1 AND 2 should both work independently.
---
## Phase 5: User Story 3 - Migration with Automated DB Replacement (Priority: P2)
**Goal**: Intercept assets during migration, apply database mappings, and prompt for missing ones.
**Independent Test**: Run a migration with "Replace Database" enabled and verify that the resulting assets in the target environment point to the mapped databases.
### Implementation for User Story 3
- [ ] T017 [US3] Implement ZIP-based asset interception and YAML transformation logic in `backend/src/core/migration_engine.py`
- [ ] T018 [US3] Integrate database mapping application into the migration job execution flow in `backend/src/core/task_manager.py`
- [ ] T019 [P] [US3] Create `MissingMappingModal.svelte` for on-the-fly mapping prompts in `frontend/src/components/MissingMappingModal.svelte`
- [ ] T020 [US3] Implement backend pause and frontend modal trigger for missing mappings in `backend/src/api/routes/tasks.py` and `frontend/src/components/TaskRunner.svelte`
**Checkpoint**: All user stories should now be independently functional.
---
## Phase 6: Polish & Cross-Cutting Concerns
**Purpose**: Improvements that affect multiple user stories
- [ ] T021 [P] Update documentation in `docs/` to include database mapping instructions
- [ ] T022 Code cleanup and refactoring of migration logic
- [ ] T023 [P] Performance optimization for fuzzy matching and ZIP processing
- [ ] T024 Run `quickstart.md` validation to ensure end-to-end flow works as documented
---
## Dependencies & Execution Order
### Phase Dependencies
- **Setup (Phase 1)**: No dependencies - can start immediately
- **Foundational (Phase 2)**: Depends on Setup completion - BLOCKS all user stories
- **User Stories (Phase 3+)**: All depend on Foundational phase completion
- User stories can then proceed in parallel (if staffed)
- Or sequentially in priority order (P1 → P2 → P3)
- **Polish (Final Phase)**: Depends on all desired user stories being complete
### User Story Dependencies
- **User Story 1 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories
- **User Story 2 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories
- **User Story 3 (P2)**: Can start after Foundational (Phase 2) - Depends on US1/US2 for mapping data and configuration
### Within Each User Story
- Models before services
- Services before endpoints
- Core implementation before integration
- Story complete before moving to next priority
### Parallel Opportunities
- All Setup tasks marked [P] can run in parallel
- All Foundational tasks marked [P] can run in parallel (within Phase 2)
- Once Foundational phase completes, US1 and US2 can start in parallel
- Models and UI components within a story marked [P] can run in parallel
---
## Parallel Example: User Story 2
```bash
# Launch backend and frontend components for User Story 2 together:
Task: "Implement database mapping CRUD API endpoints in backend/src/api/routes/mappings.py"
Task: "Create MappingTable.svelte component for displaying and editing pairs in frontend/src/components/MappingTable.svelte"
```
---
## Implementation Strategy
### MVP First (User Story 1 & 2)
1. Complete Phase 1: Setup
2. Complete Phase 2: Foundational (CRITICAL - blocks all stories)
3. Complete Phase 3: User Story 1
4. Complete Phase 4: User Story 2
5. **STOP and VALIDATE**: Test environment selection and mapping management independently
6. Deploy/demo if ready
### Incremental Delivery
1. Complete Setup + Foundational → Foundation ready
2. Add User Story 1 → Test independently → Deploy/Demo
3. Add User Story 2 → Test independently → Deploy/Demo (MVP!)
4. Add User Story 3 → Test independently → Deploy/Demo
5. Each story adds value without breaking previous stories
---
## Notes
- [P] tasks = different files, no dependencies
- [Story] label maps task to specific user story for traceability
- Each user story should be independently completable and testable
- Commit after each task or logical group
- Stop at any checkpoint to validate story independently
- Avoid: vague tasks, same file conflicts, cross-story dependencies that break independence

View File

@@ -0,0 +1,24 @@
# WebSocket Contract: Task Logs
## Endpoint
`WS /ws/logs/{task_id}`
## Description
Streams real-time logs for a specific task.
## Connection Parameters
- `task_id`: UUID of the task to monitor.
## Message Format (Server -> Client)
```json
{
"task_id": "uuid",
"message": "Log message text",
"timestamp": "2025-12-20T20:20:00Z",
"level": "INFO"
}
```
## Error Handling
- If `task_id` is invalid, the connection is closed with code `4004` (Not Found).
- If the connection fails, the client should attempt reconnection with exponential backoff.
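For illustration, a reconnection loop with exponential backoff could look like the following (shown in Python with the `websockets` package rather than the actual SvelteKit client; the URL and backoff values are assumptions):
```python
# Illustrative client-side reconnection loop; not the frontend implementation.
import asyncio
import json
import websockets

async def follow_task_logs(task_id: str, base_url: str = "ws://localhost:8000"):
    delay = 1.0
    while True:
        try:
            async with websockets.connect(f"{base_url}/ws/logs/{task_id}") as ws:
                delay = 1.0  # reset backoff once connected
                async for raw in ws:
                    entry = json.loads(raw)
                    print(f"[{entry['level']}] {entry['message']}")
            break  # the server closed the stream normally (e.g. task finished)
        except (OSError, websockets.ConnectionClosedError):
            # Note: a 4004 close (unknown task) should ideally abort instead of retrying.
            await asyncio.sleep(delay)
            delay = min(delay * 2, 30.0)  # exponential backoff, capped at 30s
```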

View File

@@ -0,0 +1,34 @@
# Specification Quality Checklist: Configurable Belief State Logging
**Purpose**: Validate specification completeness and quality before proceeding to planning
**Created**: 2025-12-26
**Feature**: [specs/006-configurable-belief-logs/spec.md](../spec.md)
## Content Quality
- [x] No implementation details (languages, frameworks, APIs)
- [x] Focused on user value and business needs
- [x] Written for non-technical stakeholders
- [x] All mandatory sections completed
## Requirement Completeness
- [x] No [NEEDS CLARIFICATION] markers remain
- [x] Requirements are testable and unambiguous
- [x] Success criteria are measurable
- [x] Success criteria are technology-agnostic (no implementation details)
- [x] All acceptance scenarios are defined
- [x] Edge cases are identified
- [x] Scope is clearly bounded
- [x] Dependencies and assumptions identified
## Feature Readiness
- [x] All functional requirements have clear acceptance criteria
- [x] User scenarios cover primary flows
- [x] Feature meets measurable outcomes defined in Success Criteria
- [x] No implementation details leak into specification
## Notes
- Items marked incomplete require spec updates before `/speckit.clarify` or `/speckit.plan`

View File

@@ -0,0 +1,56 @@
# API Contract: Settings Update
## PATCH /api/settings/global
Updates the global application settings, including the new logging configuration.
### Request Body
**Content-Type**: `application/json`
```json
{
"backup_path": "string",
"default_environment_id": "string (optional)",
"logging": {
"level": "string (DEBUG, INFO, WARNING, ERROR, CRITICAL)",
"file_path": "string (optional)",
"max_bytes": "integer (default: 10485760)",
"backup_count": "integer (default: 5)",
"enable_belief_state": "boolean (default: true)"
}
}
```
### Response
**Status**: `200 OK`
**Content-Type**: `application/json`
```json
{
"backup_path": "string",
"default_environment_id": "string (optional)",
"logging": {
"level": "string",
"file_path": "string",
"max_bytes": "integer",
"backup_count": "integer",
"enable_belief_state": "boolean"
}
}
```
### Example
**Request**
```json
{
"backup_path": "backups",
"logging": {
"level": "DEBUG",
"file_path": "logs/app.log",
"enable_belief_state": true
}
}
```

View File

@@ -0,0 +1,74 @@
# Data Model: Configurable Belief State Logging
## 1. Configuration Models
These models extend the existing `ConfigModels` in `backend/src/core/config_models.py`.
### 1.1. LoggingConfig
Defines the configuration for the application's logging system.
| Field | Type | Default | Description |
|---|---|---|---|
| `level` | `str` | `"INFO"` | The logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL). |
| `file_path` | `Optional[str]` | `"logs/app.log"` | Path to the log file. If None, file logging is disabled. |
| `max_bytes` | `int` | `10485760` (10MB) | Maximum size of a log file before rotation. |
| `backup_count` | `int` | `5` | Number of backup files to keep. |
| `enable_belief_state` | `bool` | `True` | Whether to enable structured Belief State logging (Entry/Exit). |
```python
class LoggingConfig(BaseModel):
level: str = "INFO"
file_path: Optional[str] = "logs/app.log"
max_bytes: int = 10 * 1024 * 1024
backup_count: int = 5
enable_belief_state: bool = True
```
### 1.2. GlobalSettings (Updated)
Updates the existing `GlobalSettings` to include `LoggingConfig`.
| Field | Type | Default | Description |
|---|---|---|---|
| `logging` | `LoggingConfig` | `LoggingConfig()` | The logging configuration object. |
```python
class GlobalSettings(BaseModel):
backup_path: str
default_environment_id: Optional[str] = None
logging: LoggingConfig = Field(default_factory=LoggingConfig)
```
## 2. Logger Entities
These entities are part of the `backend/src/core/logger.py` module.
### 2.1. LogEntry (Existing)
Represents a single log record.
| Field | Type | Description |
|---|---|---|
| `timestamp` | `datetime` | UTC timestamp of the log. |
| `level` | `str` | Log level. |
| `message` | `str` | Log message. |
| `context` | `Optional[Dict[str, Any]]` | Additional context data. |
### 2.2. BeliefState (Concept)
Represents the state of execution in the "Belief State" model.
- **Entry**: Entering a logical block.
- **Action**: Performing a core action within the block.
- **Coherence**: Verifying the state (OK or Failed).
- **Exit**: Exiting the logical block.
Format: `[{ANCHOR_ID}][{STATE}] {Message}`
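For illustration, a message in this format could be parsed with a small regex; the pattern below is an assumption, not part of the data model:
```python
# Illustrative parser for the [ANCHOR_ID][STATE] format described above.
import re

BELIEF_LINE = re.compile(r"^\[(?P<anchor>[^\]]+)\]\[(?P<state>[^\]]+)\]\s*(?P<message>.*)$")

def parse_belief_line(line: str):
    match = BELIEF_LINE.match(line)
    return match.groupdict() if match else None

# parse_belief_line("[MyFunction][Coherence:OK]")
# -> {"anchor": "MyFunction", "state": "Coherence:OK", "message": ""}
```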
## 3. Relationships
- `AppConfig` contains `GlobalSettings`.
- `GlobalSettings` contains `LoggingConfig`.
- `ConfigManager` reads/writes `AppConfig`.
- `ConfigManager` configures the global `logger` based on `LoggingConfig`.

View File

@@ -0,0 +1,81 @@
# Implementation Plan: Configurable Belief State Logging
**Branch**: `006-configurable-belief-logs` | **Date**: 2025-12-26 | **Spec**: specs/006-configurable-belief-logs/spec.md
**Input**: Feature specification from `/specs/006-configurable-belief-logs/spec.md`
**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
## Summary
Implement a configurable logging system with "Belief State" support (Entry, Action, Coherence, Exit).
Approach: Use Python's `logging` module with a custom Context Manager (`belief_scope`) and extend `GlobalSettings` with a `LoggingConfig` model.
## Technical Context
**Language/Version**: Python 3.9+
**Primary Dependencies**: FastAPI (Backend), Pydantic (Config), Svelte (Frontend)
**Storage**: Filesystem (for log files), JSON (for configuration persistence)
**Testing**: pytest (Backend), manual verification (Frontend)
**Target Platform**: Linux server (primary), cross-platform compatible
**Project Type**: Web application (Backend + Frontend)
**Performance Goals**: Low-overhead logging (<1ms per log entry); largely non-blocking for the main thread
**Constraints**: Must use standard library `logging` where possible; Log rotation to prevent disk overflow
**Scale/Scope**: Configurable log levels and retention policies
## Constitution Check
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
- **Library-First**: N/A (Core infrastructure)
- **CLI Interface**: N/A (Configured via UI/API/JSON)
- **Test-First**: Will require unit tests for `belief_scope` and config updates.
- **Integration Testing**: Will require testing the settings API.
- **Observability**: This feature *is* the observability enhancement.
## Project Structure
### Documentation (this feature)
```text
specs/[###-feature]/
├── plan.md # This file (/speckit.plan command output)
├── research.md # Phase 0 output (/speckit.plan command)
├── data-model.md # Phase 1 output (/speckit.plan command)
├── quickstart.md # Phase 1 output (/speckit.plan command)
├── contracts/ # Phase 1 output (/speckit.plan command)
└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
```
### Source Code (repository root)
```text
backend/
├── src/
│ ├── core/
│ │ ├── config_models.py # Add LoggingConfig
│ │ ├── config_manager.py # Update config loading/saving
│ │ └── logger.py # Add belief_scope and configure_logger
│ └── api/
│ └── routes/
│ └── settings.py # Update GlobalSettings endpoint
└── tests/
└── test_logger.py # New tests for logger logic
frontend/
├── src/
│ ├── pages/
│ │ └── Settings.svelte # Add logging UI controls
│ └── lib/
│ └── api.js # Update API calls if needed
```
**Structure Decision**: Enhance the existing backend/frontend structure.
## Complexity Tracking
> **Fill ONLY if Constitution Check has violations that must be justified**
| Violation | Why Needed | Simpler Alternative Rejected Because |
|-----------|------------|-------------------------------------|
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |

View File

@@ -0,0 +1,104 @@
# Quickstart: Configurable Belief State Logging
## 1. Configuration
The logging system is configured via the `GlobalSettings` in `config.json` or through the Settings UI.
### 1.1. Default Configuration
```json
{
"environments": [],
"settings": {
"backup_path": "backups",
"logging": {
"level": "INFO",
"file_path": "logs/app.log",
"max_bytes": 10485760,
"backup_count": 5,
"enable_belief_state": true
}
}
}
```
### 1.2. Changing Log Level
To change the log level to `DEBUG`, update the `logging.level` field in `config.json` or use the API:
```bash
curl -X PATCH http://localhost:8000/api/settings/global \
-H "Content-Type: application/json" \
-d '{
"backup_path": "backups",
"logging": {
"level": "DEBUG",
"file_path": "logs/app.log",
"max_bytes": 10485760,
"backup_count": 5,
"enable_belief_state": true
}
}'
```
## 2. Using Belief State Logging
### 2.1. Basic Usage
Use the `belief_scope` context manager to automatically log Entry, Exit, and Coherence states.
```python
from backend.src.core.logger import logger, belief_scope
def my_function():
with belief_scope("MyFunction"):
# Logs: [MyFunction][Entry]
logger.info("Doing something important")
# Logs: [MyFunction][Action] Doing something important
# ... logic ...
# Logs: [MyFunction][Coherence:OK]
# Logs: [MyFunction][Exit]
```
### 2.2. Error Handling
If an exception occurs within the scope, it is caught, logged as a failure, and re-raised.
```python
def failing_function():
with belief_scope("FailingFunc"):
raise ValueError("Something went wrong")
# Logs: [FailingFunc][Entry]
# Logs: [FailingFunc][Coherence:Failed] Something went wrong
# Re-raises ValueError
```
### 2.3. Custom Messages
You can provide an optional message to `belief_scope`.
```python
with belief_scope("DataProcessor", "Processing batch #1"):
# Logs: [DataProcessor][Entry] Processing batch #1
pass
```
## 3. Log Output Format
### 3.1. Standard Log
```text
[2025-12-26 10:00:00,000][INFO][superset_tools_app] System initialized
```
### 3.2. Belief State Log
```text
[2025-12-26 10:00:01,000][INFO][superset_tools_app] [MyFunction][Entry]
[2025-12-26 10:00:01,050][INFO][superset_tools_app] [MyFunction][Action] Processing data
[2025-12-26 10:00:01,100][INFO][superset_tools_app] [MyFunction][Coherence:OK]
[2025-12-26 10:00:01,100][INFO][superset_tools_app] [MyFunction][Exit]
```

View File

@@ -0,0 +1,109 @@
# Research: Configurable Belief State Logging
## 1. Introduction
This research explores the implementation of a configurable logging system that supports "Belief State" logging (Entry, Action, Coherence, Exit) and allows users to customize log levels, formats, and file persistence.
## 2. Analysis of Existing System
- **Language**: Python 3.9+ (Backend)
- **Framework**: FastAPI (inferred from context; `uvicorn` is referenced even though the framework itself is not shown explicitly in the snippets)
- **Logging**: Standard Python `logging` module.
- **Configuration**: Pydantic models (`ConfigModels`) managed by `ConfigManager`, persisting to `config.json`.
- **Current Logger**:
- `backend/src/core/logger.py`:
- Uses `logging.getLogger("superset_tools_app")`
- Has `StreamHandler` (console) and `WebSocketLogHandler` (streaming).
- `WebSocketLogHandler` buffers logs in a `deque`.
## 3. Requirements Analysis
- **Belief State**: Need a structured way to log `[ANCHOR_ID][STATE] Message`.
- **Context Manager**: Need a `with belief_scope("ID"):` pattern.
- **Configuration**: Need to add `LoggingConfig` to `GlobalSettings`.
- **File Logging**: Need `RotatingFileHandler` with size limits.
- **Dynamic Reconfiguration**: Need to update logger handlers/levels when config changes.
## 4. Proposed Solution
### 4.1. Data Model (`LoggingConfig`)
We will add a `LoggingConfig` model to `backend/src/core/config_models.py`:
```python
class LoggingConfig(BaseModel):
level: str = "INFO" # DEBUG, INFO, WARNING, ERROR, CRITICAL
file_path: Optional[str] = "logs/app.log"
max_bytes: int = 10 * 1024 * 1024 # 10MB
backup_count: int = 5
enable_belief_state: bool = True
```
And update `GlobalSettings`:
```python
class GlobalSettings(BaseModel):
backup_path: str
default_environment_id: Optional[str] = None
logging: LoggingConfig = Field(default_factory=LoggingConfig)
```
### 4.2. Context Manager (`belief_scope`)
We will implement a context manager in `backend/src/core/logger.py`:
```python
from contextlib import contextmanager
@contextmanager
def belief_scope(anchor_id: str, message: str = ""):
# ... logic to log [Entry] ...
try:
yield
# ... logic to log [Coherence:OK] and [Exit] ...
except Exception as e:
# ... logic to log [Coherence:Failed] ...
raise
```
### 4.3. Logger Reconfiguration
We will add a `configure_logger(config: LoggingConfig)` function in `backend/src/core/logger.py` that:
1. Sets the logger level.
2. Removes old file handlers.
3. Adds a new `RotatingFileHandler` if `file_path` is set.
4. Updates a global flag for `enable_belief_state`.
`ConfigManager` will call this function whenever settings are updated.
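A possible shape for `configure_logger`, assuming the module-level `logger` and a private flag read by `belief_scope`; the handler bookkeeping details are illustrative, not the final implementation:
```python
# Sketch of the reconfiguration hook inside backend/src/core/logger.py.
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path

logger = logging.getLogger("superset_tools_app")
_belief_state_enabled = True  # module-level flag consulted by belief_scope

def configure_logger(config: "LoggingConfig") -> None:
    global _belief_state_enabled
    # Fall back to INFO if the configured level is invalid.
    logger.setLevel(getattr(logging, config.level.upper(), logging.INFO))

    # Drop any previously attached file handlers before re-adding.
    for handler in list(logger.handlers):
        if isinstance(handler, RotatingFileHandler):
            logger.removeHandler(handler)
            handler.close()

    if config.file_path:
        Path(config.file_path).parent.mkdir(parents=True, exist_ok=True)
        file_handler = RotatingFileHandler(
            config.file_path,
            maxBytes=config.max_bytes,
            backupCount=config.backup_count,
        )
        file_handler.setFormatter(
            logging.Formatter("[%(asctime)s][%(levelname)s][%(name)s] %(message)s")
        )
        logger.addHandler(file_handler)

    _belief_state_enabled = config.enable_belief_state
```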
### 4.4. Belief State Filtering
If `enable_belief_state` is False:
- `Entry`/`Exit` logs are skipped.
- `Action`/`Coherence` logs are logged as standard messages (maybe without the `[ANCHOR_ID]` prefix if desired, but retaining it is usually better for consistency).
## 5. Alternatives Considered
- **Alternative A**: Use a separate logger for Belief State.
- *Pros*: Cleaner separation.
- *Cons*: Harder to interleave with standard logs in the same stream/file.
- *Decision*: Rejected. We want a unified log stream.
- **Alternative B**: Use structlog.
- *Pros*: Powerful structured logging.
- *Cons*: Adds a new dependency.
- *Decision*: Rejected. Standard `logging` is sufficient and already used.
## 6. Implementation Steps
1. **Modify `backend/src/core/config_models.py`**: Add `LoggingConfig` and update `GlobalSettings`.
2. **Modify `backend/src/core/logger.py`**:
- Add `configure_logger` function.
- Implement `belief_scope` context manager.
- Implement `RotatingFileHandler`.
3. **Modify `backend/src/core/config_manager.py`**: Call `configure_logger` on init and update.
4. **Frontend**: Update Settings page to allow editing `LoggingConfig`.
## 7. Conclusion
The proposed solution leverages the existing Pydantic/ConfigManager architecture and standard Python logging, minimizing disruption while meeting all requirements.

View File

@@ -0,0 +1,88 @@
# Feature Specification: Configurable Belief State Logging
**Feature Branch**: `006-configurable-belief-logs`
**Created**: 2025-12-26
**Status**: Draft
**Input**: User description: "I am not satisfied with the current state of the project logs produced after running run.sh. We need to design a log storage system that is configurable, includes logging of the development agent's belief state (see semantic_protocol.md), and is flexibly adjustable."
## Clarifications
### Session 2025-12-26
- Q: Log rotation policy? → A: Size-based rotation (e.g., 10MB limit, 5 backups).
- Q: Belief State disabled behavior? → A: Smart Filter (Suppress Entry/Exit; keep Action/Coherence as standard logs).
- Q: Implementation pattern? → A: Context Manager (e.g., `with belief_scope("ID"):`) to auto-handle Entry/Exit/Error.
## User Scenarios & Testing
### User Story 1 - Structured Belief State Logging (Priority: P1)
As a developer or AI agent, I want logs to clearly indicate the execution flow and internal state ("Belief State") of the system using a standardized format, so that I can easily debug logic errors and verify semantic coherence.
**Why this priority**: This is the core requirement to align with the `semantic_protocol.md` and improve debugging capabilities for the AI agent.
**Independent Test**: Trigger an action in the system (e.g., running a migration) and verify that the logs contain entries formatted as `[ANCHOR_ID][STATE] Message`, representing the entry, action, and exit states of the code blocks.
**Acceptance Scenarios**:
1. **Given** a function wrapped with a Belief State logger, **When** the function is called, **Then** a log entry `[ANCHOR_ID][Entry] ...` is generated.
2. **Given** a function execution, **When** the core logic is executed, **Then** a log entry `[ANCHOR_ID][Action] ...` is generated.
3. **Given** a function execution completes successfully, **When** it returns, **Then** a log entry `[ANCHOR_ID][Coherence:OK]` and `[ANCHOR_ID][Exit]` are generated.
4. **Given** a function execution fails, **When** an exception is raised, **Then** a log entry `[ANCHOR_ID][Coherence:Failed]` is generated.
---
### User Story 2 - Configurable Logging Settings (Priority: P2)
As a user, I want to be able to configure log levels, formats, and destinations via the application settings, so that I can control the verbosity and storage of logs without changing code.
**Why this priority**: Provides the "flexible" and "customizable" aspect of the requirement, allowing users to adapt logging to their needs (dev vs prod).
**Independent Test**: Change the log level in the settings (e.g., from INFO to DEBUG) and verify that debug messages start appearing in the output.
**Acceptance Scenarios**:
1. **Given** the application is running, **When** I update the log level to "DEBUG" in the settings, **Then** debug-level logs are captured and displayed.
2. **Given** the application is running, **When** I enable "File Logging" in settings, **Then** logs are written to the specified file path.
3. **Given** the application is running, **When** I disable "Belief State" logs in settings, **Then** "Entry" and "Exit" logs are suppressed, while "Action" and "Coherence" logs are retained as standard log entries.
---
### Edge Cases
- **Invalid Configuration**: What happens if the user provides an invalid log level (e.g., "SUPER_LOUD")? The system should fall back to a safe default (e.g., "INFO") and log a warning.
- **File System Errors**: What happens if the configured log file path is not writable? The system should fall back to console logging and alert the user.
- **High Volume**: What happens if "Belief State" logging generates too much noise? The system should remain performant, and users should be able to toggle it off easily.
## Requirements
### Functional Requirements
- **FR-001**: The system MUST support a `LoggingConfig` structure within the global configuration to store settings for level, format, file path, and belief state enablement.
- **FR-002**: The logging system MUST implement a standard format for "Belief State" logs as defined in the system protocols: `[{ANCHOR_ID}][{STATE}] {Message}`.
- **FR-003**: The system MUST provide a standard mechanism to easily generate Belief State logs without repetitive boilerplate code, specifically using a **Context Manager** pattern.
- **FR-004**: The supported Belief States MUST include: `Entry`, `Action`, `Coherence:OK`, `Coherence:Failed`, `Exit`.
- **FR-005**: The system MUST allow dynamic reconfiguration of the logger (e.g., changing levels) when settings are updated.
- **FR-006**: The real-time log stream MUST preserve the structured format of Belief State logs so the frontend can potentially render them specially.
- **FR-007**: Logs MUST optionally be persisted to a file if configured.
- **FR-008**: The file logging system MUST implement size-based rotation (default: 10MB limit, 5 backups) to prevent disk saturation.
- **FR-009**: When `enable_belief_state` is False, the logger MUST suppress `Entry` and `Exit` states but retain `Action` and `Coherence` states (stripping the structured prefix if necessary or keeping it as standard info).
- **FR-010**: The Context Manager MUST automatically log `[Entry]` on start, `[Exit]` on success, and `[Coherence:Failed]` if an exception occurs (re-raising the exception).
### Key Entities
- **LoggingConfig**: A configuration object defining `level` (text), `file_path` (text), `format` (structure), `enable_belief_state` (boolean).
- **BeliefStateAdapter**: A component that enforces the semantic protocol format for log entries.
## Success Criteria
### Measurable Outcomes
- **SC-001**: Developers can trace the execution flow of a specific task using only the `[ANCHOR_ID]` filtered logs.
- **SC-002**: Changing the log level via the configuration file or API is reflected in the log output immediately (or upon restart).
- **SC-003**: All "Belief State" logs strictly follow the `[ID][State]` format, allowing for regex parsing.
- **SC-004**: System supports logging to both console and file simultaneously if configured.
## Assumptions
- The existing real-time streaming mechanism can be adapted or chained with the new logging configuration.
- The system protocol definition of states is the source of truth.

View File

@@ -0,0 +1,61 @@
# Tasks: Configurable Belief State Logging
**Spec**: `specs/006-configurable-belief-logs/spec.md`
**Plan**: `specs/006-configurable-belief-logs/plan.md`
**Status**: Completed
## Phase 1: Setup
*Goal: Initialize project structure for logging.*
- [x] T001 Ensure logs directory exists at `logs/` (relative to project root)
## Phase 2: Foundational
*Goal: Define data models and infrastructure required for logging configuration.*
- [x] T002 Define `LoggingConfig` model in `backend/src/core/config_models.py`
- [x] T003 Update `GlobalSettings` model to include `logging` field in `backend/src/core/config_models.py`
- [x] T004 Update `ConfigManager` to handle logging configuration persistence in `backend/src/core/config_manager.py`
## Phase 3: User Story 1 - Structured Belief State Logging
*Goal: Implement the core "Belief State" logging logic with context managers.*
*Priority: P1*
**Independent Test**: Run `pytest backend/tests/test_logger.py` and verify `belief_scope` generates `[ID][Entry]`, `[ID][Action]`, and `[ID][Exit]` logs.
- [x] T005 [US1] Create unit tests for belief state logging in `backend/tests/test_logger.py`
- [x] T006 [US1] Implement `belief_scope` context manager in `backend/src/core/logger.py`
- [x] T007 [US1] Implement log formatting and smart filtering (suppress Entry/Exit if disabled) in `backend/src/core/logger.py`
## Phase 4: User Story 2 - Configurable Logging Settings
*Goal: Expose logging configuration to the user via API and UI.*
*Priority: P2*
**Independent Test**: Update settings via API/UI and verify log level changes (e.g., DEBUG logs appear) and file rotation is active.
- [x] T008 [US2] Implement `configure_logger` function to apply settings (level, file, rotation) in `backend/src/core/logger.py`
- [x] T009 [US2] Update settings API endpoint to handle `logging` updates in `backend/src/api/routes/settings.py`
- [x] T010 [P] [US2] Add Logging configuration section (Level, File Path, Enable Belief State) to `frontend/src/pages/Settings.svelte`
## Final Phase: Polish & Cross-Cutting Concerns
*Goal: Verify system stability and cleanup.*
- [x] T011 Verify log rotation works by generating dummy logs (manual verification)
- [x] T012 Ensure default configuration provides a sensible out-of-the-box experience
## Dependencies
```mermaid
graph TD
Setup[Phase 1: Setup] --> Foundational[Phase 2: Foundational]
Foundational --> US1[Phase 3: US1 Belief State]
US1 --> US2[Phase 4: US2 Configurable Settings]
US2 --> Polish[Final Phase: Polish]
```
## Parallel Execution Opportunities
- **US2**: Frontend task (T010) can be implemented in parallel with Backend tasks (T008, T009) once the API contract is finalized.
## Implementation Strategy
1. **MVP**: Complete Phase 1, 2, and 3 to enable structured logging for the agent.
2. **Full Feature**: Complete Phase 4 to allow users to control the verbosity and storage.

View File

@@ -0,0 +1,36 @@
# Specification Quality Checklist: Migration Plugin Dashboard Grid
**Purpose**: Validate specification completeness and quality before proceeding to planning
**Created**: 2025-12-27
**Feature**: [specs/007-migration-dashboard-grid/spec.md](../spec.md)
## Content Quality
- [x] No implementation details (languages, frameworks, APIs)
- [x] Focused on user value and business needs
- [x] Written for non-technical stakeholders
- [x] All mandatory sections completed
## Requirement Completeness
- [x] No [NEEDS CLARIFICATION] markers remain
- [x] Requirements are testable and unambiguous
- [x] Success criteria are measurable
- [x] Success criteria are technology-agnostic (no implementation details)
- [x] All acceptance scenarios are defined
- [x] Edge cases are identified
- [x] Scope is clearly bounded
- [x] Dependencies and assumptions identified
## Feature Readiness
- [x] All functional requirements have clear acceptance criteria
- [x] User scenarios cover primary flows
- [x] Feature meets measurable outcomes defined in Success Criteria
- [x] No implementation details leak into specification
## Notes
- The specification clearly defines the UI requirements for the dashboard selection grid.
- "Superset API" is mentioned as the source of truth, which is acceptable as it defines the data boundary.
- Success criteria include specific performance metrics (<200ms filtering).

View File

@@ -0,0 +1,58 @@
# API Contracts: Migration Dashboard Grid
## Endpoints
### 1. List Dashboards
**Method**: `GET`
**Path**: `/api/environments/{env_id}/dashboards`
**Purpose**: Fetch all dashboards from the specified environment for the grid.
**Request Parameters**:
- `env_id` (path): The ID of the environment to fetch from.
**Response**:
- **200 OK**:
```json
[
{
"id": 123,
"title": "Sales Dashboard",
"last_modified": "2023-10-27T10:00:00Z",
"status": "published"
},
{
"id": 124,
"title": "Draft Metrics",
"last_modified": "2023-10-26T15:30:00Z",
"status": "draft"
}
]
```
- **404 Not Found**: Environment not found.
- **500 Internal Server Error**: Superset API error.
## Components (Frontend)
### DashboardGrid
**Props**:
- `dashboards`: `DashboardMetadata[]` - List of dashboards to display.
- `selectedIds`: `number[]` - IDs of currently selected dashboards.
**Events**:
- `selectionChanged`: Emitted when selection changes. Payload: `number[]` (new list of selected IDs).
**State**:
- `filterText`: string - Current filter text.
- `currentPage`: number - Current page index (0-based).
- `pageSize`: number - Items per page (default 20).
- `sortColumn`: string - 'title' | 'last_modified' | 'status'.
- `sortDirection`: 'asc' | 'desc'.
## Superset Client Extension
### `get_dashboards_summary`
**Signature**: `def get_dashboards_summary(self) -> List[Dict]`
**Purpose**: Fetches dashboard metadata optimized for the grid.
**Implementation Detail**:
- Calls `GET /api/v1/dashboard/` with query params `q=(columns:!(id,dashboard_title,changed_on_utc,published))`.
- Maps response fields to `DashboardMetadata` schema.
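A hedged sketch of this extension; the class stub below stands in for the real `SupersetClient`, which is assumed to keep an authenticated `requests.Session` in `self.session` and the instance URL in `self.base_url`:
```python
# Illustrative client extension; authentication is handled by the real client.
from typing import Dict, List
import requests

class SupersetClient:
    def __init__(self, base_url: str):
        self.base_url = base_url.rstrip("/")
        self.session = requests.Session()  # the real client attaches auth headers here

    def get_dashboards_summary(self) -> List[Dict]:
        params = {"q": "(columns:!(id,dashboard_title,changed_on_utc,published))"}
        response = self.session.get(f"{self.base_url}/api/v1/dashboard/", params=params)
        response.raise_for_status()
        return [
            {
                "id": item["id"],
                "title": item["dashboard_title"],
                "last_modified": item["changed_on_utc"],
                "status": "published" if item.get("published") else "draft",
            }
            for item in response.json().get("result", [])
        ]
```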

View File

@@ -0,0 +1,25 @@
# Data Model: Migration Dashboard Grid
## Entities
### DashboardMetadata
**Source**: Superset API (`/api/v1/dashboard/`)
**Purpose**: Represents a dashboard available for migration.
| Field | Type | Description | Source Mapping |
|-------|------|-------------|----------------|
| `id` | Integer | Unique identifier | `id` |
| `title` | String | Display name of the dashboard | `dashboard_title` |
| `last_modified` | String (ISO 8601) | Timestamp of last modification | `changed_on_utc` |
| `status` | Enum ('published', 'draft') | Publication status | `published` (boolean) -> 'published'/'draft' |
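One possible in-code shape for this entity (illustrative only; the real model in `backend/src/models/dashboard.py` may differ):
```python
# Hypothetical representation of DashboardMetadata.
from dataclasses import dataclass
from typing import Literal

@dataclass
class DashboardMetadata:
    id: int
    title: str
    last_modified: str  # ISO 8601
    status: Literal["published", "draft"]

    @classmethod
    def from_superset(cls, row: dict) -> "DashboardMetadata":
        # Applies the Source Mapping column above, including the
        # published boolean -> 'published'/'draft' conversion.
        return cls(
            id=row["id"],
            title=row["dashboard_title"],
            last_modified=row["changed_on_utc"],
            status="published" if row.get("published") else "draft",
        )
```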
## Value Objects
### DashboardSelection
**Purpose**: Represents the user's selection of dashboards to migrate.
| Field | Type | Description |
|-------|------|-------------|
| `selected_ids` | List[Integer] | List of dashboard IDs selected for migration |
| `source_env_id` | String | ID of the source environment |
| `target_env_id` | String | ID of the target environment |

View File

@@ -0,0 +1,85 @@
# Implementation Plan: Migration Plugin Dashboard Grid
**Branch**: `007-migration-dashboard-grid` | **Date**: 2025-12-27 | **Spec**: [specs/007-migration-dashboard-grid/spec.md](specs/007-migration-dashboard-grid/spec.md)
**Input**: Feature specification from `/specs/007-migration-dashboard-grid/spec.md`
**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
## Summary
Add a filterable, sortable dashboard selection grid to the migration plugin. Dashboard metadata (title, last modified date, publication status) is fetched from the Superset API via a backend proxy endpoint, and filtering, sorting, and pagination are handled client-side in a dedicated grid component.
## Technical Context
**Language/Version**: Python 3.9+ (Backend), Node.js 18+ (Frontend)
**Primary Dependencies**: FastAPI, SvelteKit, Tailwind CSS, Pydantic, Superset API
**Storage**: N/A (Superset API integration - read-only for metadata)
**Testing**: pytest (Backend), vitest (Frontend - inferred)
**Target Platform**: Linux server / Containerized
**Project Type**: web application (Backend + Frontend)
**Performance Goals**: Client-side filtering < 200ms for 100+ items
**Constraints**: Must handle large lists via client-side pagination. The spec calls both for "Client-side (Fetch all, filter locally)" and for "Pagination (e.g., 20 per page)". *RESOLVED: Fetch all, paginate locally.*
**Scale/Scope**: ~100s of dashboards per environment.
## Constitution Check
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
- [x] **Causal Validity**: Contracts (API/Data Model) defined before implementation.
- [x] **Immutability**: Module headers (`[DEF]`) preserved/added.
- [x] **Semantic Format**: All new code uses `[DEF]` anchors and metadata.
- [x] **Fractal Complexity**: New components (Grid) kept modular; `SupersetClient` extensions are small methods.
**Status**: PASSED
## Project Structure
### Documentation (this feature)
```text
specs/[###-feature]/
├── plan.md # This file (/speckit.plan command output)
├── research.md # Phase 0 output (/speckit.plan command)
├── data-model.md # Phase 1 output (/speckit.plan command)
├── quickstart.md # Phase 1 output (/speckit.plan command)
├── contracts/ # Phase 1 output (/speckit.plan command)
└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
```
### Source Code (repository root)
```text
backend/
├── src/
│ ├── api/
│ │ └── routes/
│ │ └── environments.py # Update to support dashboard fetching
│ ├── core/
│ │ └── superset_client.py # Update to fetch extended dashboard metadata
│ └── models/
│ └── dashboard.py # New model for Dashboard metadata
└── tests/
└── test_superset_client.py
frontend/
├── src/
│ ├── components/
│ │ ├── DashboardGrid.svelte # New component
│ │ └── Pagination.svelte # New component (if not exists)
│ ├── routes/
│ │ └── migration/
│ │ └── +page.svelte # Update to use DashboardGrid
│ └── types/
│ └── dashboard.ts # New type definitions
```
**Structure Decision**: Standard Web Application structure. Backend updates to `SupersetClient` and API routes to serve dashboard metadata. Frontend updates to include a new `DashboardGrid` component and integrate it into the migration flow.
## Complexity Tracking
> **Fill ONLY if Constitution Check has violations that must be justified**
| Violation | Why Needed | Simpler Alternative Rejected Because |
|-----------|------------|-------------------------------------|
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |

View File

@@ -0,0 +1,31 @@
# Quickstart: Migration Dashboard Grid
## Prerequisites
- Backend running (`uvicorn backend.src.app:app --reload`)
- Frontend running (`npm run dev`)
- Superset instance accessible and configured in `config.yaml`
## Steps to Verify
1. **Navigate to Migration Page**:
- Open browser to `http://localhost:5173/migration`
- Select a Source Environment from the dropdown.
2. **Verify Dashboard Grid**:
- The grid should appear below the environment selectors.
- It should list dashboards with columns: Title, Last Modified, Status.
- Status pills should be green (Published) or gray (Draft).
3. **Test Filtering**:
- Type in the "Search dashboards..." input.
- The list should filter instantly (client-side).
4. **Test Pagination**:
- If >20 dashboards, check pagination controls at the bottom.
- Navigate to next page.
5. **Test Selection**:
- Select a few dashboards.
- Change filter (hide selected).
- Clear filter -> Selection should persist.
- Click "Select All" -> Should select all matching current filter.

View File

@@ -0,0 +1,48 @@
# Research: Migration Dashboard Grid
## Unknowns & Clarifications
### 1. Pagination vs Client-side Filtering
**Context**: The spec mentions "Client-side (Fetch all, filter locally)" (FR-004) but also "Pagination (e.g., 20 per page)" (FR-008).
**Resolution**:
- We will fetch ALL dashboard metadata from the Superset API in one go. The metadata (ID, Title, Status, Date) is lightweight. Even for 1000 dashboards, the payload is small (~100KB).
- **Client-side Pagination**: We will implement pagination purely on the frontend. This satisfies "Pagination" for UI performance/usability while keeping the "Fetch all" requirement for fast filtering.
- **Decision**: Fetch all, paginate locally.
### 2. Superset API for Dashboard Metadata
**Context**: Need to fetch `title`, `changed_on`, `published`.
**Research**:
- Superset API endpoint: `/api/v1/dashboard/`
- Standard response includes `result` array with `dashboard_title`, `changed_on_utc`, `published`.
- **Decision**: Use `GET /api/v1/dashboard/` with `q` parameter to select specific columns to minimize payload.
- Columns: `id`, `dashboard_title`, `changed_on_utc`, `published`.
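As a rough illustration, the fetch could look like the sketch below (assuming an already-authenticated `requests.Session`; the rison-encoded `q` string, the `page_size` value, and the function name are illustrative, not the final client code):

```python
import requests

def fetch_dashboard_summaries(base_url: str, session: requests.Session) -> list[dict]:
    """Fetch lightweight dashboard metadata from the Superset API."""
    # Superset expects `q` as a rison-encoded query; selecting only the
    # columns we need keeps the payload small even for large instances.
    q = "(columns:!(id,dashboard_title,changed_on_utc,published),page_size:100)"
    resp = session.get(f"{base_url}/api/v1/dashboard/", params={"q": q})
    resp.raise_for_status()
    return resp.json().get("result", [])
```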
### 3. Grid Component
**Context**: Need a grid with sorting, filtering, and selection.
**Options**:
- **Custom Svelte Table**: Lightweight, full control.
- **3rd Party Lib (e.g. svelte-headless-table)**: Powerful but maybe overkill.
- **Decision**: **Custom Svelte Component** (`DashboardGrid.svelte`).
- Why: Requirements are specific (Select All across pages, custom status pill, specific columns). A custom component using standard HTML table + Tailwind is simple and maintainable for this scope.
## Design Decisions
### Data Model
- **Dashboard**:
- `id`: string or int (Superset returns an integer dashboard ID; the frontend treats it as an opaque identifier)
- `title`: string
- `last_modified`: string (ISO date)
- `status`: 'published' | 'draft'
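A minimal Pydantic sketch of this entity, mirroring the fields above (the actual model in `backend/src/models/dashboard.py` may differ):

```python
from typing import Literal
from pydantic import BaseModel

class DashboardMetadata(BaseModel):
    """Lightweight dashboard descriptor returned to the frontend."""
    id: int
    title: str
    last_modified: str  # ISO 8601 date string
    status: Literal["published", "draft"]
```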
### Architecture
- **Backend**:
- `SupersetClient.get_dashboards()`: Fetches list from Superset.
- `GET /api/environments/{id}/dashboards`: Proxy endpoint.
- **Frontend**:
- `DashboardGrid.svelte`: Handles display, sorting, pagination, and selection logic.
- `migration/+page.svelte`: Orchestrates fetching and passes data to Grid.
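A hedged sketch of the proxy endpoint wiring (the `SupersetClient.for_environment` factory is an assumption used here for illustration; the real route may resolve the environment differently):

```python
from fastapi import APIRouter, HTTPException

from backend.src.core.superset_client import SupersetClient

router = APIRouter()

@router.get("/api/environments/{env_id}/dashboards")
async def list_dashboards(env_id: str) -> list[dict]:
    """Proxy the dashboard metadata list for the selected source environment."""
    client = SupersetClient.for_environment(env_id)  # assumed factory helper
    if client is None:
        raise HTTPException(status_code=404, detail="Unknown environment")
    return client.get_dashboards()
```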
### UX/UI
- **Status Column**: Badge (Green for Published, Gray for Draft).
- **Selection**: Checkbox in first column.
- **Pagination**: Simple "Prev 1 of 5 Next" controls at bottom.

View File

@@ -0,0 +1,81 @@
# Feature Specification: Migration Plugin Dashboard Grid
**Feature Branch**: `007-migration-dashboard-grid`
**Created**: 2025-12-27
**Status**: Draft
**Input**: User description: "I want to improve the migration plugin. Dashboard selection should be done from a grid list that can be filtered by name. The grid should include the dashboard name, the date the dashboard was last modified, and its status (published or draft)."
## Clarifications
### Session 2025-12-27
- Q: How should the grid handle data loading and filtering to ensure performance and usability? → A: **Client-side** (Fetch all, filter locally).
- Q: Should the grid include a "Select All" checkbox in the header for bulk operations? → A: **Yes, include "Select All"**.
- Q: How should the grid handle large lists of dashboards (e.g., >50)? → A: **Pagination** (e.g., 20 per page).
- Q: Does the "Select All" checkbox select only the currently visible page of dashboards, or all dashboards that match the current filter? → A: **All matching filter** (Selects all filtered results, not just the visible page).
- Q: What should happen if the user changes the filter while some items are already selected? → A: **Preserve selection** (Selected items remain selected even if hidden by new filter).
- Q: What should be the default sort order when the dashboard grid first loads? → A: **Last Modified Date (Newest first)**.
- Q: Should the grid include an "Owners" column to help distinguish dashboards with the same name? → A: **Yes, include Owners**.
- Q: How should the "Owners" column display multiple owners? → A: **Show first owner + count (e.g., "admin + 2") with tooltip**.
- Q: How should the "Status" (Draft/Published) be visually represented in the grid? → A: **Colored Badges/Chips**.
- Q: Should the grid include a "Preview" action (e.g., link to open the dashboard in Superset)? → A: **Yes, open in new tab**.
## User Scenarios & Testing *(mandatory)*
### User Story 1 - Advanced Dashboard Selection (Priority: P1)
As a migration engineer, I want to select dashboards from a detailed grid view that includes status and modification dates, so that I can easily distinguish between draft/published versions and identify the most recent changes before migrating.
**Why this priority**: Current selection mechanisms (likely simple dropdowns or lists) lack critical context (status, freshness), making it error-prone to select the right assets for migration.
**Independent Test**: Can be tested by connecting to a Superset instance with known dashboards (some drafts, some published) and verifying the grid correctly displays their metadata and allows filtering/selection.
**Acceptance Scenarios**:
1. **Given** I have selected a source environment in the migration plugin, **When** the dashboard list loads, **Then** I see a grid view displaying "Dashboard Name", "Last Modified", and "Status" columns.
2. **Given** the dashboard grid is displayed, **When** I type "Sales" into the filter input, **Then** the grid updates to show only dashboards containing "Sales" in their name.
3. **Given** a dashboard is in "Draft" state in Superset, **When** it appears in the grid, **Then** the Status column clearly indicates "Draft" (vs "Published").
4. **Given** I want to migrate multiple dashboards, **When** I check the boxes next to several rows, **Then** they are added to the selection for the migration job.
5. **Given** the grid is populated, **When** I click the "Select All" checkbox in the header, **Then** all dashboards matching the current filter are selected (across all pages).
---
### Edge Cases
- **Empty Environment**: What happens if the source environment has no dashboards? System should display a "No dashboards found" message in the grid area.
- **Missing Metadata**: What if the Superset API returns null for `changed_on` or `published`? System should display "N/A" or a default value (e.g., "Unknown") rather than crashing.
- **Large Dataset**: How does the grid handle 1000+ dashboards? The grid MUST use pagination (default 20 items per page) to manage display density.
## Requirements *(mandatory)*
### Functional Requirements
- **FR-001**: The system MUST fetch extended metadata for dashboards from the Superset API, specifically: Title, Last Modified Date (`changed_on`), and Published Status (`published`).
- **FR-002**: The Migration Plugin UI MUST display a data grid component to list these dashboards.
- **FR-003**: The grid MUST include sortable columns for:
- Name (Dashboard Title)
- Last Modified (Date/Time)
- Status (Published/Draft)
- Owners (List of owner names)
- **FR-004**: The UI MUST provide a text filter input that filters the grid rows by Dashboard Name in real-time using client-side logic (fetching all dashboards once).
- **FR-005**: The grid MUST support multi-row selection to allow migrating batches of dashboards.
- **FR-006**: The selection state MUST be passed to the migration execution logic when the user initiates the migration.
- **FR-007**: The grid header MUST include a "Select All" checkbox. When checked, it MUST select ALL dashboards matching the current filter criteria (spanning across all pages), not just the currently visible page.
- **FR-008**: The grid MUST support pagination, displaying 20 rows per page by default, with navigation controls (Next/Prev/Page numbers).
- **FR-009**: The selection state MUST be preserved across filter changes. Items selected before a filter change MUST remain selected even if they are hidden by the new filter.
### Key Entities
- **Dashboard Metadata**:
- `id`: Unique identifier from Superset.
- `title`: Display name.
- `changed_on`: Timestamp of last edit.
- `is_published`: Boolean status.
- `owners`: List of owner objects/names.
## Success Criteria *(mandatory)*
### Measurable Outcomes
- **SC-001**: Users can identify the status (Draft/Published) of any dashboard in the list with 100% accuracy.
- **SC-002**: Filtering a list of 100 dashboards takes less than 200ms to update the view.
- **SC-003**: Users can successfully select and initiate migration for a mix of Draft and Published dashboards in a single operation.

View File

@@ -0,0 +1,29 @@
---
description: "Architecture tasks for Migration Plugin Dashboard Grid"
---
# Architecture Tasks: Migration Plugin Dashboard Grid
**Role**: Architect Agent
**Goal**: Define the "What" and "Why" (Contracts, Scaffolding, Models) before implementation.
## Phase 1: Setup & Models
- [x] A001 Define contracts/scaffolding for migration route in backend/src/api/routes/migration.py
- [x] A002 Define contracts/scaffolding for Dashboard model in backend/src/models/dashboard.py
## Phase 2: User Story 1 - Advanced Dashboard Selection
- [x] A003 [US1] Define contracts/scaffolding for SupersetClient extensions in backend/src/core/superset_client.py
- [x] A004 [US1] Define contracts/scaffolding for GET /api/migration/dashboards endpoint in backend/src/api/routes/migration.py
- [x] A005 [US1] Define contracts/scaffolding for DashboardGrid component in frontend/src/components/DashboardGrid.svelte
- [x] A006 [US1] Define contracts/scaffolding for migration page integration in frontend/src/routes/migration/+page.svelte
- [x] A007 [US1] Define contracts/scaffolding for POST /api/migration/execute endpoint in backend/src/api/routes/migration.py
## Handover Checklist
- [x] All new files created with `[DEF]` anchors
- [x] All functions/classes have `@PURPOSE`, `@PRE`, `@POST` tags
- [x] No "naked code" (logic outside of anchors)
- [x] `tasks-dev.md` is ready for the Developer Agent

View File

@@ -0,0 +1,49 @@
---
description: "Development tasks for Migration Plugin Dashboard Grid"
---
# Development Tasks: Migration Plugin Dashboard Grid
**Role**: Developer Agent
**Goal**: Implement the logic defined in the architecture contracts.
## Phase 1: Backend Implementation
- [x] D001 [US1] Implement `SupersetClient.get_dashboards_summary` in `backend/src/core/superset_client.py`
- **Context**: Fetch dashboards from Superset API with specific columns (`id`, `dashboard_title`, `changed_on_utc`, `published`).
- **Input**: None (uses instance config).
- **Output**: List of dictionaries mapped to `DashboardMetadata` fields.
- [x] D002 [US1] Implement `get_dashboards` endpoint in `backend/src/api/routes/migration.py`
- **Context**: Initialize `SupersetClient` with environment config and call `get_dashboards_summary`.
- **Input**: `env_id` (path param).
- **Output**: JSON list of `DashboardMetadata`.
- [x] D003 [US1] Implement `execute_migration` endpoint in `backend/src/api/routes/migration.py`
- **Context**: Validate selection and initiate migration task (placeholder or TaskManager integration).
- **Input**: `DashboardSelection` body.
- **Output**: Task ID and status message.
## Phase 2: Frontend Implementation
- [x] D004 [US1] Implement `DashboardGrid.svelte` logic
- **Context**: `frontend/src/components/DashboardGrid.svelte`
- **Requirements**:
- Client-side pagination (default 20 items).
- Sorting by Title, Last Modified, Status.
- Text filtering (search by title).
- Multi-selection with "Select All" capability.
- Emit selection events.
- [x] D005 [US1] Integrate `DashboardGrid` into Migration Page
- **Context**: `frontend/src/routes/migration/+page.svelte`
- **Requirements**:
- Fetch dashboards when `sourceEnvId` changes.
- Bind `dashboards` data to `DashboardGrid`.
- Bind `selectedDashboardIds`.
- Update `startMigration` to send `selectedDashboardIds` to backend.
## Phase 3: Verification
- [ ] D006 Verify Dashboard Grid functionality (Sort, Filter, Page).
- [ ] D007 Verify API integration (Fetch dashboards, Start migration).

View File

@@ -0,0 +1,35 @@
# Specification Quality Checklist: Migration UI Improvements
**Purpose**: Validate specification completeness and quality before proceeding to planning
**Created**: 2025-12-27
**Feature**: [specs/008-migration-ui-improvements/spec.md](specs/008-migration-ui-improvements/spec.md)
## Content Quality
- [x] No implementation details (languages, frameworks, APIs)
- [x] Focused on user value and business needs
- [x] Written for non-technical stakeholders
- [x] All mandatory sections completed
## Requirement Completeness
- [x] No [NEEDS CLARIFICATION] markers remain
- [x] Requirements are testable and unambiguous
- [x] Success criteria are measurable
- [x] Success criteria are technology-agnostic (no implementation details)
- [x] All acceptance scenarios are defined
- [x] Edge cases are identified
- [x] Scope is clearly bounded
- [x] Dependencies and assumptions identified
## Feature Readiness
- [x] All functional requirements have clear acceptance criteria
- [x] User scenarios cover primary flows
- [x] Feature meets measurable outcomes defined in Success Criteria
- [x] No implementation details leak into specification
## Notes
- The specification addresses the user's request for task history, logs, and interactive password resolution.
- Assumptions are made about task persistence and sequential password prompts for multiple databases.

View File

@@ -0,0 +1,356 @@
# API Contracts: Migration UI Improvements
**Date**: 2025-12-27 | **Status**: Implemented
## Overview
This document defines the API contracts for the Migration UI Improvements feature. All endpoints follow RESTful conventions and use standard HTTP status codes.
## Base URL
`/api/` - All endpoints are relative to the API base URL
## Authentication
All endpoints require authentication using the existing session mechanism.
## Endpoints
### 1. List Migration Tasks
**Endpoint**: `GET /api/tasks`
**Purpose**: Retrieve a paginated list of migration tasks
**Parameters**:
```
limit: integer (query, optional) - Number of tasks to return (default: 10, max: 50)
offset: integer (query, optional) - Pagination offset (default: 0)
status: string (query, optional) - Filter by task status (PENDING, RUNNING, SUCCESS, FAILED, AWAITING_INPUT, AWAITING_MAPPING)
```
**Response**: `200 OK`
**Content-Type**: `application/json`
**Response Body**:
```json
{
"tasks": [
{
"id": "string (uuid)",
"type": "string",
"status": "string (enum)",
"start_time": "string (iso8601)",
"end_time": "string (iso8601) | null",
"requires_input": "boolean"
}
],
"total": "integer",
"limit": "integer",
"offset": "integer"
}
```
**Example Request**:
```
GET /api/tasks?limit=5&offset=0
```
**Example Response**:
```json
{
"tasks": [
{
"id": "550e8400-e29b-41d4-a716-446655440000",
"type": "migration",
"status": "RUNNING",
"start_time": "2025-12-27T09:47:12.000Z",
"end_time": null,
"requires_input": false
},
{
"id": "550e8400-e29b-41d4-a716-446655440001",
"type": "migration",
"status": "AWAITING_INPUT",
"start_time": "2025-12-27T09:45:00.000Z",
"end_time": null,
"requires_input": true
}
],
"total": 2,
"limit": 5,
"offset": 0
}
```
**Error Responses**:
- `401 Unauthorized` - Authentication required
- `400 Bad Request` - Invalid parameters
### 2. Get Task Logs
**Endpoint**: `GET /api/tasks/{task_id}/logs`
**Purpose**: Retrieve detailed logs for a specific task
**Parameters**: `task_id` (path, required) - Task UUID
**Response**: `200 OK`
**Content-Type**: `application/json`
**Response Body**:
```json
{
"task_id": "string (uuid)",
"status": "string (enum)",
"logs": [
{
"timestamp": "string (iso8601)",
"level": "string",
"message": "string",
"context": "object | null"
}
]
}
```
**Example Request**:
```
GET /api/tasks/550e8400-e29b-41d4-a716-446655440001/logs
```
**Example Response**:
```json
{
"task_id": "550e8400-e29b-41d4-a716-446655440001",
"status": "AWAITING_INPUT",
"logs": [
{
"timestamp": "2025-12-27T09:45:00.000Z",
"level": "INFO",
"message": "Starting migration",
"context": null
},
{
"timestamp": "2025-12-27T09:47:12.000Z",
"level": "ERROR",
"message": "API error during upload",
"context": {
"error": "Must provide a password for the database",
"database": "PostgreSQL"
}
}
]
}
```
**Error Responses**:
- `401 Unauthorized` - Authentication required
- `404 Not Found` - Task not found
- `403 Forbidden` - Access denied to this task
### 3. Resume Task with Input
**Endpoint**: `POST /api/tasks/{task_id}/resume`
**Purpose**: Provide required input and resume a paused task
**Request Body**:
```json
{
"passwords": {
"database_name": "password"
}
}
```
**Response**: `200 OK`
**Content-Type**: `application/json`
**Response Body**:
```json
{
"success": true,
"message": "Task resumed successfully"
}
```
**Example Request**:
```
POST /api/tasks/550e8400-e29b-41d4-a716-446655440001/resume
Content-Type: application/json
{
"passwords": {
"PostgreSQL": "securepassword123"
}
}
```
**Example Response**:
```json
{
"success": true,
"message": "Task resumed successfully"
}
```
**Error Responses**:
- `401 Unauthorized` - Authentication required
- `404 Not Found` - Task not found
- `400 Bad Request` - Invalid request body or missing required fields
- `409 Conflict` - Task not in AWAITING_INPUT state or already completed
- `422 Unprocessable Entity` - Invalid password provided
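To make the contract concrete, a hedged FastAPI sketch of the resume route (the shared `task_manager` instance and its `get_task`/`resume_task` methods are assumptions; the real handler lives in `backend/src/api/routes/tasks.py`):

```python
from uuid import UUID

from fastapi import APIRouter, HTTPException
from pydantic import BaseModel

from backend.src.core.task_manager import TaskManager

router = APIRouter(prefix="/api/tasks")
task_manager = TaskManager()  # assumed shared instance; usually injected

class ResumeRequest(BaseModel):
    passwords: dict[str, str]  # database name -> password

@router.post("/{task_id}/resume")
async def resume_task(task_id: UUID, body: ResumeRequest) -> dict:
    """Provide missing passwords and resume a paused task."""
    task = task_manager.get_task(task_id)  # assumed accessor
    if task is None:
        raise HTTPException(status_code=404, detail="Task not found")
    if task.status != "AWAITING_INPUT":
        raise HTTPException(status_code=409, detail="Task not awaiting input")
    task_manager.resume_task(task_id, body.passwords)  # assumed method
    return {"success": True, "message": "Task resumed successfully"}
```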
### 4. Get Task Details (Optional)
**Endpoint**: `GET /api/tasks/{task_id}`
**Purpose**: Get detailed information about a specific task
**Parameters**: `task_id` (path, required) - Task UUID
**Response**: `200 OK`
**Content-Type**: `application/json`
**Response Body**:
```json
{
"id": "string (uuid)",
"type": "string",
"status": "string (enum)",
"start_time": "string (iso8601)",
"end_time": "string (iso8601) | null",
"requires_input": "boolean",
"input_request": "object | null"
}
```
**Example Response**:
```json
{
"id": "550e8400-e29b-41d4-a716-446655440001",
"type": "migration",
"status": "AWAITING_INPUT",
"start_time": "2025-12-27T09:45:00.000Z",
"end_time": null,
"requires_input": true,
"input_request": {
"type": "database_password",
"databases": ["PostgreSQL"],
"error_message": "Must provide a password for the database"
}
}
```
## Data Types
### TaskStatus Enum
```
PENDING
RUNNING
SUCCESS
FAILED
AWAITING_INPUT
AWAITING_MAPPING
```
### LogLevel Enum
```
INFO
WARNING
ERROR
DEBUG
```
## Error Handling
### Standard Error Format
```json
{
"error": {
"code": "string",
"message": "string",
"details": "object | null"
}
}
```
### Common Error Codes
- `invalid_task_id`: Task ID is invalid or not found
- `task_not_awaiting_input`: Task is not in AWAITING_INPUT state
- `invalid_password`: Provided password is invalid
- `unauthorized`: Authentication required
- `bad_request`: Invalid request parameters
## WebSocket Integration
### Task Status Updates
**Channel**: `/ws/tasks/{task_id}/status`
**Message Format**:
```json
{
"event": "status_update",
"task_id": "string (uuid)",
"status": "string (enum)",
"timestamp": "string (iso8601)"
}
```
### Task Log Updates
**Channel**: `/ws/tasks/{task_id}/logs`
**Message Format**:
```json
{
"event": "log_update",
"task_id": "string (uuid)",
"log": {
"timestamp": "string (iso8601)",
"level": "string",
"message": "string",
"context": "object | null"
}
}
```
## Rate Limiting
- Maximum 10 requests per minute per user for task list endpoint
- Maximum 30 requests per minute per user for task details/logs endpoints
- No rate limiting for WebSocket connections
## Versioning
All endpoints are versioned using the `Accept` header:
- `Accept: application/vnd.api.v1+json` - Current version
## Security Considerations
1. **Authentication**: All endpoints require valid session authentication
2. **Authorization**: Users can only access their own tasks
3. **Password Handling**: Passwords are not stored permanently, only used for immediate task resumption
4. **Input Validation**: All inputs are validated according to defined schemas
5. **Rate Limiting**: Prevents abuse of API endpoints
## Implementation Notes
1. **Pagination**: Default limit of 10 tasks, maximum of 50
2. **Sorting**: Tasks are sorted by start_time descending by default
3. **Caching**: Task list responses can be cached for 5 seconds
4. **WebSocket**: Use existing WebSocket infrastructure for real-time updates
5. **Error Recovery**: Failed task resumptions can be retried with corrected input
## OpenAPI Specification
A complete OpenAPI 3.0 specification is available in the repository at `specs/008-migration-ui-improvements/contracts/openapi.yaml`.

View File

@@ -0,0 +1,286 @@
# Data Model: Migration UI Improvements
**Date**: 2025-12-27 | **Status**: Draft
## Entities
### 1. Task (Extended)
**Source**: `backend/src/core/task_manager.py`
**Fields**:
- `id: UUID` - Unique task identifier
- `type: str` - Task type (e.g., "migration")
- `status: TaskStatus` - Current status (extended enum)
- `start_time: datetime` - When task was created
- `end_time: datetime | None` - When task completed (if applicable)
- `logs: List[LogEntry]` - Task execution logs
- `context: Dict` - Task-specific data
- `input_required: bool` - Whether task is awaiting user input
- `input_request: Dict | None` - Details about required input (for AWAITING_INPUT state)
**New Status Values**:
- `AWAITING_INPUT` - Task is paused waiting for user input (e.g., password)
**Relationships**:
- Has many: `LogEntry`
- Belongs to: `Migration` (if migration task)
**Validation Rules**:
- `id` must be unique and non-null
- `status` must be valid TaskStatus enum value
- `start_time` must be set on creation
- `input_request` required when status is `AWAITING_INPUT`
**State Transitions**:
```mermaid
graph LR
PENDING --> RUNNING
RUNNING --> SUCCESS
RUNNING --> FAILED
RUNNING --> AWAITING_INPUT
AWAITING_INPUT --> RUNNING
AWAITING_INPUT --> FAILED
```
### 2. LogEntry
**Source**: Existing in codebase
**Fields**:
- `timestamp: datetime` - When log entry was created
- `level: str` - Log level (INFO, WARNING, ERROR, etc.)
- `message: str` - Log message
- `context: Dict | None` - Additional context data
**Validation Rules**:
- `timestamp` must be set
- `level` must be valid log level
- `message` must be non-empty
### 3. DatabasePasswordRequest (New)
**Source**: New entity for password prompts
**Fields**:
- `database_name: str` - Name of database requiring password
- `connection_string: str | None` - Partial connection string (without password)
- `error_message: str | None` - Original error message
- `attempt_count: int` - Number of password attempts
**Validation Rules**:
- `database_name` must be non-empty
- `attempt_count` must be >= 0
**Relationships**:
- Embedded in: `Task.input_request`
### 4. TaskListResponse (API DTO)
**Fields**:
- `tasks: List[TaskSummary]` - List of task summaries
- `total: int` - Total number of tasks
- `limit: int` - Pagination limit
- `offset: int` - Pagination offset
### 5. TaskSummary (API DTO)
**Fields**:
- `id: UUID` - Task ID
- `type: str` - Task type
- `status: str` - Current status
- `start_time: datetime` - Start time
- `end_time: datetime | None` - End time (if completed)
- `requires_input: bool` - Whether task needs user input
### 6. TaskLogResponse (API DTO)
**Fields**:
- `task_id: UUID` - Task ID
- `logs: List[LogEntry]` - Task logs
- `status: str` - Current task status
### 7. PasswordPromptRequest (API DTO)
**Fields**:
- `task_id: UUID` - Task ID
- `passwords: Dict[str, str]` - Database name to password mapping
**Validation Rules**:
- `task_id` must exist and be in AWAITING_INPUT state
- All required databases must be provided
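A possible Pydantic rendering of these DTOs (a sketch only; field names follow the definitions above and the actual models may differ):

```python
from datetime import datetime
from typing import Optional
from uuid import UUID

from pydantic import BaseModel

class TaskSummary(BaseModel):
    id: UUID
    type: str
    status: str
    start_time: datetime
    end_time: Optional[datetime] = None
    requires_input: bool = False

class TaskListResponse(BaseModel):
    tasks: list[TaskSummary]
    total: int
    limit: int
    offset: int

class PasswordPromptRequest(BaseModel):
    task_id: UUID
    passwords: dict[str, str]  # database name -> password
```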
## API Contracts
### 1. GET /api/tasks - List Tasks
**Purpose**: Retrieve list of recent migration tasks
**Parameters**:
- `limit: int` (query, optional) - Pagination limit (default: 10)
- `offset: int` (query, optional) - Pagination offset (default: 0)
- `status: str` (query, optional) - Filter by status
**Response**: `TaskListResponse`
**Example**:
```json
{
"tasks": [
{
"id": "abc-123",
"type": "migration",
"status": "RUNNING",
"start_time": "2025-12-27T09:47:12Z",
"end_time": null,
"requires_input": false
}
],
"total": 1,
"limit": 10,
"offset": 0
}
```
### 2. GET /api/tasks/{task_id}/logs - Get Task Logs
**Purpose**: Retrieve detailed logs for a specific task
**Parameters**: None
**Response**: `TaskLogResponse`
**Example**:
```json
{
"task_id": "abc-123",
"status": "AWAITING_INPUT",
"logs": [
{
"timestamp": "2025-12-27T09:47:12Z",
"level": "ERROR",
"message": "Must provide a password for the database",
"context": {
"database": "PostgreSQL"
}
}
]
}
```
### 3. POST /api/tasks/{task_id}/resume - Resume Task with Input
**Purpose**: Provide required input and resume a paused task
**Request Body**: `PasswordPromptRequest`
**Response**:
```json
{
"success": true,
"message": "Task resumed successfully"
}
```
**Error Responses**:
- `404 Not Found` - Task not found
- `400 Bad Request` - Invalid input or task not in AWAITING_INPUT state
- `409 Conflict` - Task already completed or failed
## Database Schema Changes
### Task Persistence (SQLite)
**Table**: `persistent_tasks`
**Columns**:
- `id TEXT PRIMARY KEY` - Task ID
- `status TEXT NOT NULL` - Task status
- `created_at TEXT NOT NULL` - Creation timestamp
- `updated_at TEXT NOT NULL` - Last update timestamp
- `input_request JSON` - Serialized input request data
- `context JSON` - Serialized task context
**Indexes**:
- `idx_status` on `status` column
- `idx_created_at` on `created_at` column
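Sketched with the standard `sqlite3` module, the schema above could be created roughly like this (the `backend/tasks.db` path is a hypothetical default, not a confirmed location):

```python
import sqlite3

DDL = """
CREATE TABLE IF NOT EXISTS persistent_tasks (
    id            TEXT PRIMARY KEY,
    status        TEXT NOT NULL,
    created_at    TEXT NOT NULL,
    updated_at    TEXT NOT NULL,
    input_request TEXT,  -- serialized JSON
    context       TEXT   -- serialized JSON
);
CREATE INDEX IF NOT EXISTS idx_status ON persistent_tasks(status);
CREATE INDEX IF NOT EXISTS idx_created_at ON persistent_tasks(created_at);
"""

def ensure_schema(db_path: str = "backend/tasks.db") -> None:
    """Create the task-persistence tables and indexes if they are missing."""
    with sqlite3.connect(db_path) as conn:
        conn.executescript(DDL)
```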
## Event Flow
### Normal Task Execution
```mermaid
sequenceDiagram
participant UI
participant API
participant TaskManager
participant MigrationPlugin
UI->>API: Start migration
API->>TaskManager: Create task
TaskManager->>MigrationPlugin: Execute
MigrationPlugin->>TaskManager: Update status (RUNNING)
MigrationPlugin->>TaskManager: Add logs
MigrationPlugin->>TaskManager: Update status (SUCCESS/FAILED)
```
### Task with Password Requirement
```mermaid
sequenceDiagram
participant UI
participant API
participant TaskManager
participant MigrationPlugin
UI->>API: Start migration
API->>TaskManager: Create task
TaskManager->>MigrationPlugin: Execute
MigrationPlugin->>TaskManager: Update status (RUNNING)
MigrationPlugin->>TaskManager: Detect password error
TaskManager->>TaskManager: Update status (AWAITING_INPUT)
TaskManager->>API: Persist task (if needed)
API->>UI: Task status update
UI->>API: Get task logs
API->>UI: Return logs with error
UI->>User: Show password prompt
User->>UI: Provide password
UI->>API: POST /tasks/{id}/resume
API->>TaskManager: Resume task with password
TaskManager->>MigrationPlugin: Continue execution
MigrationPlugin->>TaskManager: Update status (RUNNING)
MigrationPlugin->>TaskManager: Complete task
```
## Validation Rules
### Task Creation
- Task ID must be unique
- Start time must be set
- Initial status must be PENDING
### Task State Transitions
- Only RUNNING tasks can transition to AWAITING_INPUT
- Only AWAITING_INPUT tasks can be resumed
- Completed tasks (SUCCESS/FAILED) cannot be modified
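These rules could be enforced with a simple transition table, for example (a sketch; the real TaskManager may implement the check differently):

```python
ALLOWED_TRANSITIONS: dict[str, set[str]] = {
    "PENDING": {"RUNNING"},
    "RUNNING": {"SUCCESS", "FAILED", "AWAITING_INPUT"},
    "AWAITING_INPUT": {"RUNNING", "FAILED"},
    "SUCCESS": set(),  # terminal
    "FAILED": set(),   # terminal
}

def validate_transition(current: str, new: str) -> None:
    """Raise if the requested status change is not permitted."""
    if new not in ALLOWED_TRANSITIONS.get(current, set()):
        raise ValueError(f"Illegal task transition: {current} -> {new}")
```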
### Password Input
- All required databases must be provided
- Passwords must meet minimum complexity requirements
- Invalid passwords trigger new error and prompt again
## Implementation Notes
1. **Task Persistence**: Only tasks in AWAITING_INPUT state will be persisted to handle backend restarts
2. **Error Detection**: Specific pattern matching for Superset "Must provide a password" errors
3. **UI Integration**: Real-time updates using existing WebSocket infrastructure
4. **Security**: Passwords are not permanently stored, only used for immediate task resumption
5. **Performance**: Basic pagination for task history to handle growth
## Open Questions
None - All design decisions have been documented and validated against requirements.

View File

@@ -0,0 +1,142 @@
# Implementation Plan: Migration UI Improvements
**Branch**: `008-migration-ui-improvements` | **Date**: 2025-12-27 | **Spec**: [spec.md](spec.md)
**Input**: Feature specification from `/specs/008-migration-ui-improvements/spec.md`
**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
## Summary
This feature aims to improve the migration UI by:
1. Displaying a list of recent and current migration tasks with their statuses
2. Allowing users to view detailed logs for each task
3. Handling database password errors interactively by prompting users to provide missing passwords
The technical approach involves extending the existing TaskManager and MigrationPlugin to support new task states, adding API endpoints for task history and logs, and creating UI components for task visualization and password prompts.
## Technical Context
**Language/Version**: Python 3.9+, Node.js 18+
**Primary Dependencies**: FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API
**Storage**: SQLite (optional for job history), existing database for mappings
**Testing**: pytest, ruff check
**Target Platform**: Linux server (backend), Web browser (frontend)
**Project Type**: web (backend + frontend)
**Performance Goals**: Basic pagination support (limit/offset), real-time status updates
**Constraints**: Configurable retention period for task history, hybrid task persistence approach
**Scale/Scope**: Small to medium migration tasks, 10-50 concurrent users
## Constitution Check
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
### Compliance Gates
1. **Causal Validity (Contracts First)**: ✅ PASS
- ✅ All new entities defined in data-model.md with contracts
- ✅ API contracts documented in contracts/api.md
- ✅ TaskManager extensions have clear @PRE/@POST conditions defined
2. **Immutability of Architecture**: ✅ PASS
- ✅ Existing architectural layers maintained (Domain/Infra/UI)
- ✅ New components follow established patterns
- ✅ Web application structure preserved
3. **Semantic Format Compliance**: ✅ PASS
- ✅ All new code will use [DEF] / [/DEF] anchor syntax
- ✅ Proper metadata tags (@KEY) defined in data model
- ✅ Graph relations (@RELATION) documented
4. **Design by Contract (DbC)**: ✅ PASS
- ✅ All new functions/classes have defined contracts
- ✅ API endpoints have clear specifications and constraints
- ✅ Implementation will strictly satisfy contracts
5. **Belief State Logging**: ✅ PASS
- ✅ Context Manager pattern documented for Python
- ✅ Proper [ANCHOR_ID][STATE] format maintained
- ✅ Logging integrated into task state transitions
6. **Fractal Complexity Limit**: ✅ PASS
- ✅ Modules designed to stay under ~300 lines
- ✅ Functions designed for ~30-50 lines max
- ✅ Complex logic decomposed into helpers
### Post-Design Validation
**Design Artifacts Created**:
- ✅ research.md - Phase 0 research findings
- ✅ data-model.md - Entity definitions and relationships
- ✅ contracts/api.md - API contracts and specifications
- ✅ quickstart.md - Usage documentation
**Constitution Compliance**:
- ✅ All contracts defined before implementation
- ✅ Architectural decisions respect existing structure
- ✅ Semantic format compliance maintained
- ✅ Design by Contract principles applied
- ✅ Complexity limits respected
### Potential Violations
None identified. The design phase has successfully addressed all constitutional requirements and the feature is ready for implementation.
## Project Structure
### Documentation (this feature)
```text
specs/008-migration-ui-improvements/
├── plan.md # This file (/speckit.plan command output)
├── research.md # Phase 0 output (/speckit.plan command)
├── data-model.md # Phase 1 output (/speckit.plan command)
├── quickstart.md # Phase 1 output (/speckit.plan command)
├── contracts/ # Phase 1 output (/speckit.plan command)
└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
```
### Source Code (repository root)
```text
backend/
├── src/
│ ├── models/
│ ├── services/
│ ├── core/
│ │ ├── task_manager.py # Extended with new task states
│ │ └── migration_engine.py # Enhanced error handling
│ ├── api/
│ │ └── routes/
│ │ └── tasks.py # New API endpoints
│ └── plugins/
│ └── migration.py # Enhanced password handling
└── tests/
├── test_task_manager.py # New tests for task states
└── test_migration_plugin.py # New tests for password prompts
frontend/
├── src/
│ ├── components/
│ │ ├── TaskHistory.svelte # New component for task list
│ │ ├── TaskLogViewer.svelte # New component for log viewing
│ │ └── PasswordPrompt.svelte # New component for password input
│ ├── pages/
│ │ └── migration/
│ │ └── +page.svelte # Enhanced migration page
│ └── services/
│ └── taskService.js # New service for task API
└── tests/
├── TaskHistory.spec.js # New component tests
└── taskService.spec.js # New service tests
```
**Structure Decision**: Web application structure (Option 2) is selected as this feature involves both backend API extensions and frontend UI components. The existing backend/frontend separation will be maintained, with new components added to their respective directories.
## Complexity Tracking
> **Fill ONLY if Constitution Check has violations that must be justified**
| Violation | Why Needed | Simpler Alternative Rejected Because |
|-----------|------------|-------------------------------------|
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |

View File

@@ -0,0 +1,226 @@
# Quickstart: Migration UI Improvements
**Date**: 2025-12-27 | **Status**: Draft
## Overview
This guide provides step-by-step instructions for using the new Migration UI Improvements feature.
## Prerequisites
- Running instance of the migration tool
- Valid user session
- At least one migration task (completed or in progress)
## Installation
No additional installation required. The feature is integrated into the existing migration UI.
## Using the Feature
### 1. Viewing Task History
**Steps**:
1. Navigate to the Migration Dashboard
2. Locate the "Recent Tasks" section
3. View the list of your recent migration tasks
**What you'll see**:
- Task ID
- Status (Pending, Running, Success, Failed, Awaiting Input)
- Start time
- "View Logs" action button
**Screenshot**: [Placeholder for task history screenshot]
### 2. Viewing Task Logs
**Steps**:
1. From the task history list, click "View Logs" on any task
2. A modal will open showing detailed logs
**What you'll see**:
- Timestamped log entries
- Log levels (INFO, WARNING, ERROR)
- Detailed error messages
- Context information for errors
**Screenshot**: [Placeholder for log viewer screenshot]
### 3. Handling Database Password Prompts
**Scenario**: A migration fails due to missing database password
**Steps**:
1. Start a migration that requires database passwords
2. When the system detects a missing password error:
- Task status changes to "Awaiting Input"
- A notification appears
- The task shows "Requires Input" indicator
3. Click "View Logs" to see the specific error
4. The system will show a password prompt with:
- Database name requiring password
- Original error message
- Password input field
5. Enter the required password(s)
6. Click "Submit" to resume the migration
**What happens next**:
- The migration resumes with the provided credentials
- Task status changes back to "Running"
- If password is incorrect, you'll be prompted again
**Screenshot**: [Placeholder for password prompt screenshot]
## API Usage Examples
### List Recent Tasks
```bash
curl -X GET \
'http://localhost:8000/api/tasks?limit=5&offset=0' \
-H 'Authorization: Bearer YOUR_TOKEN' \
-H 'Accept: application/vnd.api.v1+json'
```
### Get Task Logs
```bash
curl -X GET \
'http://localhost:8000/api/tasks/TASK_ID/logs' \
-H 'Authorization: Bearer YOUR_TOKEN' \
-H 'Accept: application/vnd.api.v1+json'
```
### Resume Task with Password
```bash
curl -X POST \
'http://localhost:8000/api/tasks/TASK_ID/resume' \
-H 'Authorization: Bearer YOUR_TOKEN' \
-H 'Content-Type: application/json' \
-H 'Accept: application/vnd.api.v1+json' \
-d '{
"passwords": {
"PostgreSQL": "your_secure_password"
}
}'
```
## Troubleshooting
### Common Issues
**Issue**: No tasks appearing in history
- **Solution**: Check that you have started at least one migration task
- **Solution**: Verify your session is valid
- **Solution**: Try refreshing the page
**Issue**: Task stuck in "Awaiting Input" state
- **Solution**: Check the task logs for specific error details
- **Solution**: Provide the required input through the UI
- **Solution**: If input was provided but task didn't resume, try again
**Issue**: Password prompt keeps appearing
- **Solution**: Verify the password is correct
- **Solution**: Check for multiple databases requiring passwords
- **Solution**: Contact administrator if issue persists
### Error Messages
- `"Task not found"`: The specified task ID doesn't exist or you don't have permission
- `"Task not awaiting input"`: The task is not in a state that requires user input
- `"Invalid password"`: The provided password was rejected by the target system
- `"Unauthorized"`: Your session has expired or is invalid
## Configuration
### Task Retention
Configure how long completed tasks are retained:
```bash
# In backend configuration
TASK_RETENTION_DAYS=30
TASK_RETENTION_LIMIT=100
```
### Pagination Limits
Adjust default pagination limits:
```bash
# In backend configuration
DEFAULT_TASK_LIMIT=10
MAX_TASK_LIMIT=50
```
## Best Practices
1. **Monitor Tasks**: Regularly check task history for failed migrations
2. **Prompt Response**: Respond to "Awaiting Input" tasks promptly to avoid delays
3. **Log Review**: Always review logs for failed tasks to understand root causes
4. **Password Management**: Use secure password storage for frequently used credentials
5. **Session Management**: Ensure your session is active before starting long migrations
## Integration Guide
### Frontend Integration
```javascript
// Example: Fetching task list from a Svelte component
import { getTasks } from '/src/services/taskService'

let tasks = []
let total = 0

const fetchTasks = async () => {
  try {
    const response = await getTasks({ limit: 10, offset: 0 })
    tasks = response.tasks   // plain reassignment keeps Svelte reactivity
    total = response.total
  } catch (error) {
    console.error('Failed to fetch tasks:', error)
  }
}
```
### Backend Integration
```python
# Example: Extending TaskManager
from backend.src.core.task_manager import TaskManager
class EnhancedTaskManager(TaskManager):
def get_tasks_for_user(self, user_id, limit=10, offset=0):
# Implement user-specific task filtering
pass
```
## Support
For issues not covered in this guide:
- Check the main documentation
- Review API contract specifications
- Contact support team with error details
## Changelog
**2025-12-27**: Initial release
- Added task history viewing
- Added task log inspection
- Added interactive password prompts
- Added API endpoints for task management
## Feedback
Provide feedback on this feature:
- What works well
- What could be improved
- Additional use cases to support
[Feedback Form Link Placeholder]
## Next Steps
1. Try the feature with a test migration
2. Familiarize yourself with the error patterns
3. Integrate with your existing workflows
4. Provide feedback for improvements

View File

@@ -0,0 +1,164 @@
# Research: Migration UI Improvements
**Date**: 2025-12-27 | **Status**: Complete
## Overview
This research phase was conducted to resolve any technical unknowns and validate design decisions for the Migration UI Improvements feature. Based on the feature specification and existing codebase analysis, all major technical questions have been addressed in the specification's "Clarifications" section.
## Research Findings
### 1. Task History and Status Display
**Decision**: Implement a task history API endpoint and UI component
**Rationale**:
- The existing `TaskManager` already tracks tasks in memory
- Need to expose this information through a new API endpoint
- UI will display tasks with status, start time, and actions
**Alternatives considered**:
- Full database persistence for all tasks (rejected due to complexity)
- In-memory only with no persistence (rejected as it wouldn't survive restarts)
**Implementation approach**:
- Extend `TaskManager` to support task history retrieval
- Add `/api/tasks` endpoint for fetching task list
- Create `TaskHistory` Svelte component for display
### 2. Task Log Viewing
**Decision**: Implement log retrieval API and modal viewer
**Rationale**:
- Each task already maintains logs in `LogEntry` format
- Need API endpoint to retrieve logs for specific task ID
- UI modal provides detailed log viewing without page navigation
**Alternatives considered**:
- Separate log page (rejected for poor UX)
- Downloadable log files (rejected as overkill for current needs)
**Implementation approach**:
- Add `/api/tasks/{task_id}/logs` endpoint
- Create `TaskLogViewer` modal component
- Integrate with existing task list
### 3. Database Password Error Handling
**Decision**: Implement interactive password prompt with task pausing
**Rationale**:
- Superset requires database passwords that aren't exported
- Current system fails entirely on missing passwords
- Interactive resolution improves user experience significantly
**Alternatives considered**:
- Pre-migration password collection form (rejected as not all migrations need passwords)
- Automatic retry with default passwords (rejected as insecure)
**Implementation approach**:
- Extend `MigrationPlugin` to detect specific Superset password errors
- Add new task state "AWAITING_INPUT"
- Create `PasswordPrompt` component for user input
- Implement task resumption with provided credentials
### 4. Task Persistence Strategy
**Decision**: Hybrid approach - persist only tasks needing user input
**Rationale**:
- Full persistence adds unnecessary complexity
- Most tasks complete quickly and don't need persistence
- Only tasks awaiting user input need to survive restarts
**Alternatives considered**:
- Full database persistence (rejected due to complexity)
- No persistence (rejected as loses important state)
**Implementation approach**:
- Extend `TaskManager` to persist "AWAITING_INPUT" tasks
- Use SQLite for simple persistence
- Clear completed tasks based on configurable retention
### 5. Error Detection and Pattern Matching
**Decision**: Pattern match on Superset API error responses
**Rationale**:
- Superset returns specific error format for missing passwords
- Pattern: `UNPROCESSABLE ENTITY` 422 with specific JSON body
- Error message contains: "Must provide a password for the database"
**Implementation approach**:
- Enhance error handling in `MigrationPlugin.execute()`
- Detect specific error pattern and transition to "AWAITING_INPUT"
- Include database name in password prompt
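A hedged sketch of that detection, based on the error payload quoted in the spec (the regex over the `databases/<name>.yaml` keys and the helper name are assumptions):

```python
import re

PASSWORD_ERROR = "Must provide a password for the database"

def extract_databases_needing_password(error_payload: dict) -> list[str]:
    """Return database names mentioned in a Superset 'missing password' error."""
    names = []
    for err in error_payload.get("errors", []):
        for key, value in err.get("extra", {}).items():
            # Relevant keys look like "databases/PostgreSQL.yaml".
            match = re.match(r"databases/(.+)\.yaml$", key)
            if not match or not isinstance(value, dict):
                continue
            if any(PASSWORD_ERROR in msg for msg in value.get("_schema", [])):
                names.append(match.group(1))
    return names
```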
## Technical Validation
### Existing Codebase Analysis
**TaskManager** (`backend/src/core/task_manager.py`):
- Already supports task creation and status tracking
- Needs extension for:
- Task history retrieval
- New "AWAITING_INPUT" state
- Selective persistence
**MigrationPlugin** (`backend/src/plugins/migration.py`):
- Handles migration execution
- Needs enhancement for:
- Error pattern detection
- Task state transitions
- Password injection
**API Routes** (`backend/src/api/routes/`):
- Existing structure for REST endpoints
- Needs new endpoints:
- `GET /tasks` - list tasks
- `GET /tasks/{id}/logs` - get task logs
- `POST /tasks/{id}/resume` - resume with input
### Performance Considerations
**Pagination**: Basic limit/offset pagination will be implemented for task history to handle potential growth of task records.
**Real-time Updates**: Status updates will leverage the existing WebSocket infrastructure; a polling fallback was considered but is unnecessary given the current system.
**Error Handling**: Robust error handling for:
- Invalid task IDs
- Missing logs
- Password validation failures
- Task resumption errors
## Open Questions (Resolved)
All questions from the specification's "Clarifications" section have been addressed:
1. **Data retention policy**: Configurable retention period implemented
2. **Multiple password handling**: Prompt for all missing passwords at once
3. **Invalid password handling**: Prompt again with error message
4. **Task persistence**: Hybrid approach for "AWAITING_INPUT" tasks
5. **Performance requirements**: Basic pagination support
## Recommendations
1. **Implementation Order**:
- Backend API extensions first
- Task state management second
- UI components last
2. **Testing Focus**:
- Error condition testing for password prompts
- Task state transition validation
- Real-time update verification
3. **Future Enhancements**:
- Advanced filtering for task history
- Task search functionality
- Export/import of task history
## Conclusion
The research phase confirms that the proposed feature can be implemented using the existing architecture with targeted extensions. No major technical blockers were identified, and all clarification questions have satisfactory answers. The implementation can proceed to Phase 1: Design & Contracts.

View File

@@ -0,0 +1,99 @@
# Feature Specification: Migration UI Improvements
**Feature Branch**: `008-migration-ui-improvements`
**Created**: 2025-12-27
**Status**: Draft
**Input**: User description: "I want to improve the migration interface: 1. It should display a list of recent (and the current) migration tasks with their statuses and the ability to view the migration log 2. Database migration errors must be handled correctly, for example this one: [Backend] 2025-12-27 09:47:12,230 - ERROR - [import_dashboard][Failure] First import attempt failed: [API_FAILURE] API error during upload: {"errors": [{"message": "Error importing dashboard: databases/PostgreSQL.yaml: {'_schema': ['Must provide a password for the database']}", "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": {"databases/PostgreSQL.yaml": {"_schema": ["Must provide a password for the database"]}, "issue_codes": [{"code": 1010, "message": "Issue 1010 - Superset encountered an error while running a command."}]}}]} | Context: {'type': 'api_call'} ... Here it is clear that the user could be offered to enter the database password."
## User Scenarios & Testing *(mandatory)*
### User Story 1 - Task History and Status (Priority: P1)
As a user, I want to see a list of my recent and currently running migration tasks so that I can track progress and review past results.
**Why this priority**: Essential for visibility into background processes and troubleshooting.
**Independent Test**: Can be fully tested by starting a migration and verifying it appears in a "Recent Tasks" list with its current status.
**Acceptance Scenarios**:
1. **Given** the Migration Dashboard, **When** I open the page, **Then** I see a list of recent migration tasks with their status (Pending, Running, Success, Failed).
2. **Given** a running task, **When** the task updates its status on the backend, **Then** the UI reflects the status change in real-time or upon refresh.
---
### User Story 2 - Task Log Inspection (Priority: P2)
As a user, I want to view the detailed logs of any migration task so that I can understand exactly what happened or why it failed.
**Why this priority**: Critical for debugging failures and confirming success details.
**Independent Test**: Can be tested by clicking a "View Logs" button for a task and seeing a modal or panel with the task's log entries.
**Acceptance Scenarios**:
1. **Given** a migration task in the history list, **When** I click "View Logs", **Then** a detailed log view opens showing timestamped messages from that task.
---
### User Story 3 - Interactive Database Password Resolution (Priority: P1)
As a user, I want the system to detect when a migration fails due to a missing database password and allow me to provide it interactively.
**Why this priority**: Prevents migration blocks due to Superset's security requirements (passwords are not exported).
**Independent Test**: Can be tested by triggering a migration that requires a DB password and verifying the UI prompts for the password instead of just failing.
**Acceptance Scenarios**:
1. **Given** a migration task that encounters a "Must provide a password for the database" error, **When** the error is detected, **Then** the task status changes to "Awaiting Input" and the UI prompts the user to enter the password for the specific database.
2. **Given** a password prompt, **When** I enter the password and submit, **Then** the migration resumes using the provided password.
---
### Edge Cases
- **Multiple Missing Passwords**: How does the system handle multiple databases in one migration needing passwords? (Assumption: Prompt sequentially or as a list).
- **Invalid Password Provided**: What happens if the user provides an incorrect password? (Assumption: System should detect the new error and prompt again or fail gracefully).
- **Task Manager Restart**: How are tasks persisted across backend restarts? (Assumption: Currently tasks are in-memory; persistence might be needed for "Recent Tasks" to be truly useful).
## Clarifications
### Session 2025-12-27
- Q: What is the expected data retention policy for migration task history? Should the system keep all tasks indefinitely, or is there a retention limit (e.g., last 50 tasks, last 30 days)? → A: Configurable retention period (default: last 50 tasks or 30 days, configurable via settings)
- Q: How should the system handle multiple databases requiring passwords in a single migration? Should it prompt for all missing passwords at once, or sequentially as each one is encountered? → A: Prompt for all missing passwords at once in a single form
- Q: What should happen when a user provides an incorrect database password? Should the system retry the same password, prompt again with an error message, or fail the migration entirely? → A: Prompt again with an error message explaining the password was incorrect
- Q: How should task persistence be handled across backend restarts? Should tasks be persisted to a database, kept in-memory only, or use a hybrid approach? → A: Hybrid approach: persist only tasks that need user input
- Q: What performance requirements should the task history API meet? Should it support pagination, filtering, or have specific response time targets? → A: Basic pagination only (limit/offset)
## Requirements *(mandatory)*
### Functional Requirements
- **FR-001**: System MUST provide an API endpoint to retrieve the list of all tasks from `TaskManager`.
- **FR-002**: System MUST provide an API endpoint to retrieve full logs for a specific task ID.
- **FR-003**: Migration Dashboard MUST display a list of recent tasks including ID, start time, status, and a "View Logs" action.
- **FR-004**: `MigrationPlugin` MUST detect specific Superset API errors related to missing database passwords (e.g., matching the `UNPROCESSABLE ENTITY` 422 error with specific JSON body).
- **FR-005**: System MUST support a task state "AWAITING_INPUT" (extending `AWAITING_MAPPING`) specifically for interactive password entry.
- **FR-006**: UI MUST display an interactive prompt when a task enters "AWAITING_INPUT" state due to missing DB password.
- **FR-007**: System MUST allow resuming a task with provided sensitive information (passwords) without persisting them permanently if not required.
### Non-Functional Requirements
- **NFR-001**: System MUST implement configurable retention policy for migration task history with default values of last 50 tasks or 30 days, configurable via settings.
### Key Entities
- **Task**: Represents a migration execution (Existing: `backend/src/core/task_manager.py:Task`). Needs to ensure logs are accessible.
- **MigrationLog**: A single entry in a task's log (Existing: `LogEntry`).
- **DatabasePasswordRequest**: A specific type of resolution request sent to the UI.
## Success Criteria *(mandatory)*
### Measurable Outcomes
- **SC-001**: Users can view the status of their last 10 migration tasks directly on the migration page.
- **SC-002**: Users can access the full log of any recent task in less than 2 clicks.
- **SC-003**: 100% of "Missing Password" errors are caught and presented as interactive prompts rather than fatal migration failures.
- **SC-004**: Users can successfully resume a "blocked" migration by providing the required password through the UI.

View File

@@ -0,0 +1,524 @@
# Tasks: Migration UI Improvements
**Feature**: Migration UI Improvements
**Branch**: `008-migration-ui-improvements`
**Generated**: 2025-12-27
**Status**: Ready for Implementation
## Overview
This document provides actionable, dependency-ordered tasks for implementing the Migration UI Improvements feature. All tasks follow the strict checklist format and are organized by user story for independent implementation and testing.
## Dependencies & Execution Order
### Phase 1: Setup (Project Initialization)
- **Goal**: Initialize project structure and verify prerequisites
- **Dependencies**: None
- **Test Criteria**: Project structure exists, all dependencies available
### Phase 2: Foundational (Blocking Prerequisites)
- **Goal**: Extend core components to support new functionality
- **Dependencies**: Phase 1 complete
- **Test Criteria**: Core extensions work independently
### Phase 3: User Story 1 - Task History and Status (P1)
- **Goal**: Display list of recent migration tasks with status
- **Dependencies**: Phase 2 complete
- **Test Criteria**: Start a migration, verify it appears in task list with correct status
### Phase 4: User Story 2 - Task Log Inspection (P2)
- **Goal**: View detailed logs for any migration task
- **Dependencies**: Phase 3 complete (uses task list)
- **Test Criteria**: Click "View Logs" button, see modal with task log entries
### Phase 5: User Story 3 - Interactive Password Resolution (P1)
- **Goal**: Handle missing database password errors interactively
- **Dependencies**: Phase 2 complete
- **Test Criteria**: Trigger migration with missing password, verify UI prompts instead of failing
### Phase 6: Polish & Cross-Cutting Concerns
- **Goal**: Finalize integration, add tests, documentation
- **Dependencies**: All previous phases complete
- **Test Criteria**: All user stories work together, tests pass
## Parallel Execution Opportunities
**Within Phase 3 (US1)**:
- Backend API endpoints can be implemented in parallel with frontend components
- Task list API and Task log API are independent
**Within Phase 5 (US3)**:
- Password prompt UI can be developed independently of backend resumption logic
- Error detection in MigrationPlugin can be tested separately
---
## Phase 1: Setup (Project Initialization)
- [x] T001 Verify project structure and create missing directories
- Check backend/src/api/routes/ exists
- Check backend/src/models/ exists
- Check frontend/src/components/ exists
- Check frontend/src/services/ exists
- Create any missing directories per plan.md structure
- [x] T002 Verify Python 3.9+ and Node.js 18+ dependencies
- Check Python version >= 3.9
- Check Node.js version >= 18
- Verify FastAPI, Pydantic, SQLAlchemy installed
- Verify SvelteKit, Tailwind CSS configured
- [x] T003 Initialize task tracking for this implementation
- Create implementation log in backend/logs/implementation.log
- Document start time and initial state
---
## Phase 2: Foundational (Blocking Prerequisites)
### Backend Core Extensions
- [x] T004 [P] Extend TaskStatus enum in backend/src/core/task_manager/models.py
- Add new state: `AWAITING_INPUT`
- Update state transition logic
- Add validation for new state
- [x] T005 [P] Extend Task class in backend/src/core/task_manager/models.py
- Add `input_required: bool` field
- Add `input_request: Dict | None` field
- Add `logs: List[LogEntry]` field
- Update constructor and validation
- [x] T006 [P] Implement task history retrieval in TaskManager (backend/src/core/task_manager/manager.py)
- Add `get_tasks(limit, offset, status)` method
- Add `get_task_logs(task_id)` method
- Add `persist_awaiting_input_tasks()` method
- Add `load_persisted_tasks()` method
- [x] T007 [P] Verify/Update SQLite schema in backend/src/core/task_manager/persistence.py
- Ensure `persistent_tasks` table exists with required fields
- Add indexes for status and created_at if missing
- Verify schema creation in `_ensure_db_exists`
- [x] T008 [P] Extend MigrationPlugin error handling
- Add pattern matching for Superset password errors
- Detect "Must provide a password for the database" message
- Extract database name from error context
- Transition task to AWAITING_INPUT state
- [x] T009 [P] Implement password injection mechanism
- Add method to resume task with credentials
- Validate password format
- Handle multiple database passwords
- Update task context with credentials
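A minimal sketch of the T004/T005 extensions, assuming the existing models are Pydantic classes; only `AWAITING_INPUT`, `input_required`, `input_request`, and `logs` come from the tasks above, while the remaining states and fields are placeholders for whatever models.py already defines.

```python
from datetime import datetime
from enum import Enum
from typing import Dict, List, Optional

from pydantic import BaseModel, Field


class TaskStatus(str, Enum):
    # PENDING/RUNNING are placeholders; SUCCESS/FAILED are the states the
    # existing code visibly uses.
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUCCESS = "SUCCESS"
    FAILED = "FAILED"
    AWAITING_INPUT = "AWAITING_INPUT"  # T004: task paused, waiting for user input


class LogEntry(BaseModel):
    timestamp: datetime = Field(default_factory=datetime.utcnow)
    level: str = "INFO"
    message: str
    context: Dict = Field(default_factory=dict)


class Task(BaseModel):
    id: str
    plugin_id: str
    status: TaskStatus = TaskStatus.PENDING
    input_required: bool = False           # T005: True only while AWAITING_INPUT
    input_request: Optional[Dict] = None   # T005: e.g. a DatabasePasswordRequest payload
    logs: List[LogEntry] = Field(default_factory=list)  # T005: per-task log buffer
```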
### Backend API Routes
- [x] T010 [P] Create backend/src/api/routes/tasks.py
- Create file with basic route definitions
- Add route handlers with stubbed responses
- Register router in __init__.py (covered by T011)
- Add basic error handling structure
- [x] T011 [P] Add task routes to backend/src/api/routes/__init__.py (see the registration sketch after this list)
- Import and register tasks router
- Verify route registration
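A minimal sketch of the T011 wiring, assuming the routes package exposes an aggregate `APIRouter` that the FastAPI app already includes; the aggregate name and the set of existing routers are assumptions.

```python
# backend/src/api/routes/__init__.py (sketch)
from fastapi import APIRouter

from .tasks import router as tasks_router  # created in T010

api_router = APIRouter()
# ...existing routers (migration, settings, ...) remain registered here...
api_router.include_router(tasks_router)
```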
### Frontend Services
- [x] T012 [P] Create frontend/src/services/taskService.js
- Implement `getTasks(limit, offset, status)` function
- Implement `getTaskLogs(taskId)` function
- Implement `resumeTask(taskId, passwords)` function
- Add proper error handling
### Frontend Components
- [x] T013 [P] Create frontend/src/components/TaskHistory.svelte
- Display task list with ID, status, start time
- Add "View Logs" action button
- Handle empty state
- Support real-time updates
- [x] T014 [P] Create frontend/src/components/TaskLogViewer.svelte
- Display modal with log entries
- Show timestamp, level, message, context
- Auto-scroll to latest logs
- Close button functionality
- [x] T015 [P] Create frontend/src/components/PasswordPrompt.svelte
- Display database name and error message
- Show password input fields (dynamic for multiple databases)
- Submit button with validation
- Error message display for invalid passwords
---
## Phase 3: User Story 1 - Task History and Status (P1)
**Goal**: As a user, I want to see a list of my recent and currently running migration tasks so that I can track progress and review past results.
**Independent Test**: Start a migration and verify it appears in a "Recent Tasks" list with its current status.
### Backend Implementation
- [x] T016 [US1] Implement GET /api/tasks endpoint logic (see the endpoint sketch after this section)
- Query TaskManager for task list
- Apply limit/offset pagination
- Apply status filter if provided
- Return TaskListResponse format
- [x] T017 [US1] Implement GET /api/tasks/{task_id}/logs endpoint logic
- Validate task_id exists
- Retrieve logs from TaskManager
- Return TaskLogResponse format
- Handle not found errors
- [x] T018 [US1] Add task status update WebSocket support
- Extend existing WebSocket infrastructure
- Broadcast status changes to /ws/tasks/{task_id}/status
- Broadcast log updates to /ws/tasks/{task_id}/logs
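A sketch of the T016/T017 endpoint logic. It assumes `get_task_manager` from backend/src/dependencies works as a FastAPI dependency and that the TaskManager methods added in T006 exist; the response shapes follow the TaskListResponse/TaskLogResponse naming in these tasks, but their exact fields are assumptions.

```python
from typing import Optional

from fastapi import APIRouter, Depends, HTTPException

from backend.src.dependencies import get_task_manager

router = APIRouter(prefix="/api/tasks", tags=["tasks"])


@router.get("")
async def list_tasks(
    limit: int = 10,
    offset: int = 0,
    status: Optional[str] = None,
    task_manager=Depends(get_task_manager),
):
    # T016: paginated, optionally status-filtered task list.
    tasks = task_manager.get_tasks(limit=limit, offset=offset, status=status)
    return {"tasks": [t.dict() for t in tasks], "limit": limit, "offset": offset}


@router.get("/{task_id}/logs")
async def task_logs(task_id: str, task_manager=Depends(get_task_manager)):
    # T017: 404 for unknown tasks, otherwise the full log list.
    if task_manager.get_task(task_id) is None:
        raise HTTPException(status_code=404, detail=f"Task {task_id} not found")
    logs = task_manager.get_task_logs(task_id)
    return {"task_id": task_id, "logs": [entry.dict() for entry in logs]}
```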
### Frontend Implementation
- [x] T019 [US1] Integrate TaskHistory component into migration page
- Add component to frontend/src/routes/migration/+page.svelte
- Fetch tasks on page load
- Handle loading state
- Display error messages
- [x] T020 [US1] Implement real-time status updates
- Subscribe to WebSocket channel for task updates
- Update task list on status change
- Add visual indicators for running tasks
- [x] T021 [US1] Add task list pagination
- Implement "Load More" button
- Handle offset updates
- Maintain current task list while loading more
### Testing
- [x] T022 [US1] Test task list retrieval
- Create test migration tasks
- Verify API returns correct format
- Verify pagination works
- Verify status filtering works
- [x] T023 [US1] Test real-time updates
- Start a migration
- Verify task appears in list
- Verify status updates in real-time
- Verify WebSocket messages are received
---
## Phase 4: User Story 2 - Task Log Inspection (P2)
**Goal**: As a user, I want to view the detailed logs of any migration task so that I can understand exactly what happened or why it failed.
**Independent Test**: Click a "View Logs" button for a task and see a modal or panel with the task's log entries.
### Backend Implementation
- [x] T024 [P] [US2] Enhance log storage in TaskManager
- Ensure logs are retained for all task states
- Add log context preservation
- Implement log cleanup according to the task retention policy (see the sketch after this list)
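A minimal sketch of the retention side of T024. It assumes tasks live in a dict keyed by id, that `status` is a plain string, and that finished tasks carry a `completed_at` timestamp; all three assumptions would need to be aligned with the real TaskManager.

```python
from datetime import datetime, timedelta

ACTIVE_STATES = {"RUNNING", "AWAITING_INPUT"}  # never pruned


def prune_finished_tasks(tasks: dict, retention_days: int = 30, retention_limit: int = 100) -> dict:
    """Drop finished tasks (and their logs) that fall outside the retention
    window, never touching running or paused tasks."""
    cutoff = datetime.utcnow() - timedelta(days=retention_days)
    kept = {
        task_id: task
        for task_id, task in tasks.items()
        if task.status in ACTIVE_STATES or task.completed_at >= cutoff
    }
    # Additionally cap the number of retained finished tasks, newest first.
    finished = sorted(
        (t for t in kept.values() if t.status not in ACTIVE_STATES),
        key=lambda t: t.completed_at,
        reverse=True,
    )
    for stale in finished[retention_limit:]:
        kept.pop(stale.id)
    return kept
```

The `retention_days`/`retention_limit` values are the same ones T042 later exposes through configuration.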
### Frontend Implementation
- [x] T025 [US2] Implement TaskLogViewer modal integration
- Add "View Logs" button to TaskHistory component
- Wire button to open TaskLogViewer modal
- Pass task_id to modal
- Fetch logs when modal opens
- [x] T026 [US2] Implement log display formatting
- Color-code by log level (INFO=blue, WARNING=yellow, ERROR=red)
- Format timestamps nicely
- Display context as JSON or formatted text
- Auto-scroll to bottom on new logs
- [x] T027 [US2] Add log refresh functionality
- Add refresh button in modal
- Poll for new logs every 5 seconds while modal open
- Show "new logs available" indicator
### Testing
- [x] T028 [US2] Test log retrieval
- Create task with various log entries
- Verify logs are returned correctly
- Verify log context is preserved
- Test with large log files
- [x] T029 [US2] Test log viewer UI
- Open logs for completed task
- Open logs for running task
- Verify formatting and readability
- Test refresh functionality
---
## Phase 5: User Story 3 - Interactive Password Resolution (P1)
**Goal**: As a user, I want the system to detect when a migration fails due to a missing database password and allow me to provide it interactively.
**Independent Test**: Trigger a migration that requires a DB password and verify the UI prompts for the password instead of just failing.
### Backend Implementation
- [x] T030 [US3] Implement password error detection in MigrationPlugin (see the detection sketch after this section)
- Add error pattern matching for Superset 422 errors
- Extract database name from error message
- Create DatabasePasswordRequest object
- Transition task to AWAITING_INPUT state
- [x] T031 [US3] Implement task resumption with passwords
- Add validation for password format
- Inject passwords into migration context
- Resume task execution from failure point
- Handle multiple database passwords
- [x] T032 [US3] Add task persistence for AWAITING_INPUT state
- Persist task context and input_request
- Load persisted tasks on backend restart
- Clear persisted data on task completion
- Implement retention policy
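A minimal sketch of the detection step in T030. The exact error text and whether the database name appears in the message or only in the surrounding context are assumptions; the tasks only guarantee a Superset 422 error containing "Must provide a password for the database".

```python
import re
from typing import Optional

PASSWORD_ERROR_RE = re.compile(
    r"Must provide a password for the database\s*[:\"']?(?P<db_name>[^\"'\n]+)?",
    re.IGNORECASE,
)


def detect_password_request(error_text: str, database_hint: Optional[str] = None) -> Optional[dict]:
    """Return a DatabasePasswordRequest-style payload when the error is a
    missing database password, otherwise None."""
    match = PASSWORD_ERROR_RE.search(error_text)
    if not match:
        return None
    name = (match.group("db_name") or database_hint or "unknown").strip()
    return {"type": "database_password", "database_name": name, "message": error_text.strip()}


# Inside MigrationPlugin (sketch): pause instead of failing.
# request = detect_password_request(response_text, database_hint=current_db_name)
# if request:
#     task.input_required = True
#     task.input_request = request
#     task.status = "AWAITING_INPUT"   # hand control back to the UI (T030)
# else:
#     task.status = "FAILED"
```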
### Frontend Implementation
- [x] T033 [US3] Implement password prompt detection
- Monitor task status changes
- Detect AWAITING_INPUT state
- Show notification to user
- Update task list to show "Requires Input" indicator
- [x] T034 [US3] Wire PasswordPrompt to task resumption
- Connect form submission to taskService.resumeTask()
- Handle success (close prompt, resume task)
- Handle failure (show error, keep prompt open)
- Support multiple database inputs
- [x] T035 [US3] Add visual indicators for password-required tasks
- Highlight tasks needing input in task list
- Add badge or icon
- Show count of pending inputs
### Testing
- [x] T036 [US3] Test password error detection
- Mock Superset password error
- Verify error is detected
- Verify task transitions to AWAITING_INPUT
- Verify DatabasePasswordRequest is created
- [x] T037 [US3] Test password resumption
- Provide correct password
- Verify task resumes
- Verify task completes successfully
- Test with incorrect password (should prompt again)
- [x] T038 [US3] Test persistence across restarts
- Create AWAITING_INPUT task
- Restart backend
- Verify task is loaded
- Verify password prompt still works
- [x] T039 [US3] Test multiple database passwords
- Create migration requiring 2+ databases
- Verify all databases listed in prompt
- Verify all passwords submitted
- Verify task resumes with all credentials
---
## Phase 6: Polish & Cross-Cutting Concerns
### Integration & E2E
- [x] T040 [P] Integrate all components on migration page
- Add TaskHistory to migration page
- Add password prompt handling
- Ensure WebSocket connections work
- Test complete user flow
- [x] T041 [P] Add loading states and error boundaries
- Show loading spinners during API calls
- Handle API errors gracefully
- Show user-friendly error messages
- Add retry mechanisms
### Configuration & Security
- [x] T042 [P] Add configuration options to backend/src/core/config_models.py (see the settings sketch after this list)
- Extend `GlobalSettings` with `task_retention` fields:
- `retention_days` (default: 30)
- `retention_limit` (default: 100)
- Add `pagination_limit` to settings (default: 10)
- Update `ConfigManager` to handle new fields
- [x] T043 [P] Security review
- Verify passwords are not logged
- Verify passwords are not stored permanently
- Verify input validation on all endpoints
- Check for SQL injection vulnerabilities
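A sketch of the T042 settings extension, assuming GlobalSettings is a Pydantic model in backend/src/core/config_models.py; grouping the retention fields under a nested model is an assumption, and only the field names and defaults come from the task.

```python
from pydantic import BaseModel, Field


class TaskRetentionSettings(BaseModel):
    retention_days: int = Field(30, description="Days to keep finished tasks")
    retention_limit: int = Field(100, description="Maximum number of finished tasks to keep")


class GlobalSettings(BaseModel):
    # ...existing fields stay as they are...
    task_retention: TaskRetentionSettings = TaskRetentionSettings()
    pagination_limit: int = Field(10, description="Default page size for the task list UI")
```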
### Documentation
- [x] T044 [P] Update API documentation
- Add new endpoints to OpenAPI spec
- Update API contract examples
- Document WebSocket channels
- [x] T045 [P] Update quickstart guide
- Add task history section
- Add log viewing section
- Add password prompt section
- Add troubleshooting tips
### Testing & Quality
- [x] T046 [P] Write unit tests for backend (a pytest sketch follows this list)
- Test TaskManager extensions
- Test MigrationPlugin error detection
- Test API endpoints
- Test password validation
- [x] T047 [P] Write unit tests for frontend
- Test taskService functions
- Test TaskHistory component
- Test TaskLogViewer component
- Test PasswordPrompt component
- [x] T048 [P] Write integration tests
- Test complete password flow
- Test task persistence
- Test WebSocket updates
- Test error recovery
- [x] T049 [P] Run full test suite
- Execute pytest for backend
- Execute frontend tests
- Fix any failing tests
- Verify all acceptance criteria met
- [x] T050 [P] Final validation
- Verify all user stories work independently
- Verify all acceptance scenarios pass
- Check performance (pagination, real-time updates)
- Review code quality and security
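A minimal pytest sketch for the T046 endpoint tests, assuming the FastAPI app is importable (the entry point shown is an assumption) and that the tasks router from Phases 2-3 is registered; adjust the import to the real application factory.

```python
from fastapi.testclient import TestClient

from backend.src.main import app  # assumed entry point

client = TestClient(app)


def test_list_tasks_returns_paginated_payload():
    response = client.get("/api/tasks", params={"limit": 10, "offset": 0})
    assert response.status_code == 200
    body = response.json()
    assert "tasks" in body
    assert len(body["tasks"]) <= 10


def test_logs_for_unknown_task_returns_404():
    response = client.get("/api/tasks/does-not-exist/logs")
    assert response.status_code == 404
```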
---
## Implementation Strategy
### MVP Approach (Recommended)
**Focus on User Story 1 (P1) first**:
1. Complete Phase 1 (Setup)
2. Complete Phase 2 (Foundational) - Backend only
3. Complete Phase 3 (US1) - Task History
4. **MVP Complete**: Users can view task list and status
**Then add User Story 3 (P1)**:
5. Complete Phase 5 (US3) - Password Resolution
6. **Enhanced MVP**: Users can handle password errors
**Finally add User Story 2 (P2)**:
7. Complete Phase 4 (US2) - Log Inspection
8. **Full Feature**: Complete UI improvements
### Parallel Development
**Backend Team**:
- T004-T009 (Core extensions)
- T010-T011 (API routes)
- T016-T018 (US1 backend)
- T024 (US2 backend)
- T030-T032 (US3 backend)
**Frontend Team**:
- T012 (Services)
- T013-T015 (Components)
- T019-T021 (US1 frontend)
- T025-T027 (US2 frontend)
- T033-T035 (US3 frontend)
**Testing Team**:
- T022-T023 (US1 tests)
- T028-T029 (US2 tests)
- T036-T039 (US3 tests)
- T046-T050 (Integration & final)
### Risk Mitigation
1. **Superset API Changes**: Pattern matching may need updates if Superset changes error format
- Mitigation: Make pattern matching configurable
2. **WebSocket Scalability**: Real-time updates may impact performance with many users
- Mitigation: Use polling fallback, implement rate limiting
3. **Password Security**: Handling sensitive data requires careful implementation
- Mitigation: Never log passwords, clear from memory after use, use secure transport
4. **Task Persistence**: SQLite may not scale for high-volume task history
- Mitigation: Configurable retention, consider migration to full database later
---
## Success Criteria Validation
### User Story 1 - Task History and Status
- [ ] SC-001: Users can view status of last 10 migration tasks on migration page
- [ ] Real-time status updates work
- [ ] Task list shows ID, status, start time, actions
### User Story 2 - Task Log Inspection
- [ ] SC-002: Users can access the full log in fewer than 2 clicks
- [ ] Log viewer shows timestamped messages
- [ ] Modal/panel works correctly
### User Story 3 - Interactive Password Resolution
- [ ] SC-003: 100% of "Missing Password" errors are caught and presented as interactive prompts
- [ ] SC-004: Users can resume blocked migration by providing password
- [ ] Task state transitions work correctly
- [ ] Multiple database passwords supported
### Cross-Cutting Requirements
- [ ] All API endpoints follow contract specifications
- [ ] All components follow existing patterns
- [ ] Security requirements met
- [ ] Performance acceptable (pagination, real-time updates)
- [ ] Configuration options available
- [ ] Documentation complete
---
## Task Summary
**Total Tasks**: 50
**Setup Phase**: 3 tasks
**Foundational Phase**: 12 tasks
**US1 Phase**: 8 tasks
**US2 Phase**: 6 tasks
**US3 Phase**: 10 tasks
**Polish Phase**: 11 tasks
**Parallel Opportunities**: 15+ tasks can be developed in parallel within their phases
**MVP Scope**: Tasks 1-23 (Setup, Foundational, US1) - ~23 tasks
**Full Feature**: All 50 tasks
---
## Next Steps
1. **Review this tasks.md** with team leads
2. **Assign tasks** to backend/frontend developers
3. **Start Phase 1** immediately (Setup)
4. **Schedule daily standups** to track progress
5. **Use this document** as the single source of truth for implementation
**Ready for Implementation**: ✅ All design artifacts complete, tasks generated, dependencies mapped.

View File

@@ -20,7 +20,7 @@ from .utils.logger import SupersetLogger
 class SupersetConfig(BaseModel):
     env: str = Field(..., description="Environment name (e.g. dev, prod).")
     base_url: str = Field(..., description="Base URL of the Superset API, including /api/v1.")
-    auth: Dict[str, str] = Field(..., description="Dictionary with authentication data (provider, username, password, refresh).")
+    auth: Dict[str, Any] = Field(..., description="Dictionary with authentication data (provider, username, password, refresh).")
     verify_ssl: bool = Field(True, description="Flag for SSL certificate verification.")
     timeout: int = Field(30, description="Timeout in seconds for HTTP requests.")
     logger: Optional[SupersetLogger] = Field(None, description="Logger instance used for logging.")
@@ -32,7 +32,7 @@ class SupersetConfig(BaseModel):
     # @THROW: ValueError - If required fields are missing.
     # @PARAM: v (Dict[str, str]) - Value of the auth field.
     @validator('auth')
-    def validate_auth(cls, v: Dict[str, str]) -> Dict[str, str]:
+    def validate_auth(cls, v: Dict[str, Any]) -> Dict[str, Any]:
         required = {'provider', 'username', 'password', 'refresh'}
         if not required.issubset(v.keys()):
             raise ValueError(f"The 'auth' dictionary must contain the fields: {required}. Missing: {required - v.keys()}")
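For context on the `Dict[str, Any]` change above, a hypothetical `auth` payload: with the relaxed typing, non-string values such as a boolean refresh flag pass validation, whereas `Dict[str, str]` would reject them. The concrete values are illustrative only.

```python
auth = {
    "provider": "db",   # Superset auth provider
    "username": "admin",
    "password": "secret",
    "refresh": True,    # non-string value; the reason for switching to Dict[str, Any]
}
```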

View File

@@ -28,7 +28,11 @@ class SupersetLogger:
     # @PARAM: log_dir (Optional[Path]) - Directory for log files.
     # @PARAM: level (int) - Logging level (e.g., `logging.INFO`).
     # @PARAM: console (bool) - Flag to enable console output.
-    def __init__(self, name: str = "superset_tool", log_dir: Optional[Path] = None, level: int = logging.INFO, console: bool = True) -> None:
+    def __init__(self, name: str = "superset_tool", log_dir: Optional[Path] = None, level: int = logging.INFO, console: bool = True, logger: Optional[logging.Logger] = None) -> None:
+        if logger:
+            self.logger = logger
+            return
         self.logger = logging.getLogger(name)
         self.logger.setLevel(level)
         self.logger.propagate = False
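A short usage sketch of the new `logger` parameter above: a caller that already configures logging can inject its own `logging.Logger`, and SupersetLogger simply wraps it instead of building handlers. The import path is an assumption, since the file name is not shown in this view.

```python
import logging

from superset_tool.utils.logger import SupersetLogger  # assumed module path

app_logger = logging.getLogger("backend")   # logger configured elsewhere in the app
log = SupersetLogger(logger=app_logger)     # reuses the injected logger, skips handler setup
log.logger.info("SupersetLogger is wrapping the application's existing logger")
```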

188
test_migration_debug.py Normal file
View File

@@ -0,0 +1,188 @@
#!/usr/bin/env python3
"""
Debug script to test the migration flow and identify where the issue occurs.
"""
import sys
import os
from pathlib import Path
# Add project root to sys.path
project_root = Path(__file__).parent
sys.path.insert(0, str(project_root))
def test_plugin_loader():
"""Test if the plugin loader can find the migration plugin."""
print("=== Testing Plugin Loader ===")
try:
from backend.src.core.plugin_loader import PluginLoader
from pathlib import Path
plugin_dir = Path(__file__).parent / "backend" / "src" / "plugins"
print(f"Plugin directory: {plugin_dir}")
print(f"Directory exists: {plugin_dir.exists()}")
if plugin_dir.exists():
print(f"Files in plugin directory: {list(plugin_dir.iterdir())}")
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
configs = plugin_loader.get_all_plugin_configs()
print(f"Found {len(configs)} plugins:")
for config in configs:
print(f" - {config.name} (ID: {config.id})")
migration_plugin = plugin_loader.get_plugin("superset-migration")
if migration_plugin:
print("✓ Migration plugin found")
print(f" Name: {migration_plugin.name}")
print(f" Description: {migration_plugin.description}")
print(f" Schema: {migration_plugin.get_schema()}")
else:
print("✗ Migration plugin NOT found")
return migration_plugin is not None
except Exception as e:
print(f"✗ Plugin loader test failed: {e}")
import traceback
traceback.print_exc()
return False
def test_task_manager():
"""Test if the task manager can create tasks."""
print("\n=== Testing Task Manager ===")
try:
from backend.src.core.plugin_loader import PluginLoader
from backend.src.core.task_manager import TaskManager
from pathlib import Path
plugin_dir = Path(__file__).parent / "backend" / "src" / "plugins"
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
task_manager = TaskManager(plugin_loader)
# Test task creation
test_params = {
"from_env": "dev",
"to_env": "prod",
"dashboard_regex": ".*",
"replace_db_config": False
}
print(f"Creating test task with params: {test_params}")
import asyncio
task = asyncio.run(task_manager.create_task("superset-migration", test_params))
print(f"✓ Task created successfully: {task.id}")
print(f" Status: {task.status}")
print(f" Plugin ID: {task.plugin_id}")
return True
except Exception as e:
print(f"✗ Task manager test failed: {e}")
import traceback
traceback.print_exc()
return False
def test_migration_endpoint():
"""Test the migration endpoint directly."""
print("\n=== Testing Migration Endpoint ===")
try:
from backend.src.api.routes.migration import execute_migration
from backend.src.models.dashboard import DashboardSelection
from backend.src.dependencies import get_config_manager, get_task_manager
# Create a test selection
selection = DashboardSelection(
selected_ids=[1, 2, 3],
source_env_id="ss2",
target_env_id="ss1"
)
print(f"Testing migration with selection: {selection.dict()}")
# This would normally be called by FastAPI with dependencies
# For testing, we'll call it directly
config_manager = get_config_manager()
task_manager = get_task_manager()
import asyncio
# We need to ensure TaskManager uses the SAME loop as run_and_wait
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# Re-initialize TaskManager with the new loop
task_manager.loop = loop
async def run_and_wait():
result = await execute_migration(selection, config_manager, task_manager)
print(f"✓ Migration endpoint test successful: {result}")
task_id = result["task_id"]
print(f"Waiting for task {task_id} to complete...")
for i in range(20):
await asyncio.sleep(1)
task = task_manager.get_task(task_id)
print(f" [{i}] Task status: {task.status}")
if task.status in ["SUCCESS", "FAILED"]:
print(f"Task finished with status: {task.status}")
for log in task.logs:
print(f" [{log.level}] {log.message}")
return task.status == "SUCCESS"
print("Task timed out. Current status: " + task_manager.get_task(task_id).status)
return False
try:
return loop.run_until_complete(run_and_wait())
finally:
loop.close()
except Exception as e:
print(f"✗ Migration endpoint test failed: {e}")
import traceback
traceback.print_exc()
return False
def main():
"""Run all tests."""
print("Migration Debug Test Script")
print("=" * 50)
results = []
# Test 1: Plugin Loader
results.append(("Plugin Loader", test_plugin_loader()))
# Test 2: Task Manager
results.append(("Task Manager", test_task_manager()))
# Test 3: Migration Endpoint
results.append(("Migration Endpoint", test_migration_endpoint()))
# Summary
print("\n" + "=" * 50)
print("TEST RESULTS SUMMARY:")
print("=" * 50)
passed = 0
for test_name, result in results:
status = "PASS" if result else "FAIL"
print(f"{test_name}: {status}")
if result:
passed += 1
print(f"\nTotal: {passed}/{len(results)} tests passed")
if passed == len(results):
print("✓ All tests passed! The migration system appears to be working correctly.")
else:
print("✗ Some tests failed. Check the logs above for details.")
return passed == len(results)
if __name__ == "__main__":
success = main()
sys.exit(0 if success else 1)