40 Commits

Author SHA1 Message Date
07ec2d9797 Commit creation and transfer to a new environment now work 2026-01-23 13:57:44 +03:00
e9d3f3c827 tasks ready 2026-01-22 23:59:16 +03:00
26ba015b75 +gitignore 2026-01-22 23:25:29 +03:00
49129d3e86 fix error 2026-01-22 23:18:48 +03:00
d99a13d91f refactor complete 2026-01-22 17:37:17 +03:00
203ce446f4 fix 2026-01-21 14:00:48 +03:00
c96d50a3f4 fix(backend): standardize superset client init and auth
- Update plugins (debug, mapper, search) to explicitly map environment config to SupersetConfig
- Add authenticate method to SupersetClient for explicit session management
- Add get_environment method to ConfigManager
- Fix navbar dropdown hover stability in frontend with invisible bridge
2026-01-20 19:31:17 +03:00
3bbe320949 TaskLog fix 2026-01-19 17:10:43 +03:00
2d2435642d bug fixes 2026-01-19 00:07:06 +03:00
ec8d67c956 bug fixes 2026-01-18 23:21:00 +03:00
76baeb1038 semantic markup update 2026-01-18 21:29:54 +03:00
11c59fb420 semantic checker script update 2026-01-13 17:33:57 +03:00
b2529973eb constitution update 2026-01-13 15:29:42 +03:00
ae1d630ad6 semantics update 2026-01-13 09:11:27 +03:00
9a9c5879e6 tasks.md status 2026-01-12 12:35:45 +03:00
696aac32e7 1st iter 2026-01-12 12:33:51 +03:00
7a9b1a190a tasks ready 2026-01-07 18:59:49 +03:00
a3dc1fb2b9 docs: amend constitution to v1.6.0 (add 'Everything is a Plugin' principle) and refactor 010 plan 2026-01-07 18:36:38 +03:00
297b29986d Product Manager role 2026-01-07 11:39:44 +03:00
4c6fc8256d project map script | semantic parser 2026-01-01 16:58:21 +03:00
a747a163c8 backup worked 2025-12-30 22:02:51 +03:00
fce0941e98 docs ready 2025-12-30 21:30:37 +03:00
45c077b928 +api rework 2025-12-30 20:08:48 +03:00
9ed3a5992d cleaned 2025-12-30 18:20:40 +03:00
a032fe8457 Password prompt 2025-12-30 17:21:12 +03:00
4c9d554432 TaskManager refactor 2025-12-29 10:13:37 +03:00
6962a78112 mappings+migrate 2025-12-27 10:16:41 +03:00
3d75a21127 tech_lead / coder 2 roles 2025-12-27 08:02:59 +03:00
07914c8728 semantic add 2025-12-27 07:14:08 +03:00
cddc259b76 new loggers logic in constitution 2025-12-27 06:51:28 +03:00
dcbf0a7d7f tasks ready 2025-12-27 06:37:03 +03:00
65f61c1f80 Merge branch '001-migration-ui-redesign' into master 2025-12-27 05:58:35 +03:00
cb7386f274 superset_tool logger rework 2025-12-27 05:53:30 +03:00
83e34e1799 feat(logging): implement configurable belief state logging
- Add LoggingConfig model and logging field to GlobalSettings
- Implement belief_scope context manager for structured logging
- Add configure_logger for dynamic level and file rotation settings
- Add logging configuration UI to Settings page
- Update ConfigManager to apply logging settings on initialization and updates
2025-12-27 05:39:33 +03:00
d197303b9f 006 plan ready 2025-12-26 19:36:49 +03:00
a43f8fb021 001-migration-ui-redesign (#3)
Reviewed-on: #3
2025-12-26 18:17:58 +03:00
4aa01b6470 Merge branch 'migration' into 001-migration-ui-redesign 2025-12-26 18:16:24 +03:00
35b423979d spec rules 2025-12-25 22:28:42 +03:00
2ffc3cc68f feat(migration): implement interactive mapping resolution workflow
- Add SQLite database integration for environments and mappings
- Update TaskManager to support pausing tasks (AWAITING_MAPPING)
- Modify MigrationPlugin to detect missing mappings and wait for resolution
- Add frontend UI for handling missing mappings interactively
- Create dedicated migration routes and API endpoints
- Update .gitignore and project documentation
2025-12-25 22:27:29 +03:00
4448352ef9 Merge pull request '001-fix-ui-ws-validation' (#2) from 001-fix-ui-ws-validation into migration
Reviewed-on: #2
2025-12-21 00:29:19 +03:00
268 changed files with 29140 additions and 14346 deletions

84
.gitignore vendored
View File

@@ -1,21 +1,71 @@
*__pycache__*
*.ps1
keyring passwords.py
*logs*
*github*
*venv*
*git*
*tech_spec*
dashboards
# Python specific
*.pyc
dist/
*.egg-info/
# Node.js specific
node_modules/
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.venv
venv/
ENV/
env/
backend/backups/*
# Node.js
frontend/node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.svelte-kit/
.vite/
build/
dist/
.env*
config.json
package-lock.json
backend/backups/*
# Logs
*.log
backend/backend.log
# OS
.DS_Store
Thumbs.db
# IDE
.vscode/
.idea/
*.swp
*.swo
# Project specific
*.ps1
keyring passwords.py
*github*
*tech_spec*
dashboards
backend/mappings.db
backend/tasks.db
# Git Integration repositories
backend/git_repos/

15
.kilocode/mcp.json Executable file → Normal file
View File

@@ -1,14 +1 @@
{
"mcpServers": {
"tavily": {
"command": "npx",
"args": [
"-y",
"tavily-mcp@0.2.3"
],
"env": {
"TAVILY_API_KEY": "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"
}
}
}
}
{"mcpServers":{}}

View File

@@ -6,9 +6,25 @@ Auto-generated from all feature plans. Last updated: 2025-12-19
- Python 3.9+, Node.js 18+ + `uvicorn`, `npm`, `bash` (003-project-launch-script)
- Python 3.9+, Node.js 18+ + SvelteKit, FastAPI, Tailwind CSS (inferred from existing frontend) (004-integrate-svelte-kit)
- N/A (Frontend integration) (004-integrate-svelte-kit)
- Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic (001-fix-ui-ws-validation)
- Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic (005-fix-ui-ws-validation)
- N/A (Configuration based) (005-fix-ui-ws-validation)
- Filesystem (plugins, logs, backups), SQLite (optional, for job history if needed) (005-fix-ui-ws-validation)
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS (007-migration-dashboard-grid)
- N/A (Superset API integration) (007-migration-dashboard-grid)
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, Superset API (007-migration-dashboard-grid)
- N/A (Superset API integration - read-only for metadata) (007-migration-dashboard-grid)
- Python 3.9+ (backend), Node.js 18+ (frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API (008-migration-ui-improvements)
- SQLite (optional for job history), existing database for mappings (008-migration-ui-improvements)
- Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, Superset API (008-migration-ui-improvements)
- Python 3.9+, Node.js 18+ + FastAPI, APScheduler, SQLAlchemy, SvelteKit, Tailwind CSS (009-backup-scheduler)
- SQLite (`tasks.db`), JSON (`config.json`) (009-backup-scheduler)
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, `superset_tool` (internal lib) (010-refactor-cli-to-web)
- SQLite (for job history/results, connection configs), Filesystem (for temporary file uploads) (010-refactor-cli-to-web)
- Python 3.9+ + FastAPI, Pydantic, requests, pyyaml (migrated from superset_tool) (012-remove-superset-tool)
- SQLite (tasks.db, migrations.db), Filesystem (012-remove-superset-tool)
- Filesystem (local git repo), SQLite (for GitServerConfig, Environment) (011-git-integration-dashboard)
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, GitPython (or CLI git), Pydantic, SQLAlchemy, Superset API (011-git-integration-dashboard)
- SQLite (for config/history), Filesystem (local Git repositories) (011-git-integration-dashboard)
- Python 3.9+ (Backend), Node.js 18+ (Frontend Build) (001-plugin-arch-svelte-ui)
@@ -29,9 +45,9 @@ cd src; pytest; ruff check .
Python 3.9+ (Backend), Node.js 18+ (Frontend Build): Follow standard conventions
## Recent Changes
- 001-fix-ui-ws-validation: Added Python 3.9+ (Backend), Node.js 18+ (Frontend Build)
- 005-fix-ui-ws-validation: Added Python 3.9+ (Backend), Node.js 18+ (Frontend Build)
- 005-fix-ui-ws-validation: Added Python 3.9+, Node.js 18+ + FastAPI, SvelteKit, Tailwind CSS, Pydantic
- 011-git-integration-dashboard: Added Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, GitPython (or CLI git), Pydantic, SQLAlchemy, Superset API
- 011-git-integration-dashboard: Added Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, GitPython (or CLI git), Pydantic, SQLAlchemy, Superset API
- 011-git-integration-dashboard: Added Python 3.9+, Node.js 18+
<!-- MANUAL ADDITIONS START -->

45
.kilocodemodes Normal file
View File

@@ -0,0 +1,45 @@
customModes:
- slug: tester
name: Tester
description: QA and Plan Verification Specialist
roleDefinition: |-
You are Kilo Code, acting as a QA and Verification Specialist. Your primary goal is to validate that the project implementation aligns strictly with the defined specifications and task plans.
Your responsibilities include: - Reading and analyzing task plans and specifications (typically in the `specs/` directory). - Verifying that implemented code matches the requirements. - Executing tests and validating system behavior via CLI or Browser. - Updating the status of tasks in the plan files (e.g., marking checkboxes [x]) as they are verified. - Identifying and reporting missing features or bugs.
whenToUse: Use this mode when you need to audit the progress of a project, verify completed tasks against the plan, run quality assurance checks, or update the status of task lists in specification documents.
groups:
- read
- edit
- command
- browser
- mcp
customInstructions: 1. Always begin by loading the relevant plan or task list from the `specs/` directory. 2. Do not assume a task is done just because it is checked; verify the code or functionality first if asked to audit. 3. When updating task lists, ensure you only mark items as complete if you have verified them.
- slug: product-manager
name: Product Manager
description: Executes SpecKit workflows for feature management
roleDefinition: |-
You are Kilo Code, acting as a Product Manager. Your purpose is to rigorously execute the workflows defined in `.kilocode/workflows/`.
You act as the orchestrator for: - Specification (`speckit.specify`, `speckit.clarify`) - Planning (`speckit.plan`) - Task Management (`speckit.tasks`, `speckit.taskstoissues`) - Quality Assurance (`speckit.analyze`, `speckit.checklist`) - Governance (`speckit.constitution`) - Implementation Oversight (`speckit.implement`)
For each task, you must read the relevant workflow file from `.kilocode/workflows/` and follow its Execution Steps precisely.
whenToUse: Use this mode when you need to run any /speckit.* command or when dealing with high-level feature planning, specification writing, or project management tasks.
groups:
- read
- edit
- command
- mcp
customInstructions: 1. Always read the specific workflow file in `.kilocode/workflows/` before executing a command. 2. Adhere strictly to the "Operating Constraints" and "Execution Steps" in the workflow files.
- slug: semantic
name: Semantic Agent
roleDefinition: |-
You are Kilo Code, a Semantic Agent responsible for maintaining the semantic integrity of the codebase. Your primary goal is to ensure that all code entities (Modules, Classes, Functions, Components) are properly annotated with semantic anchors and tags as defined in `semantic_protocol.md`.
Your core responsibilities are: 1. **Semantic Mapping**: You run and maintain the `generate_semantic_map.py` script to generate up-to-date semantic maps (`semantics/semantic_map.json`, `specs/project_map.md`) and compliance reports (`semantics/reports/*.md`). 2. **Compliance Auditing**: You analyze the generated compliance reports to identify files with low semantic coverage or parsing errors. 3. **Semantic Enrichment**: You actively edit code files to add missing semantic anchors (`[DEF:...]`, `[/DEF:...]`) and mandatory tags (`@PURPOSE`, `@LAYER`, etc.) to improve the global compliance score. 4. **Protocol Enforcement**: You strictly adhere to the syntax and rules defined in `semantic_protocol.md` when modifying code.
You have access to the full codebase and tools to read, write, and execute scripts. You should prioritize fixing "Critical Parsing Errors" (unclosed anchors) before addressing missing metadata.
whenToUse: Use this mode when you need to update the project's semantic map, fix semantic compliance issues (missing anchors/tags/DbC ), or analyze the codebase structure. This mode is specialized for maintaining the `semantic_protocol.md` standards.
description: Codebase semantic mapping and compliance expert
customInstructions: Always check `semantics/reports/` for the latest compliance status before starting work. When fixing a file, try to fix all semantic issues in that file at once. After making a batch of fixes, run `python3 generate_semantic_map.py` to verify improvements.
groups:
- read
- edit
- command
- browser
- mcp
source: project

View File

@@ -1,29 +1,67 @@
# ss-tools Constitution
<!--
SYNC IMPACT REPORT
Version: 1.7.1 (Simplified Workflow)
Changes:
- Simplified Generation Workflow to a single phase: Code Generation from `tasks.md`.
- Removed multi-phase Architecture/Implementation split to streamline development.
Templates Status:
- .specify/templates/plan-template.md: ✅ Aligned (Dynamic check).
- .specify/templates/spec-template.md: ✅ Aligned.
- .specify/templates/tasks-template.md: ✅ Aligned.
-->
# Semantic Code Generation Constitution
## Core Principles
### I. SPA-First Architecture
The frontend MUST be a Static Single Page Application (SPA) served by the Python backend. No Node.js server is permitted in production. The backend serves the `index.html` entry point for all non-API routes.
### I. Semantic Protocol Compliance
The file `semantic_protocol.md` is the **authoritative technical standard** for this project. All code generation, refactoring, and architecture must strictly adhere to the standards, syntax, and workflows defined therein.
- **Syntax**: `[DEF]` anchors, `@RELATION` tags, and metadata must match the Protocol specification.
- **Structure**: File layouts and headers must follow the "File Structure Standard".
- **Workflow**: The technical steps for generating code must align with the Protocol.
### II. API-Driven Communication
All data retrieval and state changes MUST be performed via the backend REST API or WebSockets. The frontend should not access the database or filesystem directly.
### II. Causal Validity (Contracts First)
As defined in the Protocol, Semantic definitions (Contracts) must ALWAYS precede implementation code. Logic is downstream of definition. We define the structure and constraints (`[DEF]`, `@PRE`, `@POST`) before writing the executable logic.
### III. Modern Stack Consistency
The project strictly uses SvelteKit (Frontend), FastAPI (Backend), and Tailwind CSS (Styling). New dependencies must be justified and approved.
### III. Immutability of Architecture
Architectural decisions in the Module Header (`@LAYER`, `@INVARIANT`, `@CONSTRAINT`) are treated as immutable constraints. Changes to these require an explicit refactoring step, not ad-hoc modification during implementation.
### IV. Semantic Protocol Adherence (GRACE-Poly)
All code generation and modification MUST adhere to the Semantic Protocol defined in `semantic_protocol.md`.
- **Anchors**: Use `[DEF:id:Type]` and `[/DEF:id]` to define semantic boundaries.
- **Contracts**: Define `@PRE` and `@POST` conditions in headers.
- **Logging**: Use structured logging with `[AnchorID][State]` format.
- **Immutability**: Respect architectural decisions in headers.
### IV. Design by Contract (DbC)
Contracts are the Source of Truth. Functions and Classes must define their purpose, specifications, and constraints in the metadata block before implementation, strictly following the **Contracts (Section IV)** standard in `semantic_protocol.md`.
### V. Belief State Logging
Agents must maintain belief state logs for debugging and coherence checks, strictly following the **Logging Standard (Section V)** defined in `semantic_protocol.md`.
### VI. Fractal Complexity Limit
To maintain semantic coherence, code must adhere to the complexity limits (Module/Function size) defined in the **Fractal Complexity Limit (Section VI)** of `semantic_protocol.md`.
### VII. Everything is a Plugin
All functional extensions, tools, or major features must be implemented as modular Plugins inheriting from `PluginBase`. Logic should not reside in standalone services or scripts unless strictly necessary for core infrastructure. This ensures a unified execution model via the `TaskManager`, consistent logging, and modularity.
## File Structure Standards
Refer to **Section III (File Structure Standard)** in `semantic_protocol.md` for the authoritative definitions of:
- Python Module Headers (`.py`)
- Svelte Component Headers (`.svelte`)
## Generation Workflow
The development process follows a streamlined single-phase workflow:
### 1. Code Generation Phase (Mode: `code`)
**Input**: `tasks.md`
**Responsibility**:
- Select task from `tasks.md`.
- Generate Scaffolding (`[DEF]` anchors, Headers, Contracts) AND Implementation in one pass.
- Ensure strict adherence to Protocol Section IV (Contracts) and Section VII (Generation Workflow).
- **Output**: Working code with passing tests.
### 2. Validation
If logic conflicts with Contract -> Stop -> Report Error.
## Governance
This Constitution establishes the "Semantic Code Generation Protocol" as the supreme law of this repository.
### Compliance
All Pull Requests and code modifications must be verified against this Constitution. Violations of Core Principles are considered critical defects.
- **Authoritative Source**: `semantic_protocol.md` defines the specific implementation rules for these Principles.
- **Automated Enforcement**: Tools must validate adherence to the `semantic_protocol.md` syntax.
- **Amendments**: Changes to core principles require a Constitution amendment. Changes to technical syntax require a Protocol update.
- **Compliance**: Failure to adhere to the Protocol constitutes a build failure.
### Amendments
Changes to this Constitution require a formal RFC process and approval from the project lead.
**Version**: 1.0.0 | **Ratified**: 2025-12-20
**Version**: 1.7.1 | **Ratified**: 2025-12-19 | **Last Amended**: 2026-01-13
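
For reference, a minimal sketch of code that satisfies these principles, modeled on the annotated route handlers added elsewhere in this changeset (the `belief_scope` helper and `logger` are assumed to come from `backend.src.core.logger`, as in those files; the function itself is a hypothetical illustration, not part of the repository):

from backend.src.core.logger import logger, belief_scope

# [DEF:normalize_dashboard_title:Function]
# @PURPOSE: Normalize a dashboard title for comparison during migration.
# @PRE: title is a non-empty string.
# @POST: Returns a lowercased, whitespace-trimmed title.
# @PARAM: title (str) - Raw dashboard title.
# @RETURN: str - Normalized title.
def normalize_dashboard_title(title: str) -> str:
    with belief_scope("normalize_dashboard_title", f"title={title}"):
        result = title.strip().lower()
        logger.info(f"[normalize_dashboard_title][Success] Normalized to '{result}'")
        return result
# [/DEF:normalize_dashboard_title:Function]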

View File

@@ -0,0 +1,35 @@
---
description: "Architecture task list template (Contracts & Scaffolding)"
---
# Architecture Tasks: [FEATURE NAME]
**Role**: Architect Agent
**Goal**: Define the "What" and "Why" (Contracts, Scaffolding, Models) before implementation.
**Input**: Design documents from `/specs/[###-feature-name]/`
**Output**: Files with `[DEF]` anchors, `@PRE`/`@POST` contracts, and `@RELATION` mappings. No business logic.
## Phase 1: Setup & Models
- [ ] A001 Create/Update data models in [path] with `[DEF]` and contracts
- [ ] A002 Define API route structure/contracts in [path]
- [ ] A003 Define shared utilities/interfaces
## Phase 2: User Story 1 - [Title]
- [ ] A004 [US1] Define contracts for [Component/Service] in [path]
- [ ] A005 [US1] Define contracts for [Endpoint] in [path]
- [ ] A006 [US1] Define contracts for [Frontend Component] in [path]
## Phase 3: User Story 2 - [Title]
- [ ] A007 [US2] Define contracts for [Component/Service] in [path]
- [ ] A008 [US2] Define contracts for [Endpoint] in [path]
## Handover Checklist
- [ ] All new files created with `[DEF]` anchors
- [ ] All functions/classes have `@PURPOSE`, `@PRE`, `@POST` tags
- [ ] No "naked code" (logic outside of anchors)
- [ ] `tasks-dev.md` is ready for the Developer Agent
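
To illustrate the handover, a hypothetical Architect-phase stub (anchors and contracts only, no business logic); the function name and signature are invented for this example, and the body is deliberately left for the Developer Agent:

from typing import Optional

# [DEF:resolve_mapping:Function]
# @PURPOSE: Resolve a source database UUID to its target-environment counterpart.
# @PRE: source_db_uuid is a UUID known in the source environment.
# @POST: Returns the mapped target UUID, or None if no mapping exists.
# @PARAM: source_db_uuid (str) - UUID of the source database.
# @RETURN: Optional[str] - Target database UUID.
def resolve_mapping(source_db_uuid: str) -> Optional[str]:
    ...  # implementation deferred to tasks-dev.md (Developer phase)
# [/DEF:resolve_mapping:Function]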

View File

@@ -0,0 +1,35 @@
---
description: "Developer task list template (Implementation Logic)"
---
# Developer Tasks: [FEATURE NAME]
**Role**: Developer Agent
**Goal**: Implement the "How" (Logic, State, Error Handling) inside the defined contracts.
**Input**: `tasks-arch.md` (completed), Scaffolding files with `[DEF]` anchors.
**Output**: Working code that satisfies `@PRE`/`@POST` conditions.
## Phase 1: Setup & Models
- [ ] D001 Implement logic for [Model] in [path]
- [ ] D002 Implement logic for [API Route] in [path]
- [ ] D003 Implement shared utilities
## Phase 2: User Story 1 - [Title]
- [ ] D004 [US1] Implement logic for [Component/Service] in [path]
- [ ] D005 [US1] Implement logic for [Endpoint] in [path]
- [ ] D006 [US1] Implement logic for [Frontend Component] in [path]
- [ ] D007 [US1] Verify semantic compliance and belief state logging
## Phase 3: User Story 2 - [Title]
- [ ] D008 [US2] Implement logic for [Component/Service] in [path]
- [ ] D009 [US2] Implement logic for [Endpoint] in [path]
## Polish & Quality Assurance
- [ ] DXXX Verify all tests pass
- [ ] DXXX Check error handling and edge cases
- [ ] DXXX Ensure code style compliance

Submodule backend/backend/git_repos/12 added at d592fa7ed5

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env python3
"""Script to delete tasks with RUNNING status from the database."""
from sqlalchemy.orm import Session

from src.core.database import TasksSessionLocal
from src.models.task import TaskRecord


def delete_running_tasks():
    """Delete all tasks with RUNNING status from the database."""
    session: Session = TasksSessionLocal()
    try:
        # Find all task records with RUNNING status
        running_tasks = session.query(TaskRecord).filter(TaskRecord.status == "RUNNING").all()
        if not running_tasks:
            print("No RUNNING tasks found.")
            return
        print(f"Found {len(running_tasks)} RUNNING tasks:")
        for task in running_tasks:
            print(f"- Task ID: {task.id}, Type: {task.type}")
        # Delete the found tasks
        session.query(TaskRecord).filter(TaskRecord.status == "RUNNING").delete(synchronize_session=False)
        session.commit()
        print(f"Successfully deleted {len(running_tasks)} RUNNING tasks.")
    except Exception as e:
        session.rollback()
        print(f"Error deleting tasks: {e}")
    finally:
        session.close()


if __name__ == "__main__":
    delete_running_tasks()

BIN
backend/mappings.db Normal file

Binary file not shown.

BIN
backend/migrations.db Normal file

Binary file not shown.

View File

@@ -1,12 +1,47 @@
fastapi
uvicorn
pydantic
authlib
python-multipart
starlette
jsonschema
requests
keyring
httpx
PyYAML
websockets
annotated-doc==0.0.4
annotated-types==0.7.0
anyio==4.12.0
APScheduler==3.11.2
attrs==25.4.0
Authlib==1.6.6
certifi==2025.11.12
cffi==2.0.0
charset-normalizer==3.4.4
click==8.3.1
cryptography==46.0.3
fastapi==0.126.0
greenlet==3.3.0
h11==0.16.0
httpcore==1.0.9
httpx==0.28.1
idna==3.11
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.3.0
jeepney==0.9.0
jsonschema==4.25.1
jsonschema-specifications==2025.9.1
keyring==25.7.0
more-itertools==10.8.0
pycparser==2.23
pydantic==2.12.5
pydantic_core==2.41.5
python-multipart==0.0.21
PyYAML==6.0.3
RapidFuzz==3.14.3
referencing==0.37.0
requests==2.32.5
rpds-py==0.30.0
SecretStorage==3.5.0
SQLAlchemy==2.0.45
starlette==0.50.0
typing-inspection==0.4.2
typing_extensions==4.15.0
tzlocal==5.3.1
urllib3==2.6.2
uvicorn==0.38.0
websockets==15.0.1
pandas
psycopg2-binary
openpyxl
GitPython==3.1.44

View File

@@ -31,6 +31,12 @@ oauth2_scheme = OAuth2AuthorizationCodeBearer(
tokenUrl="https://your-adfs-server/adfs/oauth2/token",
)
# [DEF:get_current_user:Function]
# @PURPOSE: Dependency to get the current user from the ADFS token.
# @PARAM: token (str) - The OAuth2 bearer token.
# @PRE: token should be provided via Authorization header.
# @POST: Returns user details if authenticated, else raises 401.
# @RETURN: Dict[str, str] - User information.
async def get_current_user(token: str = Depends(oauth2_scheme)):
"""
Dependency to get the current user from the ADFS token.
@@ -49,4 +55,5 @@ async def get_current_user(token: str = Depends(oauth2_scheme)):
)
# A real implementation would return a user object.
return {"placeholder_user": "user@example.com"}
# [/DEF]
# [/DEF:get_current_user:Function]
# [/DEF:AuthModule:Module]

View File

@@ -1 +1 @@
from . import plugins, tasks, settings
from . import plugins, tasks, settings, connections, environments, mappings, migration, git

View File

@@ -0,0 +1,100 @@
# [DEF:ConnectionsRouter:Module]
# @SEMANTICS: api, router, connections, database
# @PURPOSE: Defines the FastAPI router for managing external database connections.
# @LAYER: UI (API)
# @RELATION: Depends on SQLAlchemy session.
# @CONSTRAINT: Must use belief_scope for logging.

# [SECTION: IMPORTS]
from typing import List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from ...core.database import get_db
from ...models.connection import ConnectionConfig
from pydantic import BaseModel, Field
from datetime import datetime
from ...core.logger import logger, belief_scope
# [/SECTION]

router = APIRouter()

# [DEF:ConnectionSchema:Class]
# @PURPOSE: Pydantic model for connection response.
class ConnectionSchema(BaseModel):
    id: str
    name: str
    type: str
    host: Optional[str] = None
    port: Optional[int] = None
    database: Optional[str] = None
    username: Optional[str] = None
    created_at: datetime

    class Config:
        orm_mode = True
# [/DEF:ConnectionSchema:Class]

# [DEF:ConnectionCreate:Class]
# @PURPOSE: Pydantic model for creating a connection.
class ConnectionCreate(BaseModel):
    name: str
    type: str
    host: Optional[str] = None
    port: Optional[int] = None
    database: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
# [/DEF:ConnectionCreate:Class]

# [DEF:list_connections:Function]
# @PURPOSE: Lists all saved connections.
# @PRE: Database session is active.
# @POST: Returns list of connection configs.
# @PARAM: db (Session) - Database session.
# @RETURN: List[ConnectionSchema] - List of connections.
@router.get("", response_model=List[ConnectionSchema])
async def list_connections(db: Session = Depends(get_db)):
    with belief_scope("ConnectionsRouter.list_connections"):
        connections = db.query(ConnectionConfig).all()
        return connections
# [/DEF:list_connections:Function]

# [DEF:create_connection:Function]
# @PURPOSE: Creates a new connection configuration.
# @PRE: Connection name is unique.
# @POST: Connection is saved to DB.
# @PARAM: connection (ConnectionCreate) - Config data.
# @PARAM: db (Session) - Database session.
# @RETURN: ConnectionSchema - Created connection.
@router.post("", response_model=ConnectionSchema, status_code=status.HTTP_201_CREATED)
async def create_connection(connection: ConnectionCreate, db: Session = Depends(get_db)):
    with belief_scope("ConnectionsRouter.create_connection", f"name={connection.name}"):
        db_connection = ConnectionConfig(**connection.dict())
        db.add(db_connection)
        db.commit()
        db.refresh(db_connection)
        logger.info(f"[ConnectionsRouter.create_connection][Success] Created connection {db_connection.id}")
        return db_connection
# [/DEF:create_connection:Function]

# [DEF:delete_connection:Function]
# @PURPOSE: Deletes a connection configuration.
# @PRE: Connection ID exists.
# @POST: Connection is removed from DB.
# @PARAM: connection_id (str) - ID to delete.
# @PARAM: db (Session) - Database session.
# @RETURN: None.
@router.delete("/{connection_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_connection(connection_id: str, db: Session = Depends(get_db)):
    with belief_scope("ConnectionsRouter.delete_connection", f"id={connection_id}"):
        db_connection = db.query(ConnectionConfig).filter(ConnectionConfig.id == connection_id).first()
        if not db_connection:
            logger.error(f"[ConnectionsRouter.delete_connection][State] Connection {connection_id} not found")
            raise HTTPException(status_code=404, detail="Connection not found")
        db.delete(db_connection)
        db.commit()
        logger.info(f"[ConnectionsRouter.delete_connection][Success] Deleted connection {connection_id}")
        return
# [/DEF:delete_connection:Function]
# [/DEF:ConnectionsRouter:Module]

View File

@@ -0,0 +1,122 @@
# [DEF:backend.src.api.routes.environments:Module]
#
# @SEMANTICS: api, environments, superset, databases
# @PURPOSE: API endpoints for listing environments and their databases.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
#
# @INVARIANT: Environment IDs must exist in the configuration.

# [SECTION: IMPORTS]
from fastapi import APIRouter, Depends, HTTPException
from typing import List, Dict, Optional
from ...dependencies import get_config_manager, get_scheduler_service
from ...core.superset_client import SupersetClient
from pydantic import BaseModel, Field
from ...core.config_models import Environment as EnvModel
from ...core.logger import belief_scope
# [/SECTION]

router = APIRouter()

# [DEF:ScheduleSchema:DataClass]
class ScheduleSchema(BaseModel):
    enabled: bool = False
    cron_expression: str = Field(..., pattern=r'^(@(annually|yearly|monthly|weekly|daily|hourly|reboot))|((((\d+,)*\d+|(\d+(\/|-)\d+)|\d+|\*) ?){5,7})$')
# [/DEF:ScheduleSchema:DataClass]

# [DEF:EnvironmentResponse:DataClass]
class EnvironmentResponse(BaseModel):
    id: str
    name: str
    url: str
    backup_schedule: Optional[ScheduleSchema] = None
# [/DEF:EnvironmentResponse:DataClass]

# [DEF:DatabaseResponse:DataClass]
class DatabaseResponse(BaseModel):
    uuid: str
    database_name: str
    engine: Optional[str]
# [/DEF:DatabaseResponse:DataClass]

# [DEF:get_environments:Function]
# @PURPOSE: List all configured environments.
# @PRE: config_manager is injected via Depends.
# @POST: Returns a list of EnvironmentResponse objects.
# @RETURN: List[EnvironmentResponse]
@router.get("", response_model=List[EnvironmentResponse])
async def get_environments(config_manager=Depends(get_config_manager)):
    with belief_scope("get_environments"):
        envs = config_manager.get_environments()
        # Ensure envs is a list
        if not isinstance(envs, list):
            envs = []
        return [
            EnvironmentResponse(
                id=e.id,
                name=e.name,
                url=e.url,
                backup_schedule=ScheduleSchema(
                    enabled=e.backup_schedule.enabled,
                    cron_expression=e.backup_schedule.cron_expression
                ) if getattr(e, 'backup_schedule', None) else None
            ) for e in envs
        ]
# [/DEF:get_environments:Function]

# [DEF:update_environment_schedule:Function]
# @PURPOSE: Update backup schedule for an environment.
# @PRE: Environment id exists, schedule is valid ScheduleSchema.
# @POST: Backup schedule updated and scheduler reloaded.
# @PARAM: id (str) - The environment ID.
# @PARAM: schedule (ScheduleSchema) - The new schedule.
@router.put("/{id}/schedule")
async def update_environment_schedule(
    id: str,
    schedule: ScheduleSchema,
    config_manager=Depends(get_config_manager),
    scheduler_service=Depends(get_scheduler_service)
):
    with belief_scope("update_environment_schedule", f"id={id}"):
        envs = config_manager.get_environments()
        env = next((e for e in envs if e.id == id), None)
        if not env:
            raise HTTPException(status_code=404, detail="Environment not found")
        # Update environment config
        env.backup_schedule.enabled = schedule.enabled
        env.backup_schedule.cron_expression = schedule.cron_expression
        config_manager.update_environment(id, env)
        # Refresh scheduler
        scheduler_service.load_schedules()
        return {"message": "Schedule updated successfully"}
# [/DEF:update_environment_schedule:Function]

# [DEF:get_environment_databases:Function]
# @PURPOSE: Fetch the list of databases from a specific environment.
# @PRE: Environment id exists.
# @POST: Returns a list of database summaries from the environment.
# @PARAM: id (str) - The environment ID.
# @RETURN: List[Dict] - List of databases.
@router.get("/{id}/databases")
async def get_environment_databases(id: str, config_manager=Depends(get_config_manager)):
    with belief_scope("get_environment_databases", f"id={id}"):
        envs = config_manager.get_environments()
        env = next((e for e in envs if e.id == id), None)
        if not env:
            raise HTTPException(status_code=404, detail="Environment not found")
        try:
            # Initialize SupersetClient from environment config
            client = SupersetClient(env)
            return client.get_databases_summary()
        except Exception as e:
            raise HTTPException(status_code=500, detail=f"Failed to fetch databases: {str(e)}")
# [/DEF:get_environment_databases:Function]
# [/DEF:backend.src.api.routes.environments:Module]

View File

@@ -0,0 +1,303 @@
# [DEF:backend.src.api.routes.git:Module]
#
# @SEMANTICS: git, routes, api, fastapi, repository, deployment
# @PURPOSE: Provides FastAPI endpoints for Git integration operations.
# @LAYER: API
# @RELATION: USES -> src.services.git_service.GitService
# @RELATION: USES -> src.api.routes.git_schemas
# @RELATION: USES -> src.models.git
#
# @INVARIANT: All Git operations must be routed through GitService.
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional
import typing
from src.dependencies import get_config_manager
from src.core.database import get_db
from src.models.git import GitServerConfig, GitStatus, DeploymentEnvironment, GitRepository
from src.api.routes.git_schemas import (
GitServerConfigSchema, GitServerConfigCreate,
GitRepositorySchema, BranchSchema, BranchCreate,
BranchCheckout, CommitSchema, CommitCreate,
DeploymentEnvironmentSchema, DeployRequest, RepoInitRequest
)
from src.services.git_service import GitService
from src.core.logger import logger, belief_scope
router = APIRouter(prefix="/api/git", tags=["git"])
git_service = GitService()
# [DEF:get_git_configs:Function]
# @PURPOSE: List all configured Git servers.
# @RETURN: List[GitServerConfigSchema]
@router.get("/config", response_model=List[GitServerConfigSchema])
async def get_git_configs(db: Session = Depends(get_db)):
with belief_scope("get_git_configs"):
return db.query(GitServerConfig).all()
# [/DEF:get_git_configs:Function]
# [DEF:create_git_config:Function]
# @PURPOSE: Register a new Git server configuration.
# @PARAM: config (GitServerConfigCreate)
# @RETURN: GitServerConfigSchema
@router.post("/config", response_model=GitServerConfigSchema)
async def create_git_config(config: GitServerConfigCreate, db: Session = Depends(get_db)):
with belief_scope("create_git_config"):
db_config = GitServerConfig(**config.dict())
db.add(db_config)
db.commit()
db.refresh(db_config)
return db_config
# [/DEF:create_git_config:Function]
# [DEF:delete_git_config:Function]
# @PURPOSE: Remove a Git server configuration.
# @PARAM: config_id (str)
@router.delete("/config/{config_id}")
async def delete_git_config(config_id: str, db: Session = Depends(get_db)):
with belief_scope("delete_git_config"):
db_config = db.query(GitServerConfig).filter(GitServerConfig.id == config_id).first()
if not db_config:
raise HTTPException(status_code=404, detail="Configuration not found")
db.delete(db_config)
db.commit()
return {"status": "success", "message": "Configuration deleted"}
# [/DEF:delete_git_config:Function]
# [DEF:test_git_config:Function]
# @PURPOSE: Validate connection to a Git server using provided credentials.
# @PARAM: config (GitServerConfigCreate)
@router.post("/config/test")
async def test_git_config(config: GitServerConfigCreate):
with belief_scope("test_git_config"):
success = await git_service.test_connection(config.provider, config.url, config.pat)
if success:
return {"status": "success", "message": "Connection successful"}
else:
raise HTTPException(status_code=400, detail="Connection failed")
# [/DEF:test_git_config:Function]
# [DEF:init_repository:Function]
# @PURPOSE: Link a dashboard to a Git repository and perform initial clone/init.
# @PARAM: dashboard_id (int)
# @PARAM: init_data (RepoInitRequest)
@router.post("/repositories/{dashboard_id}/init")
async def init_repository(dashboard_id: int, init_data: RepoInitRequest, db: Session = Depends(get_db)):
with belief_scope("init_repository"):
# 1. Get config
config = db.query(GitServerConfig).filter(GitServerConfig.id == init_data.config_id).first()
if not config:
raise HTTPException(status_code=404, detail="Git configuration not found")
try:
# 2. Perform Git clone/init
logger.info(f"[init_repository][Action] Initializing repo for dashboard {dashboard_id}")
git_service.init_repo(dashboard_id, init_data.remote_url, config.pat)
# 3. Save to DB
repo_path = git_service._get_repo_path(dashboard_id)
db_repo = db.query(GitRepository).filter(GitRepository.dashboard_id == dashboard_id).first()
if not db_repo:
db_repo = GitRepository(
dashboard_id=dashboard_id,
config_id=config.id,
remote_url=init_data.remote_url,
local_path=repo_path
)
db.add(db_repo)
else:
db_repo.config_id = config.id
db_repo.remote_url = init_data.remote_url
db_repo.local_path = repo_path
db.commit()
logger.info(f"[init_repository][Coherence:OK] Repository initialized for dashboard {dashboard_id}")
return {"status": "success", "message": "Repository initialized"}
except Exception as e:
db.rollback()
logger.error(f"[init_repository][Coherence:Failed] Failed to init repository: {e}")
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:init_repository:Function]
# [DEF:get_branches:Function]
# @PURPOSE: List all branches for a dashboard's repository.
# @PARAM: dashboard_id (int)
# @RETURN: List[BranchSchema]
@router.get("/repositories/{dashboard_id}/branches", response_model=List[BranchSchema])
async def get_branches(dashboard_id: int):
with belief_scope("get_branches"):
try:
return git_service.list_branches(dashboard_id)
except Exception as e:
raise HTTPException(status_code=404, detail=str(e))
# [/DEF:get_branches:Function]
# [DEF:create_branch:Function]
# @PURPOSE: Create a new branch in the dashboard's repository.
# @PARAM: dashboard_id (int)
# @PARAM: branch_data (BranchCreate)
@router.post("/repositories/{dashboard_id}/branches")
async def create_branch(dashboard_id: int, branch_data: BranchCreate):
with belief_scope("create_branch"):
try:
git_service.create_branch(dashboard_id, branch_data.name, branch_data.from_branch)
return {"status": "success"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:create_branch:Function]
# [DEF:checkout_branch:Function]
# @PURPOSE: Switch the dashboard's repository to a specific branch.
# @PARAM: dashboard_id (int)
# @PARAM: checkout_data (BranchCheckout)
@router.post("/repositories/{dashboard_id}/checkout")
async def checkout_branch(dashboard_id: int, checkout_data: BranchCheckout):
with belief_scope("checkout_branch"):
try:
git_service.checkout_branch(dashboard_id, checkout_data.name)
return {"status": "success"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:checkout_branch:Function]
# [DEF:commit_changes:Function]
# @PURPOSE: Stage and commit changes in the dashboard's repository.
# @PARAM: dashboard_id (int)
# @PARAM: commit_data (CommitCreate)
@router.post("/repositories/{dashboard_id}/commit")
async def commit_changes(dashboard_id: int, commit_data: CommitCreate):
with belief_scope("commit_changes"):
try:
git_service.commit_changes(dashboard_id, commit_data.message, commit_data.files)
return {"status": "success"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:commit_changes:Function]
# [DEF:push_changes:Function]
# @PURPOSE: Push local commits to the remote repository.
# @PARAM: dashboard_id (int)
@router.post("/repositories/{dashboard_id}/push")
async def push_changes(dashboard_id: int):
with belief_scope("push_changes"):
try:
git_service.push_changes(dashboard_id)
return {"status": "success"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:push_changes:Function]
# [DEF:pull_changes:Function]
# @PURPOSE: Pull changes from the remote repository.
# @PARAM: dashboard_id (int)
@router.post("/repositories/{dashboard_id}/pull")
async def pull_changes(dashboard_id: int):
with belief_scope("pull_changes"):
try:
git_service.pull_changes(dashboard_id)
return {"status": "success"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:pull_changes:Function]
# [DEF:sync_dashboard:Function]
# @PURPOSE: Sync dashboard state from Superset to Git using the GitPlugin.
# @PARAM: dashboard_id (int)
# @PARAM: source_env_id (Optional[str])
@router.post("/repositories/{dashboard_id}/sync")
async def sync_dashboard(dashboard_id: int, source_env_id: typing.Optional[str] = None):
with belief_scope("sync_dashboard"):
try:
from src.plugins.git_plugin import GitPlugin
plugin = GitPlugin()
return await plugin.execute({
"operation": "sync",
"dashboard_id": dashboard_id,
"source_env_id": source_env_id
})
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:sync_dashboard:Function]
# [DEF:get_environments:Function]
# @PURPOSE: List all deployment environments.
# @RETURN: List[DeploymentEnvironmentSchema]
@router.get("/environments", response_model=List[DeploymentEnvironmentSchema])
async def get_environments(config_manager=Depends(get_config_manager)):
with belief_scope("get_environments"):
envs = config_manager.get_environments()
return [
DeploymentEnvironmentSchema(
id=e.id,
name=e.name,
superset_url=e.url,
is_active=True
) for e in envs
]
# [/DEF:get_environments:Function]
# [DEF:deploy_dashboard:Function]
# @PURPOSE: Deploy dashboard from Git to a target environment.
# @PARAM: dashboard_id (int)
# @PARAM: deploy_data (DeployRequest)
@router.post("/repositories/{dashboard_id}/deploy")
async def deploy_dashboard(dashboard_id: int, deploy_data: DeployRequest):
with belief_scope("deploy_dashboard"):
try:
from src.plugins.git_plugin import GitPlugin
plugin = GitPlugin()
return await plugin.execute({
"operation": "deploy",
"dashboard_id": dashboard_id,
"environment_id": deploy_data.environment_id
})
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:deploy_dashboard:Function]
# [DEF:get_history:Function]
# @PURPOSE: View commit history for a dashboard's repository.
# @PARAM: dashboard_id (int)
# @PARAM: limit (int)
# @RETURN: List[CommitSchema]
@router.get("/repositories/{dashboard_id}/history", response_model=List[CommitSchema])
async def get_history(dashboard_id: int, limit: int = 50):
with belief_scope("get_history"):
try:
return git_service.get_commit_history(dashboard_id, limit)
except Exception as e:
raise HTTPException(status_code=404, detail=str(e))
# [/DEF:get_history:Function]
# [DEF:get_repository_status:Function]
# @PURPOSE: Get current Git status for a dashboard repository.
# @PARAM: dashboard_id (int)
# @RETURN: dict
@router.get("/repositories/{dashboard_id}/status")
async def get_repository_status(dashboard_id: int):
with belief_scope("get_repository_status"):
try:
return git_service.get_status(dashboard_id)
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:get_repository_status:Function]
# [DEF:get_repository_diff:Function]
# @PURPOSE: Get Git diff for a dashboard repository.
# @PARAM: dashboard_id (int)
# @PARAM: file_path (Optional[str])
# @PARAM: staged (bool)
# @RETURN: str
@router.get("/repositories/{dashboard_id}/diff")
async def get_repository_diff(dashboard_id: int, file_path: Optional[str] = None, staged: bool = False):
with belief_scope("get_repository_diff"):
try:
diff_text = git_service.get_diff(dashboard_id, file_path, staged)
return diff_text
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
# [/DEF:get_repository_diff:Function]
# [/DEF:backend.src.api.routes.git:Module]

View File

@@ -0,0 +1,130 @@
# [DEF:backend.src.api.routes.git_schemas:Module]
#
# @SEMANTICS: git, schemas, pydantic, api, contracts
# @PURPOSE: Defines Pydantic models for the Git integration API layer.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.models.git
#
# @INVARIANT: All schemas must be compatible with the FastAPI router.
from pydantic import BaseModel, Field
from typing import List, Optional
from datetime import datetime
from uuid import UUID
from src.models.git import GitProvider, GitStatus, SyncStatus

# [DEF:GitServerConfigBase:Class]
class GitServerConfigBase(BaseModel):
    name: str = Field(..., description="Display name for the Git server")
    provider: GitProvider = Field(..., description="Git provider (GITHUB, GITLAB, GITEA)")
    url: str = Field(..., description="Server base URL")
    pat: str = Field(..., description="Personal Access Token")
    default_repository: Optional[str] = Field(None, description="Default repository path (org/repo)")
# [/DEF:GitServerConfigBase:Class]

# [DEF:GitServerConfigCreate:Class]
class GitServerConfigCreate(GitServerConfigBase):
    """Schema for creating a new Git server configuration."""
    pass
# [/DEF:GitServerConfigCreate:Class]

# [DEF:GitServerConfigSchema:Class]
class GitServerConfigSchema(GitServerConfigBase):
    """Schema for representing a Git server configuration with metadata."""
    id: str
    status: GitStatus
    last_validated: datetime

    class Config:
        from_attributes = True
# [/DEF:GitServerConfigSchema:Class]

# [DEF:GitRepositorySchema:Class]
class GitRepositorySchema(BaseModel):
    """Schema for tracking a local Git repository linked to a dashboard."""
    id: str
    dashboard_id: int
    config_id: str
    remote_url: str
    local_path: str
    current_branch: str
    sync_status: SyncStatus

    class Config:
        from_attributes = True
# [/DEF:GitRepositorySchema:Class]

# [DEF:BranchSchema:Class]
class BranchSchema(BaseModel):
    """Schema for representing a Git branch."""
    name: str
    commit_hash: str
    is_remote: bool
    last_updated: datetime
# [/DEF:BranchSchema:Class]

# [DEF:CommitSchema:Class]
class CommitSchema(BaseModel):
    """Schema for representing a Git commit."""
    hash: str
    author: str
    email: str
    timestamp: datetime
    message: str
    files_changed: List[str]
# [/DEF:CommitSchema:Class]

# [DEF:BranchCreate:Class]
class BranchCreate(BaseModel):
    """Schema for branch creation requests."""
    name: str
    from_branch: str
# [/DEF:BranchCreate:Class]

# [DEF:BranchCheckout:Class]
class BranchCheckout(BaseModel):
    """Schema for branch checkout requests."""
    name: str
# [/DEF:BranchCheckout:Class]

# [DEF:CommitCreate:Class]
class CommitCreate(BaseModel):
    """Schema for staging and committing changes."""
    message: str
    files: List[str]
# [/DEF:CommitCreate:Class]

# [DEF:ConflictResolution:Class]
class ConflictResolution(BaseModel):
    """Schema for resolving merge conflicts."""
    file_path: str
    resolution: str = Field(pattern="^(mine|theirs|manual)$")
    content: Optional[str] = None
# [/DEF:ConflictResolution:Class]

# [DEF:DeploymentEnvironmentSchema:Class]
class DeploymentEnvironmentSchema(BaseModel):
    """Schema for representing a target deployment environment."""
    id: str
    name: str
    superset_url: str
    is_active: bool

    class Config:
        from_attributes = True
# [/DEF:DeploymentEnvironmentSchema:Class]

# [DEF:DeployRequest:Class]
class DeployRequest(BaseModel):
    """Schema for deployment requests."""
    environment_id: str
# [/DEF:DeployRequest:Class]

# [DEF:RepoInitRequest:Class]
class RepoInitRequest(BaseModel):
    """Schema for repository initialization requests."""
    config_id: str
    remote_url: str
# [/DEF:RepoInitRequest:Class]
# [/DEF:backend.src.api.routes.git_schemas:Module]

View File

@@ -0,0 +1,120 @@
# [DEF:backend.src.api.routes.mappings:Module]
#
# @SEMANTICS: api, mappings, database, fuzzy-matching
# @PURPOSE: API endpoints for managing database mappings and getting suggestions.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.core.database
# @RELATION: DEPENDS_ON -> backend.src.services.mapping_service
#
# @INVARIANT: Mappings are persisted in the SQLite database.

# [SECTION: IMPORTS]
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional
from ...core.logger import belief_scope
from ...dependencies import get_config_manager
from ...core.database import get_db
from ...models.mapping import DatabaseMapping
from pydantic import BaseModel
# [/SECTION]

router = APIRouter(prefix="/api/mappings", tags=["mappings"])

# [DEF:MappingCreate:DataClass]
class MappingCreate(BaseModel):
    source_env_id: str
    target_env_id: str
    source_db_uuid: str
    target_db_uuid: str
    source_db_name: str
    target_db_name: str
# [/DEF:MappingCreate:DataClass]

# [DEF:MappingResponse:DataClass]
class MappingResponse(BaseModel):
    id: str
    source_env_id: str
    target_env_id: str
    source_db_uuid: str
    target_db_uuid: str
    source_db_name: str
    target_db_name: str

    class Config:
        from_attributes = True
# [/DEF:MappingResponse:DataClass]

# [DEF:SuggestRequest:DataClass]
class SuggestRequest(BaseModel):
    source_env_id: str
    target_env_id: str
# [/DEF:SuggestRequest:DataClass]

# [DEF:get_mappings:Function]
# @PURPOSE: List all saved database mappings.
# @PRE: db session is injected.
# @POST: Returns filtered list of DatabaseMapping records.
@router.get("", response_model=List[MappingResponse])
async def get_mappings(
    source_env_id: Optional[str] = None,
    target_env_id: Optional[str] = None,
    db: Session = Depends(get_db)
):
    with belief_scope("get_mappings"):
        query = db.query(DatabaseMapping)
        if source_env_id:
            query = query.filter(DatabaseMapping.source_env_id == source_env_id)
        if target_env_id:
            query = query.filter(DatabaseMapping.target_env_id == target_env_id)
        return query.all()
# [/DEF:get_mappings:Function]

# [DEF:create_mapping:Function]
# @PURPOSE: Create or update a database mapping.
# @PRE: mapping is valid MappingCreate, db session is injected.
# @POST: DatabaseMapping created or updated in database.
@router.post("", response_model=MappingResponse)
async def create_mapping(mapping: MappingCreate, db: Session = Depends(get_db)):
    with belief_scope("create_mapping"):
        # Check if mapping already exists
        existing = db.query(DatabaseMapping).filter(
            DatabaseMapping.source_env_id == mapping.source_env_id,
            DatabaseMapping.target_env_id == mapping.target_env_id,
            DatabaseMapping.source_db_uuid == mapping.source_db_uuid
        ).first()
        if existing:
            existing.target_db_uuid = mapping.target_db_uuid
            existing.target_db_name = mapping.target_db_name
            db.commit()
            db.refresh(existing)
            return existing
        new_mapping = DatabaseMapping(**mapping.dict())
        db.add(new_mapping)
        db.commit()
        db.refresh(new_mapping)
        return new_mapping
# [/DEF:create_mapping:Function]

# [DEF:suggest_mappings_api:Function]
# @PURPOSE: Get suggested mappings based on fuzzy matching.
# @PRE: request is valid SuggestRequest, config_manager is injected.
# @POST: Returns mapping suggestions.
@router.post("/suggest")
async def suggest_mappings_api(
    request: SuggestRequest,
    config_manager=Depends(get_config_manager)
):
    with belief_scope("suggest_mappings_api"):
        from backend.src.services.mapping_service import MappingService
        service = MappingService(config_manager)
        try:
            return await service.get_suggestions(request.source_env_id, request.target_env_id)
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))
# [/DEF:suggest_mappings_api:Function]
# [/DEF:backend.src.api.routes.mappings:Module]

View File

@@ -0,0 +1,71 @@
# [DEF:backend.src.api.routes.migration:Module]
# @SEMANTICS: api, migration, dashboards
# @PURPOSE: API endpoints for migration operations.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.dependencies
# @RELATION: DEPENDS_ON -> backend.src.models.dashboard
from fastapi import APIRouter, Depends, HTTPException
from typing import List, Dict
from ...dependencies import get_config_manager, get_task_manager
from ...models.dashboard import DashboardMetadata, DashboardSelection
from ...core.superset_client import SupersetClient
from ...core.logger import belief_scope

router = APIRouter(prefix="/api", tags=["migration"])

# [DEF:get_dashboards:Function]
# @PURPOSE: Fetch all dashboards from the specified environment for the grid.
# @PRE: Environment ID must be valid.
# @POST: Returns a list of dashboard metadata.
# @PARAM: env_id (str) - The ID of the environment to fetch from.
# @RETURN: List[DashboardMetadata]
@router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata])
async def get_dashboards(env_id: str, config_manager=Depends(get_config_manager)):
    with belief_scope("get_dashboards", f"env_id={env_id}"):
        environments = config_manager.get_environments()
        env = next((e for e in environments if e.id == env_id), None)
        if not env:
            raise HTTPException(status_code=404, detail="Environment not found")
        client = SupersetClient(env)
        dashboards = client.get_dashboards_summary()
        return dashboards
# [/DEF:get_dashboards:Function]

# [DEF:execute_migration:Function]
# @PURPOSE: Execute the migration of selected dashboards.
# @PRE: Selection must be valid and environments must exist.
# @POST: Starts the migration task and returns the task ID.
# @PARAM: selection (DashboardSelection) - The dashboards to migrate.
# @RETURN: Dict - {"task_id": str, "message": str}
@router.post("/migration/execute")
async def execute_migration(selection: DashboardSelection, config_manager=Depends(get_config_manager), task_manager=Depends(get_task_manager)):
    with belief_scope("execute_migration"):
        # Validate environments exist
        environments = config_manager.get_environments()
        env_ids = {e.id for e in environments}
        if selection.source_env_id not in env_ids or selection.target_env_id not in env_ids:
            raise HTTPException(status_code=400, detail="Invalid source or target environment")
        # Create migration task with debug logging
        from ...core.logger import logger
        # Include replace_db_config in the task parameters
        task_params = selection.dict()
        task_params['replace_db_config'] = selection.replace_db_config
        logger.info(f"Creating migration task with params: {task_params}")
        logger.info(f"Available environments: {env_ids}")
        logger.info(f"Source env: {selection.source_env_id}, Target env: {selection.target_env_id}")
        try:
            task = await task_manager.create_task("superset-migration", task_params)
            logger.info(f"Task created successfully: {task.id}")
            return {"task_id": task.id, "message": "Migration initiated"}
        except Exception as e:
            logger.error(f"Task creation failed: {e}")
            raise HTTPException(status_code=500, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:execute_migration:Function]
# [/DEF:backend.src.api.routes.migration:Module]

View File

@@ -8,15 +8,23 @@ from fastapi import APIRouter, Depends
from ...core.plugin_base import PluginConfig
from ...dependencies import get_plugin_loader
from ...core.logger import belief_scope
router = APIRouter()
@router.get("/", response_model=List[PluginConfig])
# [DEF:list_plugins:Function]
# @PURPOSE: Retrieve a list of all available plugins.
# @PRE: plugin_loader is injected via Depends.
# @POST: Returns a list of PluginConfig objects.
# @RETURN: List[PluginConfig] - List of registered plugins.
@router.get("", response_model=List[PluginConfig])
async def list_plugins(
plugin_loader = Depends(get_plugin_loader)
):
"""
Retrieve a list of all available plugins.
"""
return plugin_loader.get_all_plugin_configs()
# [/DEF]
with belief_scope("list_plugins"):
"""
Retrieve a list of all available plugins.
"""
return plugin_loader.get_all_plugin_configs()
# [/DEF:list_plugins:Function]
# [/DEF:PluginsRouter:Module]

View File

@@ -15,9 +15,8 @@ from typing import List
from ...core.config_models import AppConfig, Environment, GlobalSettings
from ...dependencies import get_config_manager
from ...core.config_manager import ConfigManager
from ...core.logger import logger
from superset_tool.client import SupersetClient
from superset_tool.models import SupersetConfig
from ...core.logger import logger, belief_scope
from ...core.superset_client import SupersetClient
import os
# [/SECTION]
@@ -25,43 +24,54 @@ router = APIRouter()
# [DEF:get_settings:Function]
# @PURPOSE: Retrieves all application settings.
# @PRE: Config manager is available.
# @POST: Returns masked AppConfig.
# @RETURN: AppConfig - The current configuration.
@router.get("/", response_model=AppConfig)
@router.get("", response_model=AppConfig)
async def get_settings(config_manager: ConfigManager = Depends(get_config_manager)):
logger.info("[get_settings][Entry] Fetching all settings")
with belief_scope("get_settings"):
logger.info("[get_settings][Entry] Fetching all settings")
config = config_manager.get_config().copy(deep=True)
# Mask passwords
for env in config.environments:
if env.password:
env.password = "********"
return config
# [/DEF:get_settings]
# [/DEF:get_settings:Function]
# [DEF:update_global_settings:Function]
# @PURPOSE: Updates global application settings.
# @PRE: New settings are provided.
# @POST: Global settings are updated.
# @PARAM: settings (GlobalSettings) - The new global settings.
# @RETURN: GlobalSettings - The updated settings.
@router.patch("/global", response_model=GlobalSettings)
async def update_global_settings(
settings: GlobalSettings,
settings: GlobalSettings,
config_manager: ConfigManager = Depends(get_config_manager)
):
logger.info("[update_global_settings][Entry] Updating global settings")
with belief_scope("update_global_settings"):
logger.info("[update_global_settings][Entry] Updating global settings")
config_manager.update_global_settings(settings)
return settings
# [/DEF:update_global_settings]
# [/DEF:update_global_settings:Function]
# [DEF:get_environments:Function]
# @PURPOSE: Lists all configured Superset environments.
# @PRE: Config manager is available.
# @POST: Returns list of environments.
# @RETURN: List[Environment] - List of environments.
@router.get("/environments", response_model=List[Environment])
async def get_environments(config_manager: ConfigManager = Depends(get_config_manager)):
logger.info("[get_environments][Entry] Fetching environments")
with belief_scope("get_environments"):
logger.info("[get_environments][Entry] Fetching environments")
return config_manager.get_environments()
# [/DEF:get_environments]
# [/DEF:get_environments:Function]
# [DEF:add_environment:Function]
# @PURPOSE: Adds a new Superset environment.
# @PRE: Environment data is valid and reachable.
# @POST: Environment is added to config.
# @PARAM: env (Environment) - The environment to add.
# @RETURN: Environment - The added environment.
@router.post("/environments", response_model=Environment)
@@ -69,21 +79,12 @@ async def add_environment(
env: Environment,
config_manager: ConfigManager = Depends(get_config_manager)
):
logger.info(f"[add_environment][Entry] Adding environment {env.id}")
with belief_scope("add_environment"):
logger.info(f"[add_environment][Entry] Adding environment {env.id}")
# Validate connection before adding
try:
superset_config = SupersetConfig(
env=env.name,
base_url=env.url,
auth={
"provider": "db",
"username": env.username,
"password": env.password,
"refresh": "true"
}
)
client = SupersetClient(config=superset_config)
client = SupersetClient(env)
client.get_dashboards(query={"page_size": 1})
except Exception as e:
logger.error(f"[add_environment][Coherence:Failed] Connection validation failed: {e}")
@@ -91,20 +92,23 @@ async def add_environment(
config_manager.add_environment(env)
return env
# [/DEF:add_environment]
# [/DEF:add_environment:Function]
# [DEF:update_environment:Function]
# @PURPOSE: Updates an existing Superset environment.
# @PRE: ID and valid environment data are provided.
# @POST: Environment is updated in config.
# @PARAM: id (str) - The ID of the environment to update.
# @PARAM: env (Environment) - The updated environment data.
# @RETURN: Environment - The updated environment.
@router.put("/environments/{id}", response_model=Environment)
async def update_environment(
id: str,
env: Environment,
id: str,
env: Environment,
config_manager: ConfigManager = Depends(get_config_manager)
):
logger.info(f"[update_environment][Entry] Updating environment {id}")
with belief_scope("update_environment"):
logger.info(f"[update_environment][Entry] Updating environment {id}")
# If password is masked, we need the real one for validation
env_to_validate = env.copy(deep=True)
@@ -115,17 +119,7 @@ async def update_environment(
# Validate connection before updating
try:
superset_config = SupersetConfig(
env=env_to_validate.name,
base_url=env_to_validate.url,
auth={
"provider": "db",
"username": env_to_validate.username,
"password": env_to_validate.password,
"refresh": "true"
}
)
client = SupersetClient(config=superset_config)
client = SupersetClient(env_to_validate)
client.get_dashboards(query={"page_size": 1})
except Exception as e:
logger.error(f"[update_environment][Coherence:Failed] Connection validation failed: {e}")
@@ -134,23 +128,28 @@ async def update_environment(
if config_manager.update_environment(id, env):
return env
raise HTTPException(status_code=404, detail=f"Environment {id} not found")
# [/DEF:update_environment]
# [/DEF:update_environment:Function]
# [DEF:delete_environment:Function]
# @PURPOSE: Deletes a Superset environment.
# @PRE: ID is provided.
# @POST: Environment is removed from config.
# @PARAM: id (str) - The ID of the environment to delete.
@router.delete("/environments/{id}")
async def delete_environment(
id: str,
id: str,
config_manager: ConfigManager = Depends(get_config_manager)
):
logger.info(f"[delete_environment][Entry] Deleting environment {id}")
with belief_scope("delete_environment"):
logger.info(f"[delete_environment][Entry] Deleting environment {id}")
config_manager.delete_environment(id)
return {"message": f"Environment {id} deleted"}
# [/DEF:delete_environment]
# [/DEF:delete_environment:Function]
# [DEF:test_environment_connection:Function]
# @PURPOSE: Tests the connection to a Superset environment.
# @PRE: ID is provided.
# @POST: Returns success or error status.
# @PARAM: id (str) - The ID of the environment to test.
# @RETURN: dict - Success message or error.
@router.post("/environments/{id}/test")
@@ -158,7 +157,8 @@ async def test_environment_connection(
id: str,
config_manager: ConfigManager = Depends(get_config_manager)
):
logger.info(f"[test_environment_connection][Entry] Testing environment {id}")
with belief_scope("test_environment_connection"):
logger.info(f"[test_environment_connection][Entry] Testing environment {id}")
# Find environment
env = next((e for e in config_manager.get_environments() if e.id == id), None)
@@ -166,21 +166,8 @@ async def test_environment_connection(
raise HTTPException(status_code=404, detail=f"Environment {id} not found")
try:
# Create SupersetConfig
# Note: SupersetConfig expects 'auth' dict with specific keys
superset_config = SupersetConfig(
env=env.name,
base_url=env.url,
auth={
"provider": "db", # Defaulting to db for now
"username": env.username,
"password": env.password,
"refresh": "true"
}
)
# Initialize client (this will trigger authentication)
client = SupersetClient(config=superset_config)
client = SupersetClient(env)
# Try a simple request to verify
client.get_dashboards(query={"page_size": 1})
@@ -190,10 +177,12 @@ async def test_environment_connection(
except Exception as e:
logger.error(f"[test_environment_connection][Coherence:Failed] Connection failed for {id}: {e}")
return {"status": "error", "message": str(e)}
# [/DEF:test_environment_connection]
# [/DEF:test_environment_connection:Function]
# [DEF:validate_backup_path:Function]
# @PURPOSE: Validates if a backup path exists and is writable.
# @PRE: Path is provided in path_data.
# @POST: Returns success or error status.
# @PARAM: path (str) - The path to validate.
# @RETURN: dict - Validation result.
@router.post("/validate-path")
@@ -201,11 +190,12 @@ async def validate_backup_path(
path_data: dict,
config_manager: ConfigManager = Depends(get_config_manager)
):
path = path_data.get("path")
if not path:
raise HTTPException(status_code=400, detail="Path is required")
logger.info(f"[validate_backup_path][Entry] Validating path: {path}")
with belief_scope("validate_backup_path"):
path = path_data.get("path")
if not path:
raise HTTPException(status_code=400, detail="Path is required")
logger.info(f"[validate_backup_path][Entry] Validating path: {path}")
valid, message = config_manager.validate_path(path)
@@ -213,6 +203,6 @@ async def validate_backup_path(
return {"status": "error", "message": message}
return {"status": "success", "message": message}
# [/DEF:validate_backup_path]
# [/DEF:validate_backup_path:Function]
# [/DEF:SettingsRouter]
# [/DEF:SettingsRouter:Module]
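The settings routes above now share a single connection-check pattern: build SupersetClient directly from the Environment model, then issue the cheapest authenticated request. A minimal sketch of that pattern as a standalone helper, assuming SupersetClient(env) and get_dashboards behave as shown in the diff; the helper name is illustrative:

from backend.src.core.config_models import Environment
from backend.src.core.superset_client import SupersetClient

def check_environment_connection(env: Environment) -> tuple[bool, str]:
    # page_size=1 keeps the probe request as small as possible
    try:
        client = SupersetClient(env)
        client.get_dashboards(query={"page_size": 1})
        return True, "connection ok"
    except Exception as e:
        # mirror the route handlers: report the failure instead of raising
        return False, str(e)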

View File

@@ -3,11 +3,12 @@
# @PURPOSE: Defines the FastAPI router for task-related endpoints, allowing clients to create, list, and get the status of tasks.
# @LAYER: UI (API)
# @RELATION: Depends on the TaskManager. It is included by the main app.
from typing import List, Dict, Any
from typing import List, Dict, Any, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel
from ...core.logger import belief_scope
from ...core.task_manager import TaskManager, Task
from ...core.task_manager import TaskManager, Task, TaskStatus, LogEntry
from ...dependencies import get_task_manager
router = APIRouter()
@@ -16,7 +17,20 @@ class CreateTaskRequest(BaseModel):
plugin_id: str
params: Dict[str, Any]
@router.post("/", response_model=Task, status_code=status.HTTP_201_CREATED)
class ResolveTaskRequest(BaseModel):
resolution_params: Dict[str, Any]
class ResumeTaskRequest(BaseModel):
passwords: Dict[str, str]
@router.post("", response_model=Task, status_code=status.HTTP_201_CREATED)
# [DEF:create_task:Function]
# @PURPOSE: Create and start a new task for a given plugin.
# @PARAM: request (CreateTaskRequest) - The request body containing plugin_id and params.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: plugin_id must exist and params must be valid for that plugin.
# @POST: A new task is created and started.
# @RETURN: Task - The created task instance.
async def create_task(
request: CreateTaskRequest,
task_manager: TaskManager = Depends(get_task_manager)
@@ -24,25 +38,48 @@ async def create_task(
"""
Create and start a new task for a given plugin.
"""
try:
task = await task_manager.create_task(
plugin_id=request.plugin_id,
params=request.params
)
return task
except ValueError as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e))
with belief_scope("create_task"):
try:
task = await task_manager.create_task(
plugin_id=request.plugin_id,
params=request.params
)
return task
except ValueError as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e))
# [/DEF:create_task:Function]
@router.get("/", response_model=List[Task])
@router.get("", response_model=List[Task])
# [DEF:list_tasks:Function]
# @PURPOSE: Retrieve a list of tasks with pagination and optional status filter.
# @PARAM: limit (int) - Maximum number of tasks to return.
# @PARAM: offset (int) - Number of tasks to skip.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_manager must be available.
# @POST: Returns a list of tasks.
# @RETURN: List[Task] - List of tasks.
async def list_tasks(
limit: int = 10,
offset: int = 0,
status: Optional[TaskStatus] = None,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Retrieve a list of all tasks.
Retrieve a list of tasks with pagination and optional status filter.
"""
return task_manager.get_all_tasks()
with belief_scope("list_tasks"):
return task_manager.get_tasks(limit=limit, offset=offset, status=status)
# [/DEF:list_tasks:Function]
@router.get("/{task_id}", response_model=Task)
# [DEF:get_task:Function]
# @PURPOSE: Retrieve the details of a specific task.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_id must exist.
# @POST: Returns task details or raises 404.
# @RETURN: Task - The task details.
async def get_task(
task_id: str,
task_manager: TaskManager = Depends(get_task_manager)
@@ -50,8 +87,101 @@ async def get_task(
"""
Retrieve the details of a specific task.
"""
task = task_manager.get_task(task_id)
if not task:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
return task
# [/DEF]
with belief_scope("get_task"):
task = task_manager.get_task(task_id)
if not task:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
return task
# [/DEF:get_task:Function]
@router.get("/{task_id}/logs", response_model=List[LogEntry])
# [DEF:get_task_logs:Function]
# @PURPOSE: Retrieve logs for a specific task.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_id must exist.
# @POST: Returns a list of log entries or raises 404.
# @RETURN: List[LogEntry] - List of log entries.
async def get_task_logs(
task_id: str,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Retrieve logs for a specific task.
"""
with belief_scope("get_task_logs"):
task = task_manager.get_task(task_id)
if not task:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Task not found")
return task_manager.get_task_logs(task_id)
# [/DEF:get_task_logs:Function]
@router.post("/{task_id}/resolve", response_model=Task)
# [DEF:resolve_task:Function]
# @PURPOSE: Resolve a task that is awaiting mapping.
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: request (ResolveTaskRequest) - The resolution parameters.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task must be in AWAITING_MAPPING status.
# @POST: Task is resolved and resumes execution.
# @RETURN: Task - The updated task object.
async def resolve_task(
task_id: str,
request: ResolveTaskRequest,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Resolve a task that is awaiting mapping.
"""
with belief_scope("resolve_task"):
try:
await task_manager.resolve_task(task_id, request.resolution_params)
return task_manager.get_task(task_id)
except ValueError as e:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
# [/DEF:resolve_task:Function]
@router.post("/{task_id}/resume", response_model=Task)
# [DEF:resume_task:Function]
# @PURPOSE: Resume a task that is awaiting input (e.g., passwords).
# @PARAM: task_id (str) - The unique identifier of the task.
# @PARAM: request (ResumeTaskRequest) - The input (passwords).
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task must be in AWAITING_INPUT status.
# @POST: Task resumes execution with provided input.
# @RETURN: Task - The updated task object.
async def resume_task(
task_id: str,
request: ResumeTaskRequest,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Resume a task that is awaiting input (e.g., passwords).
"""
with belief_scope("resume_task"):
try:
task_manager.resume_task_with_password(task_id, request.passwords)
return task_manager.get_task(task_id)
except ValueError as e:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
# [/DEF:resume_task:Function]
@router.delete("", status_code=status.HTTP_204_NO_CONTENT)
# [DEF:clear_tasks:Function]
# @PURPOSE: Clear tasks matching the status filter.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @PARAM: task_manager (TaskManager) - The task manager instance.
# @PRE: task_manager is available.
# @POST: Tasks are removed from memory/persistence.
async def clear_tasks(
status: Optional[TaskStatus] = None,
task_manager: TaskManager = Depends(get_task_manager)
):
"""
Clear tasks matching the status filter. If no filter, clears all non-running tasks.
"""
with belief_scope("clear_tasks", f"status={status}"):
task_manager.clear_tasks(status)
return
# [/DEF:clear_tasks:Function]
# [/DEF:TasksRouter:Module]
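A short client-side sketch of the expanded task lifecycle (create, inspect, resume), assuming the router is mounted under /api/tasks as in app.py below, that the Task payload exposes an id field, and that the base URL and plugin parameters are placeholders:

import requests

BASE = "http://localhost:8000"  # assumed dev address

# create and start a task
task = requests.post(f"{BASE}/api/tasks", json={
    "plugin_id": "superset-backup",
    "params": {"environment_id": "prod"},
}).json()

# check its current status
status = requests.get(f"{BASE}/api/tasks/{task['id']}").json()["status"]

# if the task paused for credentials, resume it via the new endpoint
if status == "AWAITING_INPUT":
    requests.post(f"{BASE}/api/tasks/{task['id']}/resume",
                  json={"passwords": {"prod": "********"}})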

View File

@@ -6,21 +6,20 @@
import sys
from pathlib import Path
# Add project root to sys.path to allow importing superset_tool
# Assuming app.py is in backend/src/
# project_root is used for static files mounting
project_root = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(project_root))
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, Request, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
import asyncio
import os
from .dependencies import get_task_manager
from .core.logger import logger
from .api.routes import plugins, tasks, settings
from .dependencies import get_task_manager, get_scheduler_service
from .core.logger import logger, belief_scope
from .api.routes import plugins, tasks, settings, environments, mappings, migration, connections, git
from .core.database import init_db
# [DEF:App:Global]
# @SEMANTICS: app, fastapi, instance
@@ -30,6 +29,31 @@ app = FastAPI(
description="API for managing Superset automation tools and plugins.",
version="1.0.0",
)
# [/DEF:App:Global]
# [DEF:startup_event:Function]
# @PURPOSE: Handles application startup tasks, such as starting the scheduler.
# @PRE: None.
# @POST: Scheduler is started.
# Startup event
@app.on_event("startup")
async def startup_event():
with belief_scope("startup_event"):
scheduler = get_scheduler_service()
scheduler.start()
# [/DEF:startup_event:Function]
# [DEF:shutdown_event:Function]
# @PURPOSE: Handles application shutdown tasks, such as stopping the scheduler.
# @PRE: None.
# @POST: Scheduler is stopped.
# Shutdown event
@app.on_event("shutdown")
async def shutdown_event():
with belief_scope("shutdown_event"):
scheduler = get_scheduler_service()
scheduler.stop()
# [/DEF:shutdown_event:Function]
# Configure CORS
app.add_middleware(
@@ -41,31 +65,67 @@ app.add_middleware(
)
# [DEF:log_requests:Function]
# @PURPOSE: Middleware to log incoming HTTP requests and their response status.
# @PRE: request is a FastAPI Request object.
# @POST: Logs request and response details.
# @PARAM: request (Request) - The incoming request object.
# @PARAM: call_next (Callable) - The next middleware or route handler.
@app.middleware("http")
async def log_requests(request: Request, call_next):
with belief_scope("log_requests", f"{request.method} {request.url.path}"):
logger.info(f"[DEBUG] Incoming request: {request.method} {request.url.path}")
response = await call_next(request)
logger.info(f"[DEBUG] Response status: {response.status_code} for {request.url.path}")
return response
# [/DEF:log_requests:Function]
# Include API routes
app.include_router(plugins.router, prefix="/api/plugins", tags=["Plugins"])
app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"])
app.include_router(settings.router, prefix="/api/settings", tags=["Settings"])
app.include_router(connections.router, prefix="/api/settings/connections", tags=["Connections"])
app.include_router(environments.router, prefix="/api/environments", tags=["Environments"])
app.include_router(mappings.router)
app.include_router(migration.router)
app.include_router(git.router)
# [DEF:WebSocketEndpoint:Endpoint]
# @SEMANTICS: websocket, logs, streaming, real-time
# @PURPOSE: Provides a WebSocket endpoint for clients to connect to and receive real-time log entries for a specific task.
# [DEF:websocket_endpoint:Function]
# @PURPOSE: Provides a WebSocket endpoint for real-time log streaming of a task.
# @PRE: task_id must be a valid task ID.
# @POST: WebSocket connection is managed and logs are streamed until disconnect.
@app.websocket("/ws/logs/{task_id}")
async def websocket_endpoint(websocket: WebSocket, task_id: str):
await websocket.accept()
with belief_scope("websocket_endpoint", f"task_id={task_id}"):
await websocket.accept()
logger.info(f"WebSocket connection accepted for task {task_id}")
task_manager = get_task_manager()
queue = await task_manager.subscribe_logs(task_id)
try:
# Send initial logs if any
# Stream new logs
logger.info(f"Starting log stream for task {task_id}")
# Send initial logs first to build context
initial_logs = task_manager.get_task_logs(task_id)
for log_entry in initial_logs:
# Convert datetime to string for JSON serialization
log_dict = log_entry.dict()
log_dict['timestamp'] = log_dict['timestamp'].isoformat()
await websocket.send_json(log_dict)
# Stream new logs
logger.info(f"Starting log stream for task {task_id}")
# Force a check for AWAITING_INPUT status immediately upon connection
# This ensures that if the task is already waiting when the user connects, they get the prompt.
task = task_manager.get_task(task_id)
if task and task.status == "AWAITING_INPUT" and task.input_request:
# Construct a synthetic log entry to trigger the frontend handler
# This is a bit of a hack but avoids changing the websocket protocol significantly
synthetic_log = {
"timestamp": task.logs[-1].timestamp.isoformat() if task.logs else "2024-01-01T00:00:00",
"level": "INFO",
"message": "Task paused for user input (Connection Re-established)",
"context": {"input_request": task.input_request}
}
await websocket.send_json(synthetic_log)
while True:
log_entry = await queue.get()
log_dict = log_entry.dict()
@@ -77,7 +137,9 @@ async def websocket_endpoint(websocket: WebSocket, task_id: str):
if "Task completed successfully" in log_entry.message or "Task failed" in log_entry.message:
# Wait a bit to ensure client receives the last message
await asyncio.sleep(2)
break
# DO NOT BREAK here - allow client to keep connection open if they want to review logs
# or until they disconnect. Breaking closes the socket immediately.
# break
except WebSocketDisconnect:
logger.info(f"WebSocket connection disconnected for task {task_id}")
@@ -85,8 +147,7 @@ async def websocket_endpoint(websocket: WebSocket, task_id: str):
logger.error(f"WebSocket error for task {task_id}: {e}")
finally:
task_manager.unsubscribe_logs(task_id, queue)
# [/DEF]
# [/DEF:websocket_endpoint:Function]
# [DEF:StaticFiles:Mount]
# @SEMANTICS: static, frontend, spa
@@ -96,18 +157,33 @@ if frontend_path.exists():
app.mount("/_app", StaticFiles(directory=str(frontend_path / "_app")), name="static")
# Serve other static files from the root of build directory
# [DEF:serve_spa:Function]
# @PURPOSE: Serves frontend static files or index.html for SPA routing.
# @PRE: file_path is requested by the client.
# @POST: Returns the requested file or index.html as a fallback.
@app.get("/{file_path:path}")
async def serve_spa(file_path: str):
full_path = frontend_path / file_path
if full_path.is_file():
return FileResponse(str(full_path))
# Fallback to index.html for SPA routing
return FileResponse(str(frontend_path / "index.html"))
with belief_scope("serve_spa", f"path={file_path}"):
# Don't serve SPA for API routes that fell through
if file_path.startswith("api/"):
logger.info(f"[DEBUG] API route fell through to serve_spa: {file_path}")
raise HTTPException(status_code=404, detail=f"API endpoint not found: {file_path}")
full_path = frontend_path / file_path
if full_path.is_file():
return FileResponse(str(full_path))
# Fallback to index.html for SPA routing
return FileResponse(str(frontend_path / "index.html"))
# [/DEF:serve_spa:Function]
else:
# [DEF:RootEndpoint:Endpoint]
# @SEMANTICS: root, healthcheck
# @PURPOSE: A simple root endpoint to confirm that the API is running.
# [DEF:read_root:Function]
# @PURPOSE: A simple root endpoint to confirm that the API is running when frontend is missing.
# @PRE: None.
# @POST: Returns a JSON message indicating API status.
@app.get("/")
async def read_root():
return {"message": "Superset Tools API is running (Frontend build not found)"}
# [/DEF]
with belief_scope("read_root"):
return {"message": "Superset Tools API is running (Frontend build not found)"}
# [/DEF:read_root:Function]
# [/DEF:StaticFiles:Mount]
# [/DEF:AppModule:Module]
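A minimal consumer for the /ws/logs/{task_id} endpoint above, using the third-party websockets package purely for illustration; each message is a JSON-serialized LogEntry, and since the server no longer closes the socket on completion, the client decides when to stop:

import asyncio
import json
import websockets  # third-party client library

async def follow_logs(task_id: str):
    async with websockets.connect(f"ws://localhost:8000/ws/logs/{task_id}") as ws:
        while True:
            entry = json.loads(await ws.recv())
            print(entry["timestamp"], entry["level"], entry["message"])
            if "Task completed successfully" in entry["message"] or "Task failed" in entry["message"]:
                break

asyncio.run(follow_logs("some-task-id"))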

View File

@@ -16,7 +16,7 @@ import os
from pathlib import Path
from typing import Optional, List
from .config_models import AppConfig, Environment, GlobalSettings
from .logger import logger
from .logger import logger, configure_logger, belief_scope
# [/SECTION]
# [DEF:ConfigManager:Class]
@@ -30,27 +30,33 @@ class ConfigManager:
# @POST: self.config is an instance of AppConfig
# @PARAM: config_path (str) - Path to the configuration file.
def __init__(self, config_path: str = "config.json"):
# 1. Runtime check of @PRE
assert isinstance(config_path, str) and config_path, "config_path must be a non-empty string"
logger.info(f"[ConfigManager][Entry] Initializing with {config_path}")
# 2. Logic implementation
self.config_path = Path(config_path)
self.config: AppConfig = self._load_config()
# 3. Runtime check of @POST
assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"
logger.info(f"[ConfigManager][Exit] Initialized")
# [/DEF:__init__]
with belief_scope("__init__"):
# 1. Runtime check of @PRE
assert isinstance(config_path, str) and config_path, "config_path must be a non-empty string"
logger.info(f"[ConfigManager][Entry] Initializing with {config_path}")
# 2. Logic implementation
self.config_path = Path(config_path)
self.config: AppConfig = self._load_config()
# Configure logger with loaded settings
configure_logger(self.config.settings.logging)
# 3. Runtime check of @POST
assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"
logger.info(f"[ConfigManager][Exit] Initialized")
# [/DEF:__init__:Function]
# [DEF:_load_config:Function]
# @PURPOSE: Loads the configuration from disk or creates a default one.
# @PRE: self.config_path is set.
# @POST: isinstance(return, AppConfig)
# @RETURN: AppConfig - The loaded or default configuration.
def _load_config(self) -> AppConfig:
logger.debug(f"[_load_config][Entry] Loading from {self.config_path}")
with belief_scope("_load_config"):
logger.debug(f"[_load_config][Entry] Loading from {self.config_path}")
if not self.config_path.exists():
logger.info(f"[_load_config][Action] Config file not found. Creating default.")
@@ -69,18 +75,22 @@ class ConfigManager:
return config
except Exception as e:
logger.error(f"[_load_config][Coherence:Failed] Error loading config: {e}")
# Fallback but try to preserve existing settings if possible?
# For now, return default to be safe, but log the error prominently.
return AppConfig(
environments=[],
settings=GlobalSettings(backup_path="backups")
)
# [/DEF:_load_config]
# [/DEF:_load_config:Function]
# [DEF:_save_config_to_disk:Function]
# @PURPOSE: Saves the provided configuration object to disk.
# @PRE: isinstance(config, AppConfig)
# @POST: Configuration saved to disk.
# @PARAM: config (AppConfig) - The configuration to save.
def _save_config_to_disk(self, config: AppConfig):
logger.debug(f"[_save_config_to_disk][Entry] Saving to {self.config_path}")
with belief_scope("_save_config_to_disk"):
logger.debug(f"[_save_config_to_disk][Entry] Saving to {self.config_path}")
# 1. Runtime check of @PRE
assert isinstance(config, AppConfig), "config must be an instance of AppConfig"
@@ -92,27 +102,35 @@ class ConfigManager:
logger.info(f"[_save_config_to_disk][Action] Configuration saved")
except Exception as e:
logger.error(f"[_save_config_to_disk][Coherence:Failed] Failed to save: {e}")
# [/DEF:_save_config_to_disk]
# [/DEF:_save_config_to_disk:Function]
# [DEF:save:Function]
# @PURPOSE: Saves the current configuration state to disk.
# @PRE: self.config is set.
# @POST: self._save_config_to_disk called.
def save(self):
self._save_config_to_disk(self.config)
# [/DEF:save]
with belief_scope("save"):
self._save_config_to_disk(self.config)
# [/DEF:save:Function]
# [DEF:get_config:Function]
# @PURPOSE: Returns the current configuration.
# @PRE: self.config is set.
# @POST: Returns self.config.
# @RETURN: AppConfig - The current configuration.
def get_config(self) -> AppConfig:
return self.config
# [/DEF:get_config]
with belief_scope("get_config"):
return self.config
# [/DEF:get_config:Function]
# [DEF:update_global_settings:Function]
# @PURPOSE: Updates the global settings and persists the change.
# @PRE: isinstance(settings, GlobalSettings)
# @POST: self.config.settings updated and saved.
# @PARAM: settings (GlobalSettings) - The new global settings.
def update_global_settings(self, settings: GlobalSettings):
logger.info(f"[update_global_settings][Entry] Updating settings")
with belief_scope("update_global_settings"):
logger.info(f"[update_global_settings][Entry] Updating settings")
# 1. Runtime check of @PRE
assert isinstance(settings, GlobalSettings), "settings must be an instance of GlobalSettings"
@@ -120,16 +138,22 @@ class ConfigManager:
# 2. Logic implementation
self.config.settings = settings
self.save()
# Reconfigure logger with new settings
configure_logger(settings.logging)
logger.info(f"[update_global_settings][Exit] Settings updated")
# [/DEF:update_global_settings]
# [/DEF:update_global_settings:Function]
# [DEF:validate_path:Function]
# @PURPOSE: Validates if a path exists and is writable.
# @PRE: path is a string.
# @POST: Returns (bool, str) status.
# @PARAM: path (str) - The path to validate.
# @RETURN: tuple (bool, str) - (is_valid, message)
def validate_path(self, path: str) -> tuple[bool, str]:
p = os.path.abspath(path)
with belief_scope("validate_path"):
p = os.path.abspath(path)
if not os.path.exists(p):
try:
os.makedirs(p, exist_ok=True)
@@ -140,28 +164,50 @@ class ConfigManager:
return False, "Path is not writable"
return True, "Path is valid and writable"
# [/DEF:validate_path]
# [/DEF:validate_path:Function]
# [DEF:get_environments:Function]
# @PURPOSE: Returns the list of configured environments.
# @PRE: self.config is set.
# @POST: Returns list of environments.
# @RETURN: List[Environment] - List of environments.
def get_environments(self) -> List[Environment]:
return self.config.environments
# [/DEF:get_environments]
with belief_scope("get_environments"):
return self.config.environments
# [/DEF:get_environments:Function]
# [DEF:has_environments:Function]
# @PURPOSE: Checks if at least one environment is configured.
# @PRE: self.config is set.
# @POST: Returns boolean indicating if environments exist.
# @RETURN: bool - True if at least one environment exists.
def has_environments(self) -> bool:
return len(self.config.environments) > 0
# [/DEF:has_environments]
with belief_scope("has_environments"):
return len(self.config.environments) > 0
# [/DEF:has_environments:Function]
# [DEF:get_environment:Function]
# @PURPOSE: Returns a single environment by ID.
# @PRE: self.config is set and isinstance(env_id, str) and len(env_id) > 0.
# @POST: Returns Environment object if found, None otherwise.
# @PARAM: env_id (str) - The ID of the environment to retrieve.
# @RETURN: Optional[Environment] - The environment with the given ID, or None.
def get_environment(self, env_id: str) -> Optional[Environment]:
with belief_scope("get_environment"):
for env in self.config.environments:
if env.id == env_id:
return env
return None
# [/DEF:get_environment:Function]
# [DEF:add_environment:Function]
# @PURPOSE: Adds a new environment to the configuration.
# @PRE: isinstance(env, Environment)
# @POST: Environment added or updated in self.config.environments.
# @PARAM: env (Environment) - The environment to add.
def add_environment(self, env: Environment):
logger.info(f"[add_environment][Entry] Adding environment {env.id}")
with belief_scope("add_environment"):
logger.info(f"[add_environment][Entry] Adding environment {env.id}")
# 1. Runtime check of @PRE
assert isinstance(env, Environment), "env must be an instance of Environment"
@@ -173,16 +219,18 @@ class ConfigManager:
self.save()
logger.info(f"[add_environment][Exit] Environment added")
# [/DEF:add_environment]
# [/DEF:add_environment:Function]
# [DEF:update_environment:Function]
# @PURPOSE: Updates an existing environment.
# @PRE: isinstance(env_id, str) and len(env_id) > 0 and isinstance(updated_env, Environment)
# @POST: Returns True if environment was found and updated.
# @PARAM: env_id (str) - The ID of the environment to update.
# @PARAM: updated_env (Environment) - The updated environment data.
# @RETURN: bool - True if updated, False otherwise.
def update_environment(self, env_id: str, updated_env: Environment) -> bool:
logger.info(f"[update_environment][Entry] Updating {env_id}")
with belief_scope("update_environment"):
logger.info(f"[update_environment][Entry] Updating {env_id}")
# 1. Runtime check of @PRE
assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
@@ -202,14 +250,16 @@ class ConfigManager:
logger.warning(f"[update_environment][Coherence:Failed] Environment {env_id} not found")
return False
# [/DEF:update_environment]
# [/DEF:update_environment:Function]
# [DEF:delete_environment:Function]
# @PURPOSE: Deletes an environment by ID.
# @PRE: isinstance(env_id, str) and len(env_id) > 0
# @POST: Environment removed from self.config.environments if it existed.
# @PARAM: env_id (str) - The ID of the environment to delete.
def delete_environment(self, env_id: str):
logger.info(f"[delete_environment][Entry] Deleting {env_id}")
with belief_scope("delete_environment"):
logger.info(f"[delete_environment][Entry] Deleting {env_id}")
# 1. Runtime check of @PRE
assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
@@ -223,8 +273,8 @@ class ConfigManager:
logger.info(f"[delete_environment][Action] Deleted {env_id}")
else:
logger.warning(f"[delete_environment][Coherence:Failed] Environment {env_id} not found")
# [/DEF:delete_environment]
# [/DEF:delete_environment:Function]
# [/DEF:ConfigManager]
# [/DEF:ConfigManager:Class]
# [/DEF:ConfigManagerModule]
# [/DEF:ConfigManagerModule:Module]

View File

@@ -8,6 +8,13 @@
from pydantic import BaseModel, Field
from typing import List, Optional
# [DEF:Schedule:DataClass]
# @PURPOSE: Represents a backup schedule configuration.
class Schedule(BaseModel):
enabled: bool = False
cron_expression: str = "0 0 * * *" # Default: daily at midnight
# [/DEF:Schedule:DataClass]
# [DEF:Environment:DataClass]
# @PURPOSE: Represents a Superset environment configuration.
class Environment(BaseModel):
@@ -16,21 +23,40 @@ class Environment(BaseModel):
url: str
username: str
password: str # Will be masked in UI
verify_ssl: bool = True
timeout: int = 30
is_default: bool = False
# [/DEF:Environment]
backup_schedule: Schedule = Field(default_factory=Schedule)
# [/DEF:Environment:DataClass]
# [DEF:LoggingConfig:DataClass]
# @PURPOSE: Defines the configuration for the application's logging system.
class LoggingConfig(BaseModel):
level: str = "INFO"
file_path: Optional[str] = "logs/app.log"
max_bytes: int = 10 * 1024 * 1024
backup_count: int = 5
enable_belief_state: bool = True
# [/DEF:LoggingConfig:DataClass]
# [DEF:GlobalSettings:DataClass]
# @PURPOSE: Represents global application settings.
class GlobalSettings(BaseModel):
backup_path: str
default_environment_id: Optional[str] = None
# [/DEF:GlobalSettings]
logging: LoggingConfig = Field(default_factory=LoggingConfig)
# Task retention settings
task_retention_days: int = 30
task_retention_limit: int = 100
pagination_limit: int = 10
# [/DEF:GlobalSettings:DataClass]
# [DEF:AppConfig:DataClass]
# @PURPOSE: The root configuration model containing all application settings.
class AppConfig(BaseModel):
environments: List[Environment] = []
settings: GlobalSettings
# [/DEF:AppConfig]
# [/DEF:AppConfig:DataClass]
# [/DEF:ConfigModels]
# [/DEF:ConfigModels:Module]
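A small construction sketch showing how the new Schedule, LoggingConfig and retention fields compose; all values are illustrative, and the id and name fields on Environment are assumed from the routes that reference them:

from backend.src.core.config_models import (
    AppConfig, Environment, GlobalSettings, LoggingConfig, Schedule,
)

config = AppConfig(
    environments=[
        Environment(
            id="prod",
            name="Production",
            url="https://superset.example.com",
            username="admin",
            password="secret",
            backup_schedule=Schedule(enabled=True, cron_expression="0 2 * * *"),
        )
    ],
    settings=GlobalSettings(
        backup_path="backups",
        logging=LoggingConfig(level="DEBUG", file_path="logs/app.log"),
        task_retention_days=14,  # override the 30-day default
    ),
)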

View File

@@ -0,0 +1,87 @@
# [DEF:backend.src.core.database:Module]
#
# @SEMANTICS: database, sqlite, sqlalchemy, session, persistence
# @PURPOSE: Configures the SQLite database connection and session management.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> sqlalchemy
# @RELATION: USES -> backend.src.models.mapping
#
# @INVARIANT: A single engine instance is used for the entire application.
# [SECTION: IMPORTS]
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from ..models.mapping import Base
# Import models to ensure they're registered with Base
from ..models.task import TaskRecord
from ..models.connection import ConnectionConfig
from ..models.git import GitServerConfig, GitRepository, DeploymentEnvironment
from .logger import belief_scope
import os
# [/SECTION]
# [DEF:DATABASE_URL:Constant]
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./mappings.db")
# [/DEF:DATABASE_URL:Constant]
# [DEF:TASKS_DATABASE_URL:Constant]
TASKS_DATABASE_URL = os.getenv("TASKS_DATABASE_URL", "sqlite:///./tasks.db")
# [/DEF:TASKS_DATABASE_URL:Constant]
# [DEF:engine:Variable]
engine = create_engine(DATABASE_URL, connect_args={"check_same_thread": False})
# [/DEF:engine:Variable]
# [DEF:tasks_engine:Variable]
tasks_engine = create_engine(TASKS_DATABASE_URL, connect_args={"check_same_thread": False})
# [/DEF:tasks_engine:Variable]
# [DEF:SessionLocal:Class]
# @PURPOSE: A session factory for the main mappings database.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# [/DEF:SessionLocal:Class]
# [DEF:TasksSessionLocal:Class]
# @PURPOSE: A session factory for the tasks execution database.
TasksSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=tasks_engine)
# [/DEF:TasksSessionLocal:Class]
# [DEF:init_db:Function]
# @PURPOSE: Initializes the database by creating all tables.
# @PRE: engine and tasks_engine are initialized.
# @POST: Database tables created.
def init_db():
with belief_scope("init_db"):
Base.metadata.create_all(bind=engine)
Base.metadata.create_all(bind=tasks_engine)
# [/DEF:init_db:Function]
# [DEF:get_db:Function]
# @PURPOSE: Dependency for getting a database session.
# @PRE: SessionLocal is initialized.
# @POST: Session is closed after use.
# @RETURN: Generator[Session, None, None]
def get_db():
with belief_scope("get_db"):
db = SessionLocal()
try:
yield db
finally:
db.close()
# [/DEF:get_db:Function]
# [DEF:get_tasks_db:Function]
# @PURPOSE: Dependency for getting a tasks database session.
# @PRE: TasksSessionLocal is initialized.
# @POST: Session is closed after use.
# @RETURN: Generator[Session, None, None]
def get_tasks_db():
with belief_scope("get_tasks_db"):
db = TasksSessionLocal()
try:
yield db
finally:
db.close()
# [/DEF:get_tasks_db:Function]
# [/DEF:backend.src.core.database:Module]
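A brief sketch of consuming the two session factories from a route via standard FastAPI dependency injection; the route path is illustrative:

from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.orm import Session

from backend.src.core.database import get_db, get_tasks_db

router = APIRouter()

@router.get("/health/db")
def db_health(db: Session = Depends(get_db), tasks_db: Session = Depends(get_tasks_db)):
    # each generator dependency opens a session and closes it after the response
    db.execute(text("SELECT 1"))
    tasks_db.execute(text("SELECT 1"))
    return {"status": "ok"}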

View File

@@ -4,12 +4,38 @@
# @LAYER: Core
# @RELATION: Used by the main application and other modules to log events. The WebSocketLogHandler is used by the WebSocket endpoint in app.py.
import logging
import threading
from datetime import datetime
from typing import Dict, Any, List, Optional
from collections import deque
from contextlib import contextmanager
from logging.handlers import RotatingFileHandler
from pydantic import BaseModel, Field
# Thread-local storage for belief state
_belief_state = threading.local()
# Global flag for belief state logging
_enable_belief_state = True
# [DEF:BeliefFormatter:Class]
# @PURPOSE: Custom logging formatter that adds belief state prefixes to log messages.
class BeliefFormatter(logging.Formatter):
# [DEF:format:Function]
# @PURPOSE: Formats the log record, adding belief state context if available.
# @PRE: record is a logging.LogRecord.
# @POST: Returns formatted string.
# @PARAM: record (logging.LogRecord) - The log record to format.
# @RETURN: str - The formatted log message.
def format(self, record):
anchor_id = getattr(_belief_state, 'anchor_id', None)
if anchor_id:
record.msg = f"[{anchor_id}][Action] {record.msg}"
return super().format(record)
# [/DEF:format:Function]
# [/DEF:BeliefFormatter:Class]
# Re-using LogEntry from task_manager for consistency
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
@@ -20,7 +46,86 @@ class LogEntry(BaseModel):
message: str
context: Optional[Dict[str, Any]] = None
# [/DEF]
# [/DEF:LogEntry:Class]
# [DEF:belief_scope:Function]
# @PURPOSE: Context manager for structured Belief State logging.
# @PARAM: anchor_id (str) - The identifier for the current semantic block.
# @PARAM: message (str) - Optional entry message.
# @PRE: anchor_id must be provided.
# @POST: Thread-local belief state is updated and entry/exit logs are generated.
@contextmanager
def belief_scope(anchor_id: str, message: str = ""):
# Log Entry if enabled
if _enable_belief_state:
entry_msg = f"[{anchor_id}][Entry]"
if message:
entry_msg += f" {message}"
logger.info(entry_msg)
# Set thread-local anchor_id
old_anchor = getattr(_belief_state, 'anchor_id', None)
_belief_state.anchor_id = anchor_id
try:
yield
# Log Coherence OK and Exit
logger.info(f"[{anchor_id}][Coherence:OK]")
if _enable_belief_state:
logger.info(f"[{anchor_id}][Exit]")
except Exception as e:
# Log Coherence Failed
logger.info(f"[{anchor_id}][Coherence:Failed] {str(e)}")
raise
finally:
# Restore old anchor
_belief_state.anchor_id = old_anchor
# [/DEF:belief_scope:Function]
# [DEF:configure_logger:Function]
# @PURPOSE: Configures the logger with the provided logging settings.
# @PRE: config is a valid LoggingConfig instance.
# @POST: Logger level, handlers, and belief state flag are updated.
# @PARAM: config (LoggingConfig) - The logging configuration.
def configure_logger(config):
global _enable_belief_state
_enable_belief_state = config.enable_belief_state
# Set logger level
level = getattr(logging, config.level.upper(), logging.INFO)
logger.setLevel(level)
# Remove existing file handlers
handlers_to_remove = [h for h in logger.handlers if isinstance(h, RotatingFileHandler)]
for h in handlers_to_remove:
logger.removeHandler(h)
h.close()
# Add file handler if file_path is set
if config.file_path:
import os
from pathlib import Path
log_file = Path(config.file_path)
log_file.parent.mkdir(parents=True, exist_ok=True)
file_handler = RotatingFileHandler(
config.file_path,
maxBytes=config.max_bytes,
backupCount=config.backup_count
)
file_handler.setFormatter(BeliefFormatter(
'[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
))
logger.addHandler(file_handler)
# Update existing handlers' formatters to BeliefFormatter
for handler in logger.handlers:
if not isinstance(handler, RotatingFileHandler):
handler.setFormatter(BeliefFormatter(
'[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
))
# [/DEF:configure_logger:Function]
# [DEF:WebSocketLogHandler:Class]
# @SEMANTICS: logging, handler, websocket, buffer
@@ -30,12 +135,23 @@ class WebSocketLogHandler(logging.Handler):
A logging handler that stores log records and can be extended to send them
over WebSockets.
"""
# [DEF:__init__:Function]
# @PURPOSE: Initializes the handler with a fixed-capacity buffer.
# @PRE: capacity is an integer.
# @POST: Instance initialized with empty deque.
# @PARAM: capacity (int) - Maximum number of logs to keep in memory.
def __init__(self, capacity: int = 1000):
super().__init__()
self.log_buffer: deque[LogEntry] = deque(maxlen=capacity)
# In a real implementation, you'd have a way to manage active WebSocket connections
# e.g., self.active_connections: Set[WebSocket] = set()
# [/DEF:__init__:Function]
# [DEF:emit:Function]
# @PURPOSE: Captures a log record, formats it, and stores it in the buffer.
# @PRE: record is a logging.LogRecord.
# @POST: Log is added to the log_buffer.
# @PARAM: record (logging.LogRecord) - The log record to emit.
def emit(self, record: logging.LogRecord):
try:
log_entry = LogEntry(
@@ -56,23 +172,42 @@ class WebSocketLogHandler(logging.Handler):
# Example: for ws in self.active_connections: await ws.send_json(log_entry.dict())
except Exception:
self.handleError(record)
# [/DEF:emit:Function]
# [DEF:get_recent_logs:Function]
# @PURPOSE: Returns a list of recent log entries from the buffer.
# @PRE: None.
# @POST: Returns list of LogEntry objects.
# @RETURN: List[LogEntry] - List of buffered log entries.
def get_recent_logs(self) -> List[LogEntry]:
"""
Returns a list of recent log entries from the buffer.
"""
return list(self.log_buffer)
# [/DEF:get_recent_logs:Function]
# [/DEF]
# [/DEF:WebSocketLogHandler:Class]
# [DEF:Logger:Global]
# @SEMANTICS: logger, global, instance
# @PURPOSE: The global logger instance for the application, configured with both a console handler and the custom WebSocket handler.
logger = logging.getLogger("superset_tools_app")
# [DEF:believed:Function]
# @PURPOSE: A decorator that wraps a function in a belief scope.
# @PARAM: anchor_id (str) - The identifier for the semantic block.
def believed(anchor_id: str):
def decorator(func):
def wrapper(*args, **kwargs):
with belief_scope(anchor_id):
return func(*args, **kwargs)
return wrapper
return decorator
# [/DEF:believed:Function]
logger.setLevel(logging.INFO)
# Create a formatter
formatter = logging.Formatter(
formatter = BeliefFormatter(
'[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
)
@@ -89,4 +224,5 @@ logger.addHandler(websocket_log_handler)
# Example usage:
# logger.info("Application started", extra={"context_key": "context_value"})
# logger.error("An error occurred", exc_info=True)
# [/DEF]
# [/DEF:Logger:Global]
# [/DEF:LoggerModule:Module]
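A usage sketch for the pieces defined above (configure_logger plus belief_scope); the LoggingConfig values and the function name are illustrative:

from backend.src.core.config_models import LoggingConfig
from backend.src.core.logger import belief_scope, configure_logger, logger

# apply level, rotation and the belief-state flag, as ConfigManager does at startup
configure_logger(LoggingConfig(level="DEBUG", file_path="logs/app.log"))

def rebuild_index(name: str):
    with belief_scope("rebuild_index", f"name={name}"):
        # emitted as "[rebuild_index][Action] starting rebuild" by BeliefFormatter
        logger.info("starting rebuild")
        # an exception raised here would log "[rebuild_index][Coherence:Failed] ..." and re-raise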

View File

@@ -0,0 +1,104 @@
# [DEF:backend.src.core.migration_engine:Module]
#
# @SEMANTICS: migration, engine, zip, yaml, transformation
# @PURPOSE: Handles the interception and transformation of Superset asset ZIP archives.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> PyYAML
#
# @INVARIANT: ZIP structure must be preserved after transformation.
# [SECTION: IMPORTS]
import zipfile
import yaml
import os
import shutil
import tempfile
from pathlib import Path
from typing import Dict
from .logger import logger, belief_scope
# [/SECTION]
# [DEF:MigrationEngine:Class]
# @PURPOSE: Engine for transforming Superset export ZIPs.
class MigrationEngine:
# [DEF:transform_zip:Function]
# @PURPOSE: Extracts ZIP, replaces database UUIDs in YAMLs, and re-packages.
# @PARAM: zip_path (str) - Path to the source ZIP file.
# @PARAM: output_path (str) - Path where the transformed ZIP will be saved.
# @PARAM: db_mapping (Dict[str, str]) - Mapping of source UUID to target UUID.
# @PARAM: strip_databases (bool) - Whether to remove the databases directory from the archive.
# @PRE: zip_path must point to a valid Superset export archive.
# @POST: Transformed archive is saved to output_path.
# @RETURN: bool - True if successful.
def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str], strip_databases: bool = True) -> bool:
"""
Transform a Superset export ZIP by replacing database UUIDs.
"""
with belief_scope("MigrationEngine.transform_zip"):
with tempfile.TemporaryDirectory() as temp_dir_str:
temp_dir = Path(temp_dir_str)
try:
# 1. Extract
logger.info(f"[MigrationEngine.transform_zip][Action] Extracting ZIP: {zip_path}")
with zipfile.ZipFile(zip_path, 'r') as zf:
zf.extractall(temp_dir)
# 2. Transform YAMLs
# Datasets are usually in datasets/*.yaml
dataset_files = list(temp_dir.glob("**/datasets/**/*.yaml")) + list(temp_dir.glob("**/datasets/*.yaml"))
dataset_files = list(set(dataset_files))
logger.info(f"[MigrationEngine.transform_zip][State] Found {len(dataset_files)} dataset files.")
for ds_file in dataset_files:
logger.info(f"[MigrationEngine.transform_zip][Action] Transforming dataset: {ds_file}")
self._transform_yaml(ds_file, db_mapping)
# 3. Re-package
logger.info(f"[MigrationEngine.transform_zip][Action] Re-packaging ZIP to: {output_path} (strip_databases={strip_databases})")
with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as zf:
for root, dirs, files in os.walk(temp_dir):
rel_root = Path(root).relative_to(temp_dir)
if strip_databases and "databases" in rel_root.parts:
logger.info(f"[MigrationEngine.transform_zip][Action] Skipping file in databases directory: {rel_root}")
continue
for file in files:
file_path = Path(root) / file
arcname = file_path.relative_to(temp_dir)
zf.write(file_path, arcname)
return True
except Exception as e:
logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
return False
# [/DEF:transform_zip:Function]
# [DEF:_transform_yaml:Function]
# @PURPOSE: Replaces database_uuid in a single YAML file.
# @PARAM: file_path (Path) - Path to the YAML file.
# @PARAM: db_mapping (Dict[str, str]) - UUID mapping dictionary.
# @PRE: file_path must exist and be readable.
# @POST: File is modified in-place if source UUID matches mapping.
def _transform_yaml(self, file_path: Path, db_mapping: Dict[str, str]):
with open(file_path, 'r') as f:
data = yaml.safe_load(f)
if not data:
return
# Superset dataset YAML structure:
# database_uuid: ...
source_uuid = data.get('database_uuid')
if source_uuid in db_mapping:
data['database_uuid'] = db_mapping[source_uuid]
with open(file_path, 'w') as f:
yaml.dump(data, f)
# [/DEF:_transform_yaml:Function]
# [/DEF:MigrationEngine:Class]
# [/DEF:backend.src.core.migration_engine:Module]
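A minimal driver for the engine above; the UUIDs and paths are placeholders, since real values come from the interactive mapping flow:

from backend.src.core.migration_engine import MigrationEngine

engine = MigrationEngine()

db_mapping = {
    "1b2c3d4e-aaaa-bbbb-cccc-111122223333": "9f8e7d6c-dddd-eeee-ffff-444455556666",
}

ok = engine.transform_zip(
    zip_path="exports/dashboard_export.zip",
    output_path="exports/dashboard_export_transformed.zip",
    db_mapping=db_mapping,
    strip_databases=True,  # drop databases/ so the target keeps its own connections
)
print("transformed" if ok else "transform failed, see logs")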

View File

@@ -1,5 +1,6 @@
from abc import ABC, abstractmethod
from typing import Dict, Any
from .logger import belief_scope
from pydantic import BaseModel, Field
@@ -17,44 +18,87 @@ class PluginBase(ABC):
@property
@abstractmethod
# [DEF:id:Function]
# @PURPOSE: Returns the unique identifier for the plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string ID.
# @RETURN: str - Plugin ID.
def id(self) -> str:
"""A unique identifier for the plugin."""
pass
with belief_scope("id"):
pass
# [/DEF:id:Function]
@property
@abstractmethod
# [DEF:name:Function]
# @PURPOSE: Returns the human-readable name of the plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string name.
# @RETURN: str - Plugin name.
def name(self) -> str:
"""A human-readable name for the plugin."""
pass
with belief_scope("name"):
pass
# [/DEF:name:Function]
@property
@abstractmethod
# [DEF:description:Function]
# @PURPOSE: Returns a brief description of the plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string description.
# @RETURN: str - Plugin description.
def description(self) -> str:
"""A brief description of what the plugin does."""
pass
with belief_scope("description"):
pass
# [/DEF:description:Function]
@property
@abstractmethod
# [DEF:version:Function]
# @PURPOSE: Returns the version of the plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string version.
# @RETURN: str - Plugin version.
def version(self) -> str:
"""The version of the plugin."""
pass
with belief_scope("version"):
pass
# [/DEF:version:Function]
@abstractmethod
# [DEF:get_schema:Function]
# @PURPOSE: Returns the JSON schema for the plugin's input parameters.
# @PRE: Plugin instance exists.
# @POST: Returns dict schema.
# @RETURN: Dict[str, Any] - JSON schema.
def get_schema(self) -> Dict[str, Any]:
"""
Returns the JSON schema for the plugin's input parameters.
This schema will be used to generate the frontend form.
"""
pass
with belief_scope("get_schema"):
pass
# [/DEF:get_schema:Function]
@abstractmethod
# [DEF:execute:Function]
# @PURPOSE: Executes the plugin's core logic.
# @PARAM: params (Dict[str, Any]) - Validated input parameters.
# @PRE: params must be a dictionary.
# @POST: Plugin execution is completed.
async def execute(self, params: Dict[str, Any]):
with belief_scope("execute"):
pass
"""
Executes the plugin's logic.
The `params` argument will be validated against the schema returned by `get_schema()`.
"""
pass
# [/DEF]
# [/DEF:execute:Function]
# [/DEF:PluginBase:Class]
# [DEF:PluginConfig:Class]
# @SEMANTICS: plugin, config, schema, pydantic
@@ -68,4 +112,4 @@ class PluginConfig(BaseModel):
description: str = Field(..., description="Brief description of what the plugin does")
version: str = Field(..., description="Version of the plugin")
input_schema: Dict[str, Any] = Field(..., description="JSON schema for input parameters", alias="schema")
# [/DEF]
# [/DEF:PluginConfig:Class]
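To make the abstract contract above concrete, a skeletal plugin sketch; the id, schema and behaviour are placeholders rather than an actual shipped plugin:

from typing import Any, Dict

from backend.src.core.plugin_base import PluginBase

class EchoPlugin(PluginBase):
    @property
    def id(self) -> str:
        return "echo"

    @property
    def name(self) -> str:
        return "Echo"

    @property
    def description(self) -> str:
        return "Returns the parameters it was given."

    @property
    def version(self) -> str:
        return "0.1.0"

    def get_schema(self) -> Dict[str, Any]:
        return {"type": "object", "properties": {"message": {"type": "string"}}}

    async def execute(self, params: Dict[str, Any]):
        return params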

View File

@@ -4,6 +4,7 @@ import sys # Added this line
from typing import Dict, Type, List, Optional
from .plugin_base import PluginBase, PluginConfig
from jsonschema import validate
from .logger import belief_scope
# [DEF:PluginLoader:Class]
# @SEMANTICS: plugin, loader, dynamic, import
@@ -16,16 +17,28 @@ class PluginLoader:
that inherit from PluginBase.
"""
# [DEF:__init__:Function]
# @PURPOSE: Initializes the PluginLoader with a directory to scan.
# @PRE: plugin_dir is a valid directory path.
# @POST: Plugins are loaded and registered.
# @PARAM: plugin_dir (str) - The directory containing plugin modules.
def __init__(self, plugin_dir: str):
self.plugin_dir = plugin_dir
self._plugins: Dict[str, PluginBase] = {}
self._plugin_configs: Dict[str, PluginConfig] = {}
self._load_plugins()
with belief_scope("__init__"):
self.plugin_dir = plugin_dir
self._plugins: Dict[str, PluginBase] = {}
self._plugin_configs: Dict[str, PluginConfig] = {}
self._load_plugins()
# [/DEF:__init__:Function]
# [DEF:_load_plugins:Function]
# @PURPOSE: Scans the plugin directory and loads all valid plugins.
# @PRE: plugin_dir exists or can be created.
# @POST: _load_module is called for each .py file.
def _load_plugins(self):
"""
Scans the plugin directory, imports modules, and registers valid plugins.
"""
with belief_scope("_load_plugins"):
"""
Scans the plugin directory, imports modules, and registers valid plugins.
"""
if not os.path.exists(self.plugin_dir):
os.makedirs(self.plugin_dir)
@@ -41,18 +54,31 @@ class PluginLoader:
module_name = filename[:-3]
file_path = os.path.join(self.plugin_dir, filename)
self._load_module(module_name, file_path)
# [/DEF:_load_plugins:Function]
# [DEF:_load_module:Function]
# @PURPOSE: Loads a single Python module and discovers PluginBase implementations.
# @PRE: module_name and file_path are valid.
# @POST: Plugin classes are instantiated and registered.
# @PARAM: module_name (str) - The name of the module.
# @PARAM: file_path (str) - The path to the module file.
def _load_module(self, module_name: str, file_path: str):
"""
Loads a single Python module and extracts PluginBase subclasses.
"""
with belief_scope("_load_module"):
"""
Loads a single Python module and extracts PluginBase subclasses.
"""
# Try to determine the correct package prefix based on how the app is running
if "backend.src" in __name__:
# For standalone execution, we need to handle the import differently
if __name__ == "__main__" or "test" in __name__:
# When running as standalone or in tests, use relative import
package_name = f"plugins.{module_name}"
elif "backend.src" in __name__:
package_prefix = "backend.src.plugins"
package_name = f"{package_prefix}.{module_name}"
else:
package_prefix = "src.plugins"
package_name = f"{package_prefix}.{module_name}"
package_name = f"{package_prefix}.{module_name}"
# print(f"DEBUG: Loading plugin {module_name} as {package_name}")
spec = importlib.util.spec_from_file_location(package_name, file_path)
if spec is None or spec.loader is None:
@@ -78,11 +104,18 @@ class PluginLoader:
self._register_plugin(plugin_instance)
except Exception as e:
print(f"Error instantiating plugin {attribute_name} in {module_name}: {e}") # Replace with proper logging
# [/DEF:_load_module:Function]
# [DEF:_register_plugin:Function]
# @PURPOSE: Registers a PluginBase instance and its configuration.
# @PRE: plugin_instance is a valid implementation of PluginBase.
# @POST: Plugin is added to _plugins and _plugin_configs.
# @PARAM: plugin_instance (PluginBase) - The plugin instance to register.
def _register_plugin(self, plugin_instance: PluginBase):
"""
Registers a valid plugin instance.
"""
with belief_scope("_register_plugin"):
"""
Registers a valid plugin instance.
"""
plugin_id = plugin_instance.id
if plugin_id in self._plugins:
print(f"Warning: Duplicate plugin ID '{plugin_id}' found. Skipping.") # Replace with proper logging
@@ -106,25 +139,53 @@ class PluginLoader:
# validate(instance={}, schema=schema)
self._plugins[plugin_id] = plugin_instance
self._plugin_configs[plugin_id] = plugin_config
print(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.") # Replace with proper logging
from ..core.logger import logger
logger.info(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.")
except Exception as e:
print(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}") # Replace with proper logging
from ..core.logger import logger
logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
# [/DEF:_register_plugin:Function]
# [DEF:get_plugin:Function]
# @PURPOSE: Retrieves a loaded plugin instance by its ID.
# @PRE: plugin_id is a string.
# @POST: Returns plugin instance or None.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: Optional[PluginBase] - The plugin instance if found, otherwise None.
def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
"""
Returns a loaded plugin instance by its ID.
"""
with belief_scope("get_plugin"):
"""
Returns a loaded plugin instance by its ID.
"""
return self._plugins.get(plugin_id)
# [/DEF:get_plugin:Function]
# [DEF:get_all_plugin_configs:Function]
# @PURPOSE: Returns a list of all registered plugin configurations.
# @PRE: None.
# @POST: Returns list of all PluginConfig objects.
# @RETURN: List[PluginConfig] - A list of plugin configurations.
def get_all_plugin_configs(self) -> List[PluginConfig]:
"""
Returns a list of all loaded plugin configurations.
"""
with belief_scope("get_all_plugin_configs"):
"""
Returns a list of all loaded plugin configurations.
"""
return list(self._plugin_configs.values())
# [/DEF:get_all_plugin_configs:Function]
# [DEF:has_plugin:Function]
# @PURPOSE: Checks if a plugin with the given ID is registered.
# @PRE: plugin_id is a string.
# @POST: Returns True if plugin exists.
# @PARAM: plugin_id (str) - The unique identifier of the plugin.
# @RETURN: bool - True if the plugin is registered, False otherwise.
def has_plugin(self, plugin_id: str) -> bool:
"""
Checks if a plugin with the given ID is loaded.
"""
return plugin_id in self._plugins
with belief_scope("has_plugin"):
"""
Checks if a plugin with the given ID is loaded.
"""
return plugin_id in self._plugins
# [/DEF:has_plugin:Function]
# [/DEF:PluginLoader:Class]
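For completeness, a short driver sketch for the loader; the plugin directory and module path are assumptions based on the package prefixes handled in _load_module:

from backend.src.core.plugin_loader import PluginLoader

loader = PluginLoader(plugin_dir="backend/src/plugins")  # assumed plugin location
for cfg in loader.get_all_plugin_configs():
    print(cfg.version, cfg.description)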

View File

@@ -0,0 +1,119 @@
# [DEF:SchedulerModule:Module]
# @SEMANTICS: scheduler, apscheduler, cron, backup
# @PURPOSE: Manages scheduled tasks using APScheduler.
# @LAYER: Core
# @RELATION: Uses TaskManager to run scheduled backups.
# [SECTION: IMPORTS]
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from .logger import logger, belief_scope
from .config_manager import ConfigManager
from typing import Optional
import asyncio
# [/SECTION]
# [DEF:SchedulerService:Class]
# @SEMANTICS: scheduler, service, apscheduler
# @PURPOSE: Provides a service to manage scheduled backup tasks.
class SchedulerService:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the scheduler service with task and config managers.
# @PRE: task_manager and config_manager must be provided.
# @POST: Scheduler instance is created but not started.
def __init__(self, task_manager, config_manager: ConfigManager):
with belief_scope("SchedulerService.__init__"):
self.task_manager = task_manager
self.config_manager = config_manager
self.scheduler = BackgroundScheduler()
self.loop = asyncio.get_event_loop()
# [/DEF:__init__:Function]
# [DEF:start:Function]
# @PURPOSE: Starts the background scheduler and loads initial schedules.
# @PRE: Scheduler should be initialized.
# @POST: Scheduler is running and schedules are loaded.
def start(self):
with belief_scope("SchedulerService.start"):
if not self.scheduler.running:
self.scheduler.start()
logger.info("Scheduler started.")
self.load_schedules()
# [/DEF:start:Function]
# [DEF:stop:Function]
# @PURPOSE: Stops the background scheduler.
# @PRE: Scheduler should be running.
# @POST: Scheduler is shut down.
def stop(self):
with belief_scope("SchedulerService.stop"):
if self.scheduler.running:
self.scheduler.shutdown()
logger.info("Scheduler stopped.")
# [/DEF:stop:Function]
# [DEF:load_schedules:Function]
# @PURPOSE: Loads backup schedules from configuration and registers them.
# @PRE: config_manager must have valid configuration.
# @POST: All enabled backup jobs are added to the scheduler.
def load_schedules(self):
with belief_scope("SchedulerService.load_schedules"):
# Clear existing jobs
self.scheduler.remove_all_jobs()
config = self.config_manager.get_config()
for env in config.environments:
if env.backup_schedule and env.backup_schedule.enabled:
self.add_backup_job(env.id, env.backup_schedule.cron_expression)
# [/DEF:load_schedules:Function]
# [DEF:add_backup_job:Function]
# @PURPOSE: Adds a scheduled backup job for an environment.
# @PRE: env_id and cron_expression must be valid strings.
# @POST: A new job is added to the scheduler or replaced if it already exists.
# @PARAM: env_id (str) - The ID of the environment.
# @PARAM: cron_expression (str) - The cron expression for the schedule.
def add_backup_job(self, env_id: str, cron_expression: str):
with belief_scope("SchedulerService.add_backup_job", f"env_id={env_id}, cron={cron_expression}"):
job_id = f"backup_{env_id}"
try:
self.scheduler.add_job(
self._trigger_backup,
CronTrigger.from_crontab(cron_expression),
id=job_id,
args=[env_id],
replace_existing=True
)
logger.info(f"Scheduled backup job added for environment {env_id}: {cron_expression}")
except Exception as e:
logger.error(f"Failed to add backup job for environment {env_id}: {e}")
# [/DEF:add_backup_job:Function]
# [DEF:_trigger_backup:Function]
# @PURPOSE: Triggered by the scheduler to start a backup task.
# @PRE: env_id must be a valid environment ID.
# @POST: A new backup task is created in the task manager if not already running.
# @PARAM: env_id (str) - The ID of the environment.
def _trigger_backup(self, env_id: str):
with belief_scope("SchedulerService._trigger_backup", f"env_id={env_id}"):
logger.info(f"Triggering scheduled backup for environment {env_id}")
# Check if a backup is already running for this environment
active_tasks = self.task_manager.get_tasks(limit=100)
for task in active_tasks:
if (task.plugin_id == "superset-backup" and
task.status in ["PENDING", "RUNNING"] and
task.params.get("environment_id") == env_id):
logger.warning(f"Backup already running for environment {env_id}. Skipping scheduled run.")
return
# Run the backup task. create_task is a coroutine, so schedule it on the
# application's event loop from this scheduler thread.
asyncio.run_coroutine_threadsafe(
self.task_manager.create_task("superset-backup", {"environment_id": env_id}),
self.loop
)
# [/DEF:_trigger_backup:Function]
# [/DEF:SchedulerService:Class]
# [/DEF:SchedulerModule:Module]
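# Illustrative wiring sketch (assumption, not part of the diff): how the
# SchedulerService above might be started alongside a TaskManager inside an
# asyncio application. The surrounding application setup is hypothetical;
# only SchedulerService's own methods come from the code above.
import asyncio

async def run_scheduler(task_manager, config_manager) -> None:
    service = SchedulerService(task_manager, config_manager)
    service.start()                       # starts APScheduler and loads cron schedules
    try:
        await asyncio.Event().wait()      # keep the loop alive for scheduled jobs
    finally:
        service.stop()                    # shuts the scheduler down cleanly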

View File

@@ -0,0 +1,400 @@
# [DEF:backend.src.core.superset_client:Module]
#
# @SEMANTICS: superset, api, client, rest, http, dashboard, dataset, import, export
# @PURPOSE: Provides a high-level client for interacting with the Superset REST API, encapsulating request logic, error handling, and pagination.
# @LAYER: Core
# @RELATION: USES -> backend.src.core.utils.network.APIClient
# @RELATION: USES -> backend.src.core.config_models.Environment
#
# @INVARIANT: All network operations must use the internal APIClient instance.
# @PUBLIC_API: SupersetClient
# [SECTION: IMPORTS]
import json
import zipfile
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union, cast
from requests import Response
from .logger import logger as app_logger, belief_scope
from .utils.network import APIClient, SupersetAPIError, AuthenticationError, DashboardNotFoundError, NetworkError
from .utils.fileio import get_filename_from_headers
from .config_models import Environment
# [/SECTION]
# [DEF:SupersetClient:Class]
# @PURPOSE: Wrapper class around the Superset REST API that provides methods for working with dashboards and datasets.
class SupersetClient:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the client, validates the configuration, and creates the network client.
# @PRE: `env` must be a valid Environment object.
# @POST: The `env` and `network` attributes are created and ready to use.
# @PARAM: env (Environment) - Environment configuration.
def __init__(self, env: Environment):
with belief_scope("__init__"):
app_logger.info("[SupersetClient.__init__][Enter] Initializing SupersetClient for env %s.", env.name)
self.env = env
# Construct auth payload expected by Superset API
auth_payload = {
"username": env.username,
"password": env.password,
"provider": "db",
"refresh": "true"
}
self.network = APIClient(
config={
"base_url": env.url,
"auth": auth_payload
},
verify_ssl=env.verify_ssl,
timeout=env.timeout
)
self.delete_before_reimport: bool = False
app_logger.info("[SupersetClient.__init__][Exit] SupersetClient initialized.")
# [/DEF:__init__:Function]
# [DEF:authenticate:Function]
# @PURPOSE: Authenticates the client using the configured credentials.
# @PRE: self.network must be initialized with valid auth configuration.
# @POST: Client is authenticated and tokens are stored.
# @RETURN: Dict[str, str] - Authentication tokens.
def authenticate(self) -> Dict[str, str]:
with belief_scope("SupersetClient.authenticate"):
return self.network.authenticate()
# [/DEF:authenticate:Function]
@property
# [DEF:headers:Function]
# @PURPOSE: Returns the base HTTP headers used by the network client.
def headers(self) -> dict:
with belief_scope("headers"):
return self.network.headers
# [/DEF:headers:Function]
# [SECTION: DASHBOARD OPERATIONS]
# [DEF:get_dashboards:Function]
# @PURPOSE: Fetches the full list of dashboards, handling pagination automatically.
# @PARAM: query (Optional[Dict]) - Additional query parameters for the API.
# @RETURN: Tuple[int, List[Dict]] - Tuple of (total count, list of dashboards).
def get_dashboards(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
with belief_scope("get_dashboards"):
app_logger.info("[get_dashboards][Enter] Fetching dashboards.")
validated_query = self._validate_query_params(query or {})
if 'columns' not in validated_query:
validated_query['columns'] = ["slug", "id", "changed_on_utc", "dashboard_title", "published"]
total_count = self._fetch_total_object_count(endpoint="/dashboard/")
paginated_data = self._fetch_all_pages(
endpoint="/dashboard/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
app_logger.info("[get_dashboards][Exit] Found %d dashboards.", total_count)
return total_count, paginated_data
# [/DEF:get_dashboards:Function]
# [DEF:get_dashboards_summary:Function]
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
# @RETURN: List[Dict]
def get_dashboards_summary(self) -> List[Dict]:
with belief_scope("SupersetClient.get_dashboards_summary"):
query = {
"columns": ["id", "dashboard_title", "changed_on_utc", "published"]
}
_, dashboards = self.get_dashboards(query=query)
# Map fields to DashboardMetadata schema
result = []
for dash in dashboards:
result.append({
"id": dash.get("id"),
"title": dash.get("dashboard_title"),
"last_modified": dash.get("changed_on_utc"),
"status": "published" if dash.get("published") else "draft"
})
return result
# [/DEF:get_dashboards_summary:Function]
# [DEF:export_dashboard:Function]
# @PURPOSE: Exports a dashboard as a ZIP archive.
# @PARAM: dashboard_id (int) - ID of the dashboard to export.
# @RETURN: Tuple[bytes, str] - Binary content of the ZIP archive and the file name.
def export_dashboard(self, dashboard_id: int) -> Tuple[bytes, str]:
with belief_scope("export_dashboard"):
app_logger.info("[export_dashboard][Enter] Exporting dashboard %s.", dashboard_id)
response = self.network.request(
method="GET",
endpoint="/dashboard/export/",
params={"q": json.dumps([dashboard_id])},
stream=True,
raw_response=True,
)
response = cast(Response, response)
self._validate_export_response(response, dashboard_id)
filename = self._resolve_export_filename(response, dashboard_id)
app_logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename)
return response.content, filename
# [/DEF:export_dashboard:Function]
# [DEF:import_dashboard:Function]
# @PURPOSE: Imports a dashboard from a ZIP file.
# @PARAM: file_name (Union[str, Path]) - Path to the ZIP archive.
# @PARAM: dash_id (Optional[int]) - Dashboard ID to delete on failure.
# @PARAM: dash_slug (Optional[str]) - Dashboard slug used to look up the ID.
# @RETURN: Dict - API response on success.
def import_dashboard(self, file_name: Union[str, Path], dash_id: Optional[int] = None, dash_slug: Optional[str] = None) -> Dict:
with belief_scope("import_dashboard"):
file_path = str(file_name)
self._validate_import_file(file_path)
try:
return self._do_import(file_path)
except Exception as exc:
app_logger.error("[import_dashboard][Failure] First import attempt failed: %s", exc, exc_info=True)
if not self.delete_before_reimport:
raise
target_id = self._resolve_target_id_for_delete(dash_id, dash_slug)
if target_id is None:
app_logger.error("[import_dashboard][Failure] No ID available for delete-retry.")
raise
self.delete_dashboard(target_id)
app_logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id)
return self._do_import(file_path)
# [/DEF:import_dashboard:Function]
# [DEF:delete_dashboard:Function]
# @PURPOSE: Deletes a dashboard by its ID or slug.
# @PARAM: dashboard_id (Union[int, str]) - Dashboard ID or slug.
def delete_dashboard(self, dashboard_id: Union[int, str]) -> None:
with belief_scope("delete_dashboard"):
app_logger.info("[delete_dashboard][Enter] Deleting dashboard %s.", dashboard_id)
response = self.network.request(method="DELETE", endpoint=f"/dashboard/{dashboard_id}")
response = cast(Dict, response)
if response.get("result", True) is not False:
app_logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id)
else:
app_logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response)
# [/DEF:delete_dashboard:Function]
# [/SECTION]
# [SECTION: DATASET OPERATIONS]
# [DEF:get_datasets:Function]
# @PURPOSE: Fetches the full list of datasets, handling pagination automatically.
# @PARAM: query (Optional[Dict]) - Additional query parameters.
# @RETURN: Tuple[int, List[Dict]] - Tuple of (total count, list of datasets).
def get_datasets(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
with belief_scope("get_datasets"):
app_logger.info("[get_datasets][Enter] Fetching datasets.")
validated_query = self._validate_query_params(query)
total_count = self._fetch_total_object_count(endpoint="/dataset/")
paginated_data = self._fetch_all_pages(
endpoint="/dataset/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
app_logger.info("[get_datasets][Exit] Found %d datasets.", total_count)
return total_count, paginated_data
# [/DEF:get_datasets:Function]
# [DEF:get_dataset:Function]
# @PURPOSE: Fetches information about a specific dataset by its ID.
# @PARAM: dataset_id (int) - Dataset ID.
# @RETURN: Dict - Dataset information.
def get_dataset(self, dataset_id: int) -> Dict:
with belief_scope("SupersetClient.get_dataset", f"id={dataset_id}"):
app_logger.info("[get_dataset][Enter] Fetching dataset %s.", dataset_id)
response = self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}")
response = cast(Dict, response)
app_logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id)
return response
# [/DEF:get_dataset:Function]
# [DEF:update_dataset:Function]
# @PURPOSE: Updates a dataset by its ID.
# @PARAM: dataset_id (int) - Dataset ID.
# @PARAM: data (Dict) - Data to update.
# @RETURN: Dict - API response.
def update_dataset(self, dataset_id: int, data: Dict) -> Dict:
with belief_scope("SupersetClient.update_dataset", f"id={dataset_id}"):
app_logger.info("[update_dataset][Enter] Updating dataset %s.", dataset_id)
response = self.network.request(
method="PUT",
endpoint=f"/dataset/{dataset_id}",
data=json.dumps(data),
headers={'Content-Type': 'application/json'}
)
response = cast(Dict, response)
app_logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id)
return response
# [/DEF:update_dataset:Function]
# [/SECTION]
# [SECTION: DATABASE OPERATIONS]
# [DEF:get_databases:Function]
# @PURPOSE: Fetches the full list of databases.
# @PARAM: query (Optional[Dict]) - Additional query parameters.
# @RETURN: Tuple[int, List[Dict]] - Tuple of (total count, list of databases).
def get_databases(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
with belief_scope("get_databases"):
app_logger.info("[get_databases][Enter] Fetching databases.")
validated_query = self._validate_query_params(query or {})
if 'columns' not in validated_query:
validated_query['columns'] = []
total_count = self._fetch_total_object_count(endpoint="/database/")
paginated_data = self._fetch_all_pages(
endpoint="/database/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
app_logger.info("[get_databases][Exit] Found %d databases.", total_count)
return total_count, paginated_data
# [/DEF:get_databases:Function]
# [DEF:get_database:Function]
# @PURPOSE: Fetches information about a specific database by its ID.
# @PARAM: database_id (int) - Database ID.
# @RETURN: Dict - Database information.
def get_database(self, database_id: int) -> Dict:
with belief_scope("get_database"):
app_logger.info("[get_database][Enter] Fetching database %s.", database_id)
response = self.network.request(method="GET", endpoint=f"/database/{database_id}")
response = cast(Dict, response)
app_logger.info("[get_database][Exit] Got database %s.", database_id)
return response
# [/DEF:get_database:Function]
# [DEF:get_databases_summary:Function]
# @PURPOSE: Fetch a summary of databases including uuid, name, and engine.
# @RETURN: List[Dict] - Summary of databases.
def get_databases_summary(self) -> List[Dict]:
with belief_scope("SupersetClient.get_databases_summary"):
query = {
"columns": ["uuid", "database_name", "backend"]
}
_, databases = self.get_databases(query=query)
# Map 'backend' to 'engine' for consistency with contracts
for db in databases:
db['engine'] = db.pop('backend', None)
return databases
# [/DEF:get_databases_summary:Function]
# [DEF:get_database_by_uuid:Function]
# @PURPOSE: Find a database by its UUID.
# @PARAM: db_uuid (str) - The UUID of the database.
# @RETURN: Optional[Dict] - Database info if found, else None.
def get_database_by_uuid(self, db_uuid: str) -> Optional[Dict]:
with belief_scope("SupersetClient.get_database_by_uuid", f"uuid={db_uuid}"):
query = {
"filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
}
_, databases = self.get_databases(query=query)
return databases[0] if databases else None
# [/DEF:get_database_by_uuid:Function]
# [/SECTION]
# [SECTION: HELPERS]
# [DEF:_resolve_target_id_for_delete:Function]
def _resolve_target_id_for_delete(self, dash_id: Optional[int], dash_slug: Optional[str]) -> Optional[int]:
with belief_scope("_resolve_target_id_for_delete"):
if dash_id is not None:
return dash_id
if dash_slug is not None:
app_logger.debug("[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", dash_slug)
try:
_, candidates = self.get_dashboards(query={"filters": [{"col": "slug", "op": "eq", "value": dash_slug}]})
if candidates:
target_id = candidates[0]["id"]
app_logger.debug("[_resolve_target_id_for_delete][Success] Resolved slug to ID %s.", target_id)
return target_id
except Exception as e:
app_logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e)
return None
# [/DEF:_resolve_target_id_for_delete:Function]
# [DEF:_do_import:Function]
def _do_import(self, file_name: Union[str, Path]) -> Dict:
with belief_scope("_do_import"):
app_logger.debug(f"[_do_import][State] Uploading file: {file_name}")
file_path = Path(file_name)
if not file_path.exists():
app_logger.error(f"[_do_import][Failure] File does not exist: {file_name}")
raise FileNotFoundError(f"File does not exist: {file_name}")
return self.network.upload_file(
endpoint="/dashboard/import/",
file_info={"file_obj": file_path, "file_name": file_path.name, "form_field": "formData"},
extra_data={"overwrite": "true"},
timeout=self.env.timeout * 2,
)
# [/DEF:_do_import:Function]
# [DEF:_validate_export_response:Function]
def _validate_export_response(self, response: Response, dashboard_id: int) -> None:
with belief_scope("_validate_export_response"):
content_type = response.headers.get("Content-Type", "")
if "application/zip" not in content_type:
raise SupersetAPIError(f"Получен не ZIP-архив (Content-Type: {content_type})")
if not response.content:
raise SupersetAPIError("Получены пустые данные при экспорте")
# [/DEF:_validate_export_response:Function]
# [DEF:_resolve_export_filename:Function]
def _resolve_export_filename(self, response: Response, dashboard_id: int) -> str:
with belief_scope("_resolve_export_filename"):
filename = get_filename_from_headers(dict(response.headers))
if not filename:
from datetime import datetime
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip"
app_logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename)
return filename
# [/DEF:_resolve_export_filename:Function]
# [DEF:_validate_query_params:Function]
def _validate_query_params(self, query: Optional[Dict]) -> Dict:
with belief_scope("_validate_query_params"):
base_query = {"page": 0, "page_size": 1000}
return {**base_query, **(query or {})}
# [/DEF:_validate_query_params:Function]
# [DEF:_fetch_total_object_count:Function]
def _fetch_total_object_count(self, endpoint: str) -> int:
with belief_scope("_fetch_total_object_count"):
return self.network.fetch_paginated_count(
endpoint=endpoint,
query_params={"page": 0, "page_size": 1},
count_field="count",
)
# [/DEF:_fetch_total_object_count:Function]
# [DEF:_fetch_all_pages:Function]
def _fetch_all_pages(self, endpoint: str, pagination_options: Dict) -> List[Dict]:
with belief_scope("_fetch_all_pages"):
return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options)
# [/DEF:_fetch_all_pages:Function]
# [DEF:_validate_import_file:Function]
def _validate_import_file(self, zip_path: Union[str, Path]) -> None:
with belief_scope("_validate_import_file"):
path = Path(zip_path)
if not path.exists():
raise FileNotFoundError(f"Файл {zip_path} не существует")
if not zipfile.is_zipfile(path):
raise SupersetAPIError(f"Файл {zip_path} не является ZIP-архивом")
with zipfile.ZipFile(path, "r") as zf:
if not any(n.endswith("metadata.yaml") for n in zf.namelist()):
raise SupersetAPIError(f"Архив {zip_path} не содержит 'metadata.yaml'")
# [/DEF:_validate_import_file:Function]
# [/SECTION]
# [/DEF:SupersetClient:Class]
# [/DEF:backend.src.core.superset_client:Module]
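# Illustrative usage sketch (assumption, not part of the diff): exporting the
# first available dashboard with the client above. The output directory and
# the idea of "first dashboard" are placeholders; only the SupersetClient
# methods and the "id" column come from the code shown here.
from pathlib import Path

def export_first_dashboard(env: "Environment", out_dir: Path) -> Path:
    client = SupersetClient(env)
    client.authenticate()                      # explicit session setup
    total, dashboards = client.get_dashboards()
    if total == 0:
        raise RuntimeError("No dashboards found")
    content, filename = client.export_dashboard(dashboards[0]["id"])
    target = out_dir / filename
    target.write_bytes(content)                # save the exported ZIP archive
    return target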

View File

@@ -1,167 +0,0 @@
# [DEF:TaskManagerModule:Module]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
# @LAYER: Core
# @RELATION: Depends on PluginLoader to get plugin instances. It is used by the API layer to create and query tasks.
import asyncio
import uuid
from datetime import datetime
from enum import Enum
from typing import Dict, Any, List, Optional
from concurrent.futures import ThreadPoolExecutor
from pydantic import BaseModel, Field
# Assuming PluginBase and PluginConfig are defined in plugin_base.py
# from .plugin_base import PluginBase, PluginConfig # Not needed here, TaskManager interacts with the PluginLoader
# [DEF:TaskStatus:Enum]
# @SEMANTICS: task, status, state, enum
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
class TaskStatus(str, Enum):
PENDING = "PENDING"
RUNNING = "RUNNING"
SUCCESS = "SUCCESS"
FAILED = "FAILED"
# [/DEF]
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
class LogEntry(BaseModel):
timestamp: datetime = Field(default_factory=datetime.utcnow)
level: str
message: str
context: Optional[Dict[str, Any]] = None
# [/DEF]
# [DEF:Task:Class]
# @SEMANTICS: task, job, execution, state, pydantic
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
class Task(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
plugin_id: str
status: TaskStatus = TaskStatus.PENDING
started_at: Optional[datetime] = None
finished_at: Optional[datetime] = None
user_id: Optional[str] = None
logs: List[LogEntry] = Field(default_factory=list)
params: Dict[str, Any] = Field(default_factory=dict)
# [/DEF]
# [DEF:TaskManager:Class]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
class TaskManager:
"""
Manages the lifecycle of tasks, including their creation, execution, and state tracking.
"""
def __init__(self, plugin_loader):
self.plugin_loader = plugin_loader
self.tasks: Dict[str, Task] = {}
self.subscribers: Dict[str, List[asyncio.Queue]] = {}
self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution
self.loop = asyncio.get_event_loop()
# [/DEF]
async def create_task(self, plugin_id: str, params: Dict[str, Any], user_id: Optional[str] = None) -> Task:
"""
Creates and queues a new task for execution.
"""
if not self.plugin_loader.has_plugin(plugin_id):
raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
plugin = self.plugin_loader.get_plugin(plugin_id)
# Validate params against plugin schema (this will be done at a higher level, e.g., API route)
# For now, a basic check
if not isinstance(params, dict):
raise ValueError("Task parameters must be a dictionary.")
task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
self.tasks[task.id] = task
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
return task
async def _run_task(self, task_id: str):
"""
Internal method to execute a task.
"""
task = self.tasks[task_id]
plugin = self.plugin_loader.get_plugin(task.plugin_id)
task.status = TaskStatus.RUNNING
task.started_at = datetime.utcnow()
self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'")
try:
# Execute plugin in a separate thread to avoid blocking the event loop
# if the plugin's execute method is synchronous and potentially CPU-bound.
# If the plugin's execute method is already async, this can be simplified.
await self.loop.run_in_executor(
self.executor,
lambda: asyncio.run(plugin.execute(task.params)) if asyncio.iscoroutinefunction(plugin.execute) else plugin.execute(task.params)
)
task.status = TaskStatus.SUCCESS
self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'")
except Exception as e:
task.status = TaskStatus.FAILED
self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__})
finally:
task.finished_at = datetime.utcnow()
# In a real system, you might notify clients via WebSocket here
def get_task(self, task_id: str) -> Optional[Task]:
"""
Retrieves a task by its ID.
"""
return self.tasks.get(task_id)
def get_all_tasks(self) -> List[Task]:
"""
Retrieves all registered tasks.
"""
return list(self.tasks.values())
def get_task_logs(self, task_id: str) -> List[LogEntry]:
"""
Retrieves logs for a specific task.
"""
task = self.tasks.get(task_id)
return task.logs if task else []
def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
"""
Adds a log entry to a task and notifies subscribers.
"""
task = self.tasks.get(task_id)
if not task:
return
log_entry = LogEntry(level=level, message=message, context=context)
task.logs.append(log_entry)
# Notify subscribers
if task_id in self.subscribers:
for queue in self.subscribers[task_id]:
self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
"""
Subscribes to real-time logs for a task.
"""
queue = asyncio.Queue()
if task_id not in self.subscribers:
self.subscribers[task_id] = []
self.subscribers[task_id].append(queue)
return queue
def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
"""
Unsubscribes from real-time logs for a task.
"""
if task_id in self.subscribers:
self.subscribers[task_id].remove(queue)
if not self.subscribers[task_id]:
del self.subscribers[task_id]

View File

@@ -0,0 +1,12 @@
# [DEF:TaskManagerPackage:Module]
# @SEMANTICS: task, manager, package, exports
# @PURPOSE: Exports the public API of the task manager package.
# @LAYER: Core
# @RELATION: Aggregates models and manager.
from .models import Task, TaskStatus, LogEntry
from .manager import TaskManager
__all__ = ["TaskManager", "Task", "TaskStatus", "LogEntry"]
# [/DEF:TaskManagerPackage:Module]

View File

@@ -0,0 +1,47 @@
# [DEF:TaskCleanupModule:Module]
# @SEMANTICS: task, cleanup, retention
# @PURPOSE: Implements task cleanup and retention policies.
# @LAYER: Core
# @RELATION: Uses TaskPersistenceService to delete old tasks.
from datetime import datetime, timedelta
from .persistence import TaskPersistenceService
from ..logger import logger, belief_scope
from ..config_manager import ConfigManager
# [DEF:TaskCleanupService:Class]
# @PURPOSE: Provides methods to clean up old task records.
class TaskCleanupService:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the cleanup service with dependencies.
# @PRE: persistence_service and config_manager are valid.
# @POST: Cleanup service is ready.
def __init__(self, persistence_service: TaskPersistenceService, config_manager: ConfigManager):
self.persistence_service = persistence_service
self.config_manager = config_manager
# [/DEF:__init__:Function]
# [DEF:run_cleanup:Function]
# @PURPOSE: Deletes tasks older than the configured retention period.
# @PRE: Config manager has valid settings.
# @POST: Old tasks are deleted from persistence.
def run_cleanup(self):
with belief_scope("TaskCleanupService.run_cleanup"):
settings = self.config_manager.get_config().settings
retention_days = settings.task_retention_days
# This is a simplified implementation.
# In a real scenario, we would query IDs of tasks older than retention_days.
# For now, we'll log the action.
logger.info(f"Cleaning up tasks older than {retention_days} days.")
# Re-loading tasks to check for limit
tasks = self.persistence_service.load_tasks(limit=1000)
if len(tasks) > settings.task_retention_limit:
to_delete = [t.id for t in tasks[settings.task_retention_limit:]]
self.persistence_service.delete_tasks(to_delete)
logger.info(f"Deleted {len(to_delete)} tasks exceeding limit of {settings.task_retention_limit}")
# [/DEF:run_cleanup:Function]
# [/DEF:TaskCleanupService:Class]
# [/DEF:TaskCleanupModule:Module]
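# Illustrative sketch (assumption, not part of the diff): running the cleanup
# service periodically by registering it on the SchedulerService's underlying
# APScheduler instance. The one-hour interval and the job ID are arbitrary
# example values.
from apscheduler.triggers.interval import IntervalTrigger

def register_cleanup_job(scheduler_service, persistence_service, config_manager) -> None:
    cleanup = TaskCleanupService(persistence_service, config_manager)
    scheduler_service.scheduler.add_job(
        cleanup.run_cleanup,            # synchronous, safe to run in a scheduler thread
        IntervalTrigger(hours=1),
        id="task_cleanup",
        replace_existing=True,
    )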

View File

@@ -0,0 +1,398 @@
# [DEF:TaskManagerModule:Module]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
# @LAYER: Core
# @RELATION: Depends on PluginLoader to get plugin instances. It is used by the API layer to create and query tasks.
# @INVARIANT: Task IDs are unique.
# @CONSTRAINT: Must use belief_scope for logging.
# [SECTION: IMPORTS]
import asyncio
from datetime import datetime
from typing import Dict, Any, List, Optional
from concurrent.futures import ThreadPoolExecutor
from .models import Task, TaskStatus, LogEntry
from .persistence import TaskPersistenceService
from ..logger import logger, belief_scope
# [/SECTION]
# [DEF:TaskManager:Class]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
class TaskManager:
"""
Manages the lifecycle of tasks, including their creation, execution, and state tracking.
"""
# [DEF:__init__:Function]
# @PURPOSE: Initialize the TaskManager with dependencies.
# @PRE: plugin_loader is initialized.
# @POST: TaskManager is ready to accept tasks.
# @PARAM: plugin_loader - The plugin loader instance.
def __init__(self, plugin_loader):
with belief_scope("TaskManager.__init__"):
self.plugin_loader = plugin_loader
self.tasks: Dict[str, Task] = {}
self.subscribers: Dict[str, List[asyncio.Queue]] = {}
self.executor = ThreadPoolExecutor(max_workers=5) # For CPU-bound plugin execution
self.persistence_service = TaskPersistenceService()
try:
self.loop = asyncio.get_running_loop()
except RuntimeError:
self.loop = asyncio.get_event_loop()
self.task_futures: Dict[str, asyncio.Future] = {}
# Load persisted tasks on startup
self.load_persisted_tasks()
# [/DEF:__init__:Function]
# [DEF:create_task:Function]
# @PURPOSE: Creates and queues a new task for execution.
# @PRE: Plugin with plugin_id exists. Params are valid.
# @POST: Task is created, added to registry, and scheduled for execution.
# @PARAM: plugin_id (str) - The ID of the plugin to run.
# @PARAM: params (Dict[str, Any]) - Parameters for the plugin.
# @PARAM: user_id (Optional[str]) - ID of the user requesting the task.
# @RETURN: Task - The created task instance.
# @THROWS: ValueError if plugin not found or params invalid.
async def create_task(self, plugin_id: str, params: Dict[str, Any], user_id: Optional[str] = None) -> Task:
with belief_scope("TaskManager.create_task", f"plugin_id={plugin_id}"):
if not self.plugin_loader.has_plugin(plugin_id):
logger.error(f"Plugin with ID '{plugin_id}' not found.")
raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
plugin = self.plugin_loader.get_plugin(plugin_id)
if not isinstance(params, dict):
logger.error("Task parameters must be a dictionary.")
raise ValueError("Task parameters must be a dictionary.")
task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
self.tasks[task.id] = task
self.persistence_service.persist_task(task)
logger.info(f"Task {task.id} created and scheduled for execution")
self.loop.create_task(self._run_task(task.id)) # Schedule task for execution
return task
# [/DEF:create_task:Function]
# [DEF:_run_task:Function]
# @PURPOSE: Internal method to execute a task.
# @PRE: Task exists in registry.
# @POST: Task is executed, status updated to SUCCESS or FAILED.
# @PARAM: task_id (str) - The ID of the task to run.
async def _run_task(self, task_id: str):
with belief_scope("TaskManager._run_task", f"task_id={task_id}"):
task = self.tasks[task_id]
plugin = self.plugin_loader.get_plugin(task.plugin_id)
logger.info(f"Starting execution of task {task_id} for plugin '{plugin.name}'")
task.status = TaskStatus.RUNNING
task.started_at = datetime.utcnow()
self.persistence_service.persist_task(task)
self._add_log(task_id, "INFO", f"Task started for plugin '{plugin.name}'")
try:
# Execute plugin
params = {**task.params, "_task_id": task_id}
if asyncio.iscoroutinefunction(plugin.execute):
task.result = await plugin.execute(params)
else:
task.result = await self.loop.run_in_executor(
self.executor,
plugin.execute,
params
)
logger.info(f"Task {task_id} completed successfully")
task.status = TaskStatus.SUCCESS
self._add_log(task_id, "INFO", f"Task completed successfully for plugin '{plugin.name}'")
except Exception as e:
logger.error(f"Task {task_id} failed: {e}")
task.status = TaskStatus.FAILED
self._add_log(task_id, "ERROR", f"Task failed: {e}", {"error_type": type(e).__name__})
finally:
task.finished_at = datetime.utcnow()
self.persistence_service.persist_task(task)
logger.info(f"Task {task_id} execution finished with status: {task.status}")
# [/DEF:_run_task:Function]
# [DEF:resolve_task:Function]
# @PURPOSE: Resumes a task that is awaiting mapping.
# @PRE: Task exists and is in AWAITING_MAPPING state.
# @POST: Task status updated to RUNNING, params updated, execution resumed.
# @PARAM: task_id (str) - The ID of the task.
# @PARAM: resolution_params (Dict[str, Any]) - Params to resolve the wait.
# @THROWS: ValueError if task not found or not awaiting mapping.
async def resolve_task(self, task_id: str, resolution_params: Dict[str, Any]):
with belief_scope("TaskManager.resolve_task", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task or task.status != TaskStatus.AWAITING_MAPPING:
raise ValueError("Task is not awaiting mapping.")
# Update task params with resolution
task.params.update(resolution_params)
task.status = TaskStatus.RUNNING
self.persistence_service.persist_task(task)
self._add_log(task_id, "INFO", "Task resumed after mapping resolution.")
# Signal the future to continue
if task_id in self.task_futures:
self.task_futures[task_id].set_result(True)
# [/DEF:resolve_task:Function]
# [DEF:wait_for_resolution:Function]
# @PURPOSE: Pauses execution and waits for a resolution signal.
# @PRE: Task exists.
# @POST: Execution pauses until future is set.
# @PARAM: task_id (str) - The ID of the task.
async def wait_for_resolution(self, task_id: str):
with belief_scope("TaskManager.wait_for_resolution", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task: return
task.status = TaskStatus.AWAITING_MAPPING
self.persistence_service.persist_task(task)
self.task_futures[task_id] = self.loop.create_future()
try:
await self.task_futures[task_id]
finally:
if task_id in self.task_futures:
del self.task_futures[task_id]
# [/DEF:wait_for_resolution:Function]
# [DEF:wait_for_input:Function]
# @PURPOSE: Pauses execution and waits for user input.
# @PRE: Task exists.
# @POST: Execution pauses until future is set via resume_task_with_password.
# @PARAM: task_id (str) - The ID of the task.
async def wait_for_input(self, task_id: str):
with belief_scope("TaskManager.wait_for_input", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task: return
# Status is already set to AWAITING_INPUT by await_input()
self.task_futures[task_id] = self.loop.create_future()
try:
await self.task_futures[task_id]
finally:
if task_id in self.task_futures:
del self.task_futures[task_id]
# [/DEF:wait_for_input:Function]
# [DEF:get_task:Function]
# @PURPOSE: Retrieves a task by its ID.
# @PRE: task_id is a string.
# @POST: Returns Task object or None.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: Optional[Task] - The task or None.
def get_task(self, task_id: str) -> Optional[Task]:
with belief_scope("TaskManager.get_task", f"task_id={task_id}"):
return self.tasks.get(task_id)
# [/DEF:get_task:Function]
# [DEF:get_all_tasks:Function]
# @PURPOSE: Retrieves all registered tasks.
# @PRE: None.
# @POST: Returns list of all Task objects.
# @RETURN: List[Task] - All tasks.
def get_all_tasks(self) -> List[Task]:
with belief_scope("TaskManager.get_all_tasks"):
return list(self.tasks.values())
# [/DEF:get_all_tasks:Function]
# [DEF:get_tasks:Function]
# @PURPOSE: Retrieves tasks with pagination and optional status filter.
# @PRE: limit and offset are non-negative integers.
# @POST: Returns a list of tasks sorted by start_time descending.
# @PARAM: limit (int) - Maximum number of tasks to return.
# @PARAM: offset (int) - Number of tasks to skip.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @RETURN: List[Task] - List of tasks matching criteria.
def get_tasks(self, limit: int = 10, offset: int = 0, status: Optional[TaskStatus] = None) -> List[Task]:
with belief_scope("TaskManager.get_tasks"):
tasks = list(self.tasks.values())
if status:
tasks = [t for t in tasks if t.status == status]
# Sort by start_time descending (most recent first)
tasks.sort(key=lambda t: t.started_at or datetime.min, reverse=True)
return tasks[offset:offset + limit]
# [/DEF:get_tasks:Function]
# [DEF:get_task_logs:Function]
# @PURPOSE: Retrieves logs for a specific task.
# @PRE: task_id is a string.
# @POST: Returns list of LogEntry objects.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: List[LogEntry] - List of log entries.
def get_task_logs(self, task_id: str) -> List[LogEntry]:
with belief_scope("TaskManager.get_task_logs", f"task_id={task_id}"):
task = self.tasks.get(task_id)
return task.logs if task else []
# [/DEF:get_task_logs:Function]
# [DEF:_add_log:Function]
# @PURPOSE: Adds a log entry to a task and notifies subscribers.
# @PRE: Task exists.
# @POST: Log added to task and pushed to queues.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: level (str) - Log level.
# @PARAM: message (str) - Log message.
# @PARAM: context (Optional[Dict]) - Log context.
def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
with belief_scope("TaskManager._add_log", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task:
return
log_entry = LogEntry(level=level, message=message, context=context)
task.logs.append(log_entry)
self.persistence_service.persist_task(task)
# Notify subscribers
if task_id in self.subscribers:
for queue in self.subscribers[task_id]:
self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
# [/DEF:_add_log:Function]
# [DEF:subscribe_logs:Function]
# @PURPOSE: Subscribes to real-time logs for a task.
# @PRE: task_id is a string.
# @POST: Returns an asyncio.Queue for log entries.
# @PARAM: task_id (str) - ID of the task.
# @RETURN: asyncio.Queue - Queue for log entries.
async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
with belief_scope("TaskManager.subscribe_logs", f"task_id={task_id}"):
queue = asyncio.Queue()
if task_id not in self.subscribers:
self.subscribers[task_id] = []
self.subscribers[task_id].append(queue)
return queue
# [/DEF:subscribe_logs:Function]
# [DEF:unsubscribe_logs:Function]
# @PURPOSE: Unsubscribes from real-time logs for a task.
# @PRE: task_id is a string, queue is asyncio.Queue.
# @POST: Queue removed from subscribers.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: queue (asyncio.Queue) - Queue to remove.
def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
with belief_scope("TaskManager.unsubscribe_logs", f"task_id={task_id}"):
if task_id in self.subscribers:
if queue in self.subscribers[task_id]:
self.subscribers[task_id].remove(queue)
if not self.subscribers[task_id]:
del self.subscribers[task_id]
# [/DEF:unsubscribe_logs:Function]
# [DEF:load_persisted_tasks:Function]
# @PURPOSE: Load persisted tasks using persistence service.
# @PRE: None.
# @POST: Persisted tasks loaded into self.tasks.
def load_persisted_tasks(self) -> None:
with belief_scope("TaskManager.load_persisted_tasks"):
loaded_tasks = self.persistence_service.load_tasks(limit=100)
for task in loaded_tasks:
if task.id not in self.tasks:
self.tasks[task.id] = task
# [/DEF:load_persisted_tasks:Function]
# [DEF:await_input:Function]
# @PURPOSE: Transition a task to AWAITING_INPUT state with input request.
# @PRE: Task exists and is in RUNNING state.
# @POST: Task status changed to AWAITING_INPUT, input_request set, persisted.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: input_request (Dict) - Details about required input.
# @THROWS: ValueError if task not found or not RUNNING.
def await_input(self, task_id: str, input_request: Dict[str, Any]) -> None:
with belief_scope("TaskManager.await_input", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task:
raise ValueError(f"Task {task_id} not found")
if task.status != TaskStatus.RUNNING:
raise ValueError(f"Task {task_id} is not RUNNING (current: {task.status})")
task.status = TaskStatus.AWAITING_INPUT
task.input_required = True
task.input_request = input_request
self.persistence_service.persist_task(task)
self._add_log(task_id, "INFO", "Task paused for user input", {"input_request": input_request})
# [/DEF:await_input:Function]
# [DEF:resume_task_with_password:Function]
# @PURPOSE: Resume a task that is awaiting input with provided passwords.
# @PRE: Task exists and is in AWAITING_INPUT state.
# @POST: Task status changed to RUNNING, passwords injected, task resumed.
# @PARAM: task_id (str) - ID of the task.
# @PARAM: passwords (Dict[str, str]) - Mapping of database name to password.
# @THROWS: ValueError if task not found, not awaiting input, or passwords invalid.
def resume_task_with_password(self, task_id: str, passwords: Dict[str, str]) -> None:
with belief_scope("TaskManager.resume_task_with_password", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task:
raise ValueError(f"Task {task_id} not found")
if task.status != TaskStatus.AWAITING_INPUT:
raise ValueError(f"Task {task_id} is not AWAITING_INPUT (current: {task.status})")
if not isinstance(passwords, dict) or not passwords:
raise ValueError("Passwords must be a non-empty dictionary")
task.params["passwords"] = passwords
task.input_required = False
task.input_request = None
task.status = TaskStatus.RUNNING
self.persistence_service.persist_task(task)
self._add_log(task_id, "INFO", "Task resumed with passwords", {"databases": list(passwords.keys())})
if task_id in self.task_futures:
self.task_futures[task_id].set_result(True)
# [/DEF:resume_task_with_password:Function]
# [DEF:clear_tasks:Function]
# @PURPOSE: Clears tasks based on status filter.
# @PRE: status is Optional[TaskStatus].
# @POST: Tasks matching filter (or all non-active) cleared from registry and database.
# @PARAM: status (Optional[TaskStatus]) - Filter by task status.
# @RETURN: int - Number of tasks cleared.
def clear_tasks(self, status: Optional[TaskStatus] = None) -> int:
with belief_scope("TaskManager.clear_tasks"):
tasks_to_remove = []
for task_id, task in list(self.tasks.items()):
# If a status filter is provided, remove only tasks with that status.
# Otherwise remove all finished tasks and keep the active ones:
# RUNNING, plus the paused states AWAITING_INPUT and AWAITING_MAPPING.
should_remove = False
if status:
if task.status == status:
should_remove = True
else:
# Clear all non-active tasks (keep RUNNING, AWAITING_INPUT, AWAITING_MAPPING)
if task.status not in [TaskStatus.RUNNING, TaskStatus.AWAITING_INPUT, TaskStatus.AWAITING_MAPPING]:
should_remove = True
if should_remove:
tasks_to_remove.append(task_id)
for tid in tasks_to_remove:
# Cancel future if exists (e.g. for AWAITING_INPUT/MAPPING)
if tid in self.task_futures:
self.task_futures[tid].cancel()
del self.task_futures[tid]
del self.tasks[tid]
# Remove from persistence
self.persistence_service.delete_tasks(tasks_to_remove)
logger.info(f"Cleared {len(tasks_to_remove)} tasks.")
return len(tasks_to_remove)
# [/DEF:clear_tasks:Function]
# [/DEF:TaskManager:Class]
# [/DEF:TaskManagerModule:Module]
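# Illustrative usage sketch (assumption, not part of the diff): creating a
# task and polling it until it finishes. The plugin ID, parameters, and
# polling interval are placeholders; only the TaskManager methods and
# TaskStatus values come from the class above.
import asyncio

async def run_and_wait(manager: "TaskManager", plugin_id: str, params: dict):
    task = await manager.create_task(plugin_id, params)
    # _run_task is scheduled on the event loop, so simply poll the status.
    while manager.get_task(task.id).status in (TaskStatus.PENDING, TaskStatus.RUNNING):
        await asyncio.sleep(0.5)
    return manager.get_task(task.id)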

View File

@@ -0,0 +1,68 @@
# [DEF:TaskManagerModels:Module]
# @SEMANTICS: task, models, pydantic, enum, state
# @PURPOSE: Defines the data models and enumerations used by the Task Manager.
# @LAYER: Core
# @RELATION: Used by TaskManager and API routes.
# @INVARIANT: Task IDs are immutable once created.
# @CONSTRAINT: Must use Pydantic for data validation.
# [SECTION: IMPORTS]
import uuid
from datetime import datetime
from enum import Enum
from typing import Dict, Any, List, Optional
from pydantic import BaseModel, Field
# [/SECTION]
# [DEF:TaskStatus:Enum]
# @SEMANTICS: task, status, state, enum
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
class TaskStatus(str, Enum):
PENDING = "PENDING"
RUNNING = "RUNNING"
SUCCESS = "SUCCESS"
FAILED = "FAILED"
AWAITING_MAPPING = "AWAITING_MAPPING"
AWAITING_INPUT = "AWAITING_INPUT"
# [/DEF:TaskStatus:Enum]
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
class LogEntry(BaseModel):
timestamp: datetime = Field(default_factory=datetime.utcnow)
level: str
message: str
context: Optional[Dict[str, Any]] = None
# [/DEF:LogEntry:Class]
# [DEF:Task:Class]
# @SEMANTICS: task, job, execution, state, pydantic
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
class Task(BaseModel):
id: str = Field(default_factory=lambda: str(uuid.uuid4()))
plugin_id: str
status: TaskStatus = TaskStatus.PENDING
started_at: Optional[datetime] = None
finished_at: Optional[datetime] = None
user_id: Optional[str] = None
logs: List[LogEntry] = Field(default_factory=list)
params: Dict[str, Any] = Field(default_factory=dict)
input_required: bool = False
input_request: Optional[Dict[str, Any]] = None
result: Optional[Dict[str, Any]] = None
# [DEF:__init__:Function]
# @PURPOSE: Initializes the Task model and validates input_request for AWAITING_INPUT status.
# @PRE: If status is AWAITING_INPUT, input_request must be provided.
# @POST: Task instance is created or ValueError is raised.
# @PARAM: **data - Keyword arguments for model initialization.
def __init__(self, **data):
super().__init__(**data)
if self.status == TaskStatus.AWAITING_INPUT and not self.input_request:
raise ValueError("input_request is required when status is AWAITING_INPUT")
# [/DEF:__init__:Function]
# [/DEF:Task:Class]
# [/DEF:TaskManagerModels:Module]
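# Illustrative sketch (assumption, not part of the diff): the custom __init__
# above rejects AWAITING_INPUT tasks that carry no input_request. The plugin
# ID and input_request payload below are placeholder values.
def _demo_awaiting_input_validation() -> None:
    try:
        Task(plugin_id="demo", status=TaskStatus.AWAITING_INPUT)  # missing input_request
    except ValueError as exc:
        print(f"Rejected as expected: {exc}")
    ok = Task(
        plugin_id="demo",
        status=TaskStatus.AWAITING_INPUT,
        input_request={"type": "password", "databases": ["example_db"]},
    )
    print(ok.status, ok.input_required)   # input_required stays False unless set explicitly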

View File

@@ -0,0 +1,158 @@
# [DEF:TaskPersistenceModule:Module]
# @SEMANTICS: persistence, sqlite, sqlalchemy, task, storage
# @PURPOSE: Handles the persistence of tasks using SQLAlchemy and the tasks.db database.
# @LAYER: Core
# @RELATION: Used by TaskManager to save and load tasks.
# @INVARIANT: Database schema must match the TaskRecord model structure.
# [SECTION: IMPORTS]
from datetime import datetime
from typing import List, Optional, Dict, Any
import json
from sqlalchemy.orm import Session
from ...models.task import TaskRecord
from ..database import TasksSessionLocal
from .models import Task, TaskStatus, LogEntry
from ..logger import logger, belief_scope
# [/SECTION]
# [DEF:TaskPersistenceService:Class]
# @SEMANTICS: persistence, service, database, sqlalchemy
# @PURPOSE: Provides methods to save and load tasks from the tasks.db database using SQLAlchemy.
class TaskPersistenceService:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the persistence service.
# @PRE: None.
# @POST: Service is ready.
def __init__(self):
with belief_scope("TaskPersistenceService.__init__"):
# We use TasksSessionLocal from database.py
pass
# [/DEF:__init__:Function]
# [DEF:persist_task:Function]
# @PURPOSE: Persists or updates a single task in the database.
# @PRE: isinstance(task, Task)
# @POST: Task record created or updated in database.
# @PARAM: task (Task) - The task object to persist.
def persist_task(self, task: Task) -> None:
with belief_scope("TaskPersistenceService.persist_task", f"task_id={task.id}"):
session: Session = TasksSessionLocal()
try:
record = session.query(TaskRecord).filter(TaskRecord.id == task.id).first()
if not record:
record = TaskRecord(id=task.id)
session.add(record)
record.type = task.plugin_id
record.status = task.status.value
record.environment_id = task.params.get("environment_id") or task.params.get("source_env_id")
record.started_at = task.started_at
record.finished_at = task.finished_at
record.params = task.params
record.result = task.result
# Store logs as JSON, converting datetime to string
record.logs = []
for log in task.logs:
log_dict = log.dict()
if isinstance(log_dict.get('timestamp'), datetime):
log_dict['timestamp'] = log_dict['timestamp'].isoformat()
record.logs.append(log_dict)
# Extract error if failed
if task.status == TaskStatus.FAILED:
for log in reversed(task.logs):
if log.level == "ERROR":
record.error = log.message
break
session.commit()
except Exception as e:
session.rollback()
logger.error(f"Failed to persist task {task.id}: {e}")
finally:
session.close()
# [/DEF:persist_task:Function]
# [DEF:persist_tasks:Function]
# @PURPOSE: Persists multiple tasks.
# @PRE: isinstance(tasks, list)
# @POST: All tasks in list are persisted.
# @PARAM: tasks (List[Task]) - The list of tasks to persist.
def persist_tasks(self, tasks: List[Task]) -> None:
with belief_scope("TaskPersistenceService.persist_tasks"):
for task in tasks:
self.persist_task(task)
# [/DEF:persist_tasks:Function]
# [DEF:load_tasks:Function]
# @PURPOSE: Loads tasks from the database.
# @PRE: limit is an integer.
# @POST: Returns list of Task objects.
# @PARAM: limit (int) - Max tasks to load.
# @PARAM: status (Optional[TaskStatus]) - Filter by status.
# @RETURN: List[Task] - The loaded tasks.
def load_tasks(self, limit: int = 100, status: Optional[TaskStatus] = None) -> List[Task]:
with belief_scope("TaskPersistenceService.load_tasks"):
session: Session = TasksSessionLocal()
try:
query = session.query(TaskRecord)
if status:
query = query.filter(TaskRecord.status == status.value)
records = query.order_by(TaskRecord.created_at.desc()).limit(limit).all()
loaded_tasks = []
for record in records:
try:
logs = []
if record.logs:
for log_data in record.logs:
# Handle timestamp conversion if it's a string
if isinstance(log_data.get('timestamp'), str):
log_data['timestamp'] = datetime.fromisoformat(log_data['timestamp'])
logs.append(LogEntry(**log_data))
task = Task(
id=record.id,
plugin_id=record.type,
status=TaskStatus(record.status),
started_at=record.started_at,
finished_at=record.finished_at,
params=record.params or {},
result=record.result,
logs=logs
)
loaded_tasks.append(task)
except Exception as e:
logger.error(f"Failed to reconstruct task {record.id}: {e}")
return loaded_tasks
finally:
session.close()
# [/DEF:load_tasks:Function]
# [DEF:delete_tasks:Function]
# @PURPOSE: Deletes specific tasks from the database.
# @PRE: task_ids is a list of strings.
# @POST: Specified task records deleted from database.
# @PARAM: task_ids (List[str]) - List of task IDs to delete.
def delete_tasks(self, task_ids: List[str]) -> None:
if not task_ids:
return
with belief_scope("TaskPersistenceService.delete_tasks"):
session: Session = TasksSessionLocal()
try:
session.query(TaskRecord).filter(TaskRecord.id.in_(task_ids)).delete(synchronize_session=False)
session.commit()
except Exception as e:
session.rollback()
logger.error(f"Failed to delete tasks: {e}")
finally:
session.close()
# [/DEF:delete_tasks:Function]
# [/DEF:TaskPersistenceService:Class]
# [/DEF:TaskPersistenceModule:Module]
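# Illustrative round-trip sketch (assumption, not part of the diff): persisting
# a task and reading it back. Requires the tasks.db SQLAlchemy setup from
# ..database to be configured; the plugin ID and params are placeholders.
def _demo_persistence_roundtrip() -> None:
    service = TaskPersistenceService()
    task = Task(plugin_id="demo", params={"environment_id": "env-1"})
    service.persist_task(task)                  # insert or update the record
    restored = service.load_tasks(limit=10)     # most recently created tasks
    print([t.id for t in restored])
    service.delete_tasks([task.id])             # clean up the demo record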

View File

@@ -0,0 +1,237 @@
# [DEF:backend.core.utils.dataset_mapper:Module]
#
# @SEMANTICS: dataset, mapping, postgresql, xlsx, superset
# @PURPOSE: This module is responsible for updating metadata (verbose_map) in Superset datasets, sourcing it from PostgreSQL or XLSX files.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> backend.core.superset_client
# @RELATION: DEPENDS_ON -> pandas
# @RELATION: DEPENDS_ON -> psycopg2
# @PUBLIC_API: DatasetMapper
# [SECTION: IMPORTS]
import pandas as pd # type: ignore
import psycopg2 # type: ignore
from typing import Dict, List, Optional, Any
from ..logger import logger as app_logger, belief_scope
# [/SECTION]
# [DEF:DatasetMapper:Class]
# @PURPOSE: Class for mapping and updating verbose_map in Superset datasets.
class DatasetMapper:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the mapper.
# @POST: The DatasetMapper object is initialized.
def __init__(self):
pass
# [/DEF:__init__:Function]
# [DEF:get_postgres_comments:Function]
# @PURPOSE: Extracts column comments from the PostgreSQL system catalog.
# @PRE: db_config must contain valid connection parameters (host, port, user, password, dbname).
# @PRE: table_name and table_schema must be strings.
# @POST: Returns a dictionary mapping column names to comments from the database.
# @THROW: Exception - On connection errors or query failures.
# @PARAM: db_config (Dict) - Database connection configuration.
# @PARAM: table_name (str) - Table name.
# @PARAM: table_schema (str) - Table schema.
# @RETURN: Dict[str, str] - Dictionary of column comments.
def get_postgres_comments(self, db_config: Dict, table_name: str, table_schema: str) -> Dict[str, str]:
with belief_scope("Fetch comments from PostgreSQL"):
app_logger.info("[get_postgres_comments][Enter] Fetching comments from PostgreSQL for %s.%s.", table_schema, table_name)
query = f"""
SELECT
cols.column_name,
CASE
WHEN pg_catalog.col_description(
(SELECT c.oid
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relname = cols.table_name
AND n.nspname = cols.table_schema),
cols.ordinal_position::int
) LIKE '%|%' THEN
split_part(
pg_catalog.col_description(
(SELECT c.oid
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relname = cols.table_name
AND n.nspname = cols.table_schema),
cols.ordinal_position::int
),
'|',
1
)
ELSE
pg_catalog.col_description(
(SELECT c.oid
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relname = cols.table_name
AND n.nspname = cols.table_schema),
cols.ordinal_position::int
)
END AS column_comment
FROM
information_schema.columns cols
WHERE cols.table_catalog = '{db_config.get('dbname')}' AND cols.table_name = '{table_name}' AND cols.table_schema = '{table_schema}';
"""
comments = {}
try:
with psycopg2.connect(**db_config) as conn, conn.cursor() as cursor:
cursor.execute(query)
for row in cursor.fetchall():
if row[1]:
comments[row[0]] = row[1]
app_logger.info("[get_postgres_comments][Success] Fetched %d comments.", len(comments))
except Exception as e:
app_logger.error("[get_postgres_comments][Failure] %s", e, exc_info=True)
raise
return comments
# [/DEF:get_postgres_comments:Function]
# [DEF:load_excel_mappings:Function]
# @PURPOSE: Loads 'column_name' -> 'verbose_name' mappings from an XLSX file.
# @PRE: file_path must point to an existing XLSX file.
# @POST: Returns a dictionary with the mappings from the file.
# @THROW: Exception - On file read or parsing errors.
# @PARAM: file_path (str) - Path to the XLSX file.
# @RETURN: Dict[str, str] - Dictionary with the mappings.
def load_excel_mappings(self, file_path: str) -> Dict[str, str]:
with belief_scope("Load mappings from Excel"):
app_logger.info("[load_excel_mappings][Enter] Loading mappings from %s.", file_path)
try:
df = pd.read_excel(file_path)
mappings = df.set_index('column_name')['verbose_name'].to_dict()
app_logger.info("[load_excel_mappings][Success] Loaded %d mappings.", len(mappings))
return mappings
except Exception as e:
app_logger.error("[load_excel_mappings][Failure] %s", e, exc_info=True)
raise
# [/DEF:load_excel_mappings:Function]
# [DEF:run_mapping:Function]
# @PURPOSE: Main entry point for performing the mapping and updating a dataset's verbose_map in Superset.
# @PRE: superset_client must be authenticated.
# @PRE: dataset_id must be an existing ID in Superset.
# @POST: If changes are found, the dataset in Superset is updated via the API.
# @RELATION: CALLS -> self.get_postgres_comments
# @RELATION: CALLS -> self.load_excel_mappings
# @RELATION: CALLS -> superset_client.get_dataset
# @RELATION: CALLS -> superset_client.update_dataset
# @PARAM: superset_client (Any) - Superset client.
# @PARAM: dataset_id (int) - ID of the dataset to update.
# @PARAM: source (str) - Data source ('postgres', 'excel', 'both').
# @PARAM: postgres_config (Optional[Dict]) - PostgreSQL connection configuration.
# @PARAM: excel_path (Optional[str]) - Path to the XLSX file.
# @PARAM: table_name (Optional[str]) - Table name in PostgreSQL.
# @PARAM: table_schema (Optional[str]) - Table schema in PostgreSQL.
def run_mapping(self, superset_client: Any, dataset_id: int, source: str, postgres_config: Optional[Dict] = None, excel_path: Optional[str] = None, table_name: Optional[str] = None, table_schema: Optional[str] = None):
with belief_scope(f"Run dataset mapping for ID {dataset_id}"):
app_logger.info("[run_mapping][Enter] Starting dataset mapping for ID %d from source '%s'.", dataset_id, source)
mappings: Dict[str, str] = {}
try:
if source in ['postgres', 'both']:
assert postgres_config and table_name and table_schema, "Postgres config is required."
mappings.update(self.get_postgres_comments(postgres_config, table_name, table_schema))
if source in ['excel', 'both']:
assert excel_path, "Excel path is required."
mappings.update(self.load_excel_mappings(excel_path))
if source not in ['postgres', 'excel', 'both']:
app_logger.error("[run_mapping][Failure] Invalid source: %s.", source)
return
dataset_response = superset_client.get_dataset(dataset_id)
dataset_data = dataset_response['result']
original_columns = dataset_data.get('columns', [])
updated_columns = []
changes_made = False
for column in original_columns:
col_name = column.get('column_name')
new_column = {
"column_name": col_name,
"id": column.get("id"),
"advanced_data_type": column.get("advanced_data_type"),
"description": column.get("description"),
"expression": column.get("expression"),
"extra": column.get("extra"),
"filterable": column.get("filterable"),
"groupby": column.get("groupby"),
"is_active": column.get("is_active"),
"is_dttm": column.get("is_dttm"),
"python_date_format": column.get("python_date_format"),
"type": column.get("type"),
"uuid": column.get("uuid"),
"verbose_name": column.get("verbose_name"),
}
new_column = {k: v for k, v in new_column.items() if v is not None}
if col_name in mappings:
mapping_value = mappings[col_name]
if isinstance(mapping_value, str) and new_column.get('verbose_name') != mapping_value:
new_column['verbose_name'] = mapping_value
changes_made = True
updated_columns.append(new_column)
updated_metrics = []
for metric in dataset_data.get("metrics", []):
new_metric = {
"id": metric.get("id"),
"metric_name": metric.get("metric_name"),
"expression": metric.get("expression"),
"verbose_name": metric.get("verbose_name"),
"description": metric.get("description"),
"d3format": metric.get("d3format"),
"currency": metric.get("currency"),
"extra": metric.get("extra"),
"warning_text": metric.get("warning_text"),
"metric_type": metric.get("metric_type"),
"uuid": metric.get("uuid"),
}
updated_metrics.append({k: v for k, v in new_metric.items() if v is not None})
if changes_made:
payload_for_update = {
"database_id": dataset_data.get("database", {}).get("id"),
"table_name": dataset_data.get("table_name"),
"schema": dataset_data.get("schema"),
"columns": updated_columns,
"owners": [owner["id"] for owner in dataset_data.get("owners", [])],
"metrics": updated_metrics,
"extra": dataset_data.get("extra"),
"description": dataset_data.get("description"),
"sql": dataset_data.get("sql"),
"cache_timeout": dataset_data.get("cache_timeout"),
"catalog": dataset_data.get("catalog"),
"default_endpoint": dataset_data.get("default_endpoint"),
"external_url": dataset_data.get("external_url"),
"fetch_values_predicate": dataset_data.get("fetch_values_predicate"),
"filter_select_enabled": dataset_data.get("filter_select_enabled"),
"is_managed_externally": dataset_data.get("is_managed_externally"),
"is_sqllab_view": dataset_data.get("is_sqllab_view"),
"main_dttm_col": dataset_data.get("main_dttm_col"),
"normalize_columns": dataset_data.get("normalize_columns"),
"offset": dataset_data.get("offset"),
"template_params": dataset_data.get("template_params"),
}
payload_for_update = {k: v for k, v in payload_for_update.items() if v is not None}
superset_client.update_dataset(dataset_id, payload_for_update)
app_logger.info("[run_mapping][Success] Dataset %d columns' verbose_name updated.", dataset_id)
else:
app_logger.info("[run_mapping][State] No changes in columns' verbose_name, skipping update.")
except Exception as e:  # also covers AssertionError and FileNotFoundError
app_logger.error("[run_mapping][Failure] %s", e, exc_info=True)
return
# [/DEF:run_mapping:Function]
# [/DEF:DatasetMapper:Class]
# [/DEF:backend.core.utils.dataset_mapper:Module]
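# --- Usage sketch (illustrative, not part of the module) ---
# Assumes DatasetMapper can be constructed without arguments and that `client` is an
# already-authenticated Superset client exposing get_dataset/update_dataset, as the
# @RELATION tags above describe; connection values and paths below are placeholders.
mapper = DatasetMapper()
mapper.run_mapping(
    superset_client=client,
    dataset_id=42,
    source="both",
    postgres_config={"host": "db.example.com", "port": 5432, "dbname": "analytics",
                     "user": "readonly", "password": "<secret>"},
    excel_path="mappings/sales_columns.xlsx",
    table_name="sales",
    table_schema="public",
)
# --- end sketch ---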

View File

@@ -0,0 +1,486 @@
# [DEF:backend.core.utils.fileio:Module]
#
# @SEMANTICS: file, io, zip, yaml, temp, archive, utility
# @PURPOSE: Предоставляет набор утилит для управления файловыми операциями, включая работу с временными файлами, архивами ZIP, файлами YAML и очистку директорий.
# @LAYER: Infra
# @RELATION: DEPENDS_ON -> backend.src.core.logger
# @RELATION: DEPENDS_ON -> pyyaml
# @PUBLIC_API: create_temp_file, remove_empty_directories, read_dashboard_from_disk, calculate_crc32, RetentionPolicy, archive_exports, save_and_unpack_dashboard, update_yamls, create_dashboard_export, sanitize_filename, get_filename_from_headers, consolidate_archive_folders
# [SECTION: IMPORTS]
import os
import re
import zipfile
from pathlib import Path
from typing import Any, Optional, Tuple, Dict, List, Union, LiteralString, Generator
from contextlib import contextmanager
import tempfile
from datetime import date, datetime
import shutil
import zlib
from dataclasses import dataclass
import yaml
from ..logger import logger as app_logger, belief_scope
# [/SECTION]
# [DEF:InvalidZipFormatError:Class]
class InvalidZipFormatError(Exception):
pass
# [DEF:create_temp_file:Function]
# @PURPOSE: Контекстный менеджер для создания временного файла или директории с гарантированным удалением.
# @PRE: suffix должен быть строкой, определяющей тип ресурса.
# @POST: Временный ресурс создан и путь к нему возвращен; ресурс удален после выхода из контекста.
# @PARAM: content (Optional[bytes]) - Бинарное содержимое для записи во временный файл.
# @PARAM: suffix (str) - Суффикс ресурса. Если `.dir`, создается директория.
# @PARAM: mode (str) - Режим записи в файл (e.g., 'wb').
# @YIELDS: Path - Путь к временному ресурсу.
# @THROW: IOError - При ошибках создания ресурса.
@contextmanager
def create_temp_file(content: Optional[bytes] = None, suffix: str = ".zip", mode: str = 'wb', dry_run = False) -> Generator[Path, None, None]:
with belief_scope("Create temporary resource"):
resource_path = None
is_dir = suffix.startswith('.dir')
try:
if is_dir:
with tempfile.TemporaryDirectory(suffix=suffix) as temp_dir:
resource_path = Path(temp_dir)
app_logger.debug("[create_temp_file][State] Created temporary directory: %s", resource_path)
yield resource_path
else:
fd, temp_path_str = tempfile.mkstemp(suffix=suffix)
resource_path = Path(temp_path_str)
os.close(fd)
if content:
resource_path.write_bytes(content)
app_logger.debug("[create_temp_file][State] Created temporary file: %s", resource_path)
yield resource_path
finally:
if resource_path and resource_path.exists() and not dry_run:
try:
if resource_path.is_dir():
shutil.rmtree(resource_path)
app_logger.debug("[create_temp_file][Cleanup] Removed temporary directory: %s", resource_path)
else:
resource_path.unlink()
app_logger.debug("[create_temp_file][Cleanup] Removed temporary file: %s", resource_path)
except OSError as e:
app_logger.error("[create_temp_file][Failure] Error during cleanup of %s: %s", resource_path, e)
# [/DEF:create_temp_file:Function]
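# --- Usage sketch (illustrative, not part of the module) ---
# Shows both modes of the context manager: a temporary file seeded with bytes, and a
# temporary working directory requested via the ".dir" suffix convention described above.
with create_temp_file(content=b"dummy zip bytes", suffix=".zip") as tmp_zip:
    print(tmp_zip, tmp_zip.stat().st_size)  # resource exists only inside the block
with create_temp_file(suffix=".dir") as tmp_dir:
    (tmp_dir / "export.yaml").write_text("key: value", encoding="utf-8")
# Both resources are removed on exit unless dry_run=True is passed.
# --- end sketch ---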
# [DEF:remove_empty_directories:Function]
# @PURPOSE: Рекурсивно удаляет все пустые поддиректории, начиная с указанного пути.
# @PRE: root_dir должен быть путем к существующей директории.
# @POST: Все пустые поддиректории удалены, возвращено их количество.
# @PARAM: root_dir (str) - Путь к корневой директории для очистки.
# @RETURN: int - Количество удаленных директорий.
def remove_empty_directories(root_dir: str) -> int:
with belief_scope(f"Remove empty directories in {root_dir}"):
app_logger.info("[remove_empty_directories][Enter] Starting cleanup of empty directories in %s", root_dir)
removed_count = 0
if not os.path.isdir(root_dir):
app_logger.error("[remove_empty_directories][Failure] Directory not found: %s", root_dir)
return 0
for current_dir, _, _ in os.walk(root_dir, topdown=False):
if not os.listdir(current_dir):
try:
os.rmdir(current_dir)
removed_count += 1
app_logger.info("[remove_empty_directories][State] Removed empty directory: %s", current_dir)
except OSError as e:
app_logger.error("[remove_empty_directories][Failure] Failed to remove %s: %s", current_dir, e)
app_logger.info("[remove_empty_directories][Exit] Removed %d empty directories.", removed_count)
return removed_count
# [/DEF:remove_empty_directories:Function]
# [DEF:read_dashboard_from_disk:Function]
# @PURPOSE: Читает бинарное содержимое файла с диска.
# @PRE: file_path должен указывать на существующий файл.
# @POST: Возвращает байты содержимого и имя файла.
# @PARAM: file_path (str) - Путь к файлу.
# @RETURN: Tuple[bytes, str] - Кортеж (содержимое, имя файла).
# @THROW: FileNotFoundError - Если файл не найден.
def read_dashboard_from_disk(file_path: str) -> Tuple[bytes, str]:
with belief_scope(f"Read dashboard from {file_path}"):
path = Path(file_path)
assert path.is_file(), f"Файл дашборда не найден: {file_path}"
app_logger.info("[read_dashboard_from_disk][Enter] Reading file: %s", file_path)
content = path.read_bytes()
if not content:
app_logger.warning("[read_dashboard_from_disk][Warning] File is empty: %s", file_path)
return content, path.name
# [/DEF:read_dashboard_from_disk:Function]
# [DEF:calculate_crc32:Function]
# @PURPOSE: Вычисляет контрольную сумму CRC32 для файла.
# @PRE: file_path должен быть объектом Path к существующему файлу.
# @POST: Возвращает 8-значную hex-строку CRC32.
# @PARAM: file_path (Path) - Путь к файлу.
# @RETURN: str - 8-значное шестнадцатеричное представление CRC32.
# @THROW: IOError - При ошибках чтения файла.
def calculate_crc32(file_path: Path) -> str:
with belief_scope(f"Calculate CRC32 for {file_path}"):
with open(file_path, 'rb') as f:
crc32_value = zlib.crc32(f.read())
return f"{crc32_value:08x}"
# [/DEF:calculate_crc32:Function]
# [SECTION: DATA_CLASSES]
# [DEF:RetentionPolicy:DataClass]
# @PURPOSE: Определяет политику хранения для архивов (ежедневные, еженедельные, ежемесячные).
@dataclass
class RetentionPolicy:
daily: int = 7
weekly: int = 4
monthly: int = 12
# [/DEF:RetentionPolicy:DataClass]
# [/SECTION]
# [DEF:archive_exports:Function]
# @PURPOSE: Управляет архивом экспортированных файлов, применяя политику хранения и дедупликацию.
# @PRE: output_dir должен быть путем к существующей директории.
# @POST: Старые или дублирующиеся архивы удалены согласно политике.
# @RELATION: CALLS -> apply_retention_policy
# @RELATION: CALLS -> calculate_crc32
# @PARAM: output_dir (str) - Директория с архивами.
# @PARAM: policy (RetentionPolicy) - Политика хранения.
# @PARAM: deduplicate (bool) - Флаг для включения удаления дубликатов по CRC32.
def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool = False) -> None:
with belief_scope(f"Archive exports in {output_dir}"):
output_path = Path(output_dir)
if not output_path.is_dir():
app_logger.warning("[archive_exports][Skip] Archive directory not found: %s", output_dir)
return
app_logger.info("[archive_exports][Enter] Managing archive in %s", output_dir)
# 1. Collect all zip files
zip_files = list(output_path.glob("*.zip"))
if not zip_files:
app_logger.info("[archive_exports][State] No zip files found in %s", output_dir)
return
# 2. Deduplication
if deduplicate:
app_logger.info("[archive_exports][State] Starting deduplication...")
checksums = {}
files_to_remove = []
# Sort by modification time (newest first) to keep the latest version
zip_files.sort(key=lambda f: f.stat().st_mtime, reverse=True)
for file_path in zip_files:
try:
crc = calculate_crc32(file_path)
if crc in checksums:
files_to_remove.append(file_path)
app_logger.debug("[archive_exports][State] Duplicate found: %s (same as %s)", file_path.name, checksums[crc].name)
else:
checksums[crc] = file_path
except Exception as e:
app_logger.error("[archive_exports][Failure] Failed to calculate CRC32 for %s: %s", file_path, e)
for f in files_to_remove:
try:
f.unlink()
zip_files.remove(f)
app_logger.info("[archive_exports][State] Removed duplicate: %s", f.name)
except OSError as e:
app_logger.error("[archive_exports][Failure] Failed to remove duplicate %s: %s", f, e)
# 3. Retention Policy
files_with_dates = []
for file_path in zip_files:
# Try to extract date from filename
# Pattern: ..._YYYYMMDD_HHMMSS.zip or ..._YYYYMMDD.zip
match = re.search(r'_(\d{8})[_.]', file_path.name)
file_date = None
if match:
try:
date_str = match.group(1)
file_date = datetime.strptime(date_str, "%Y%m%d").date()
except ValueError:
pass
if not file_date:
# Fallback to modification time
file_date = datetime.fromtimestamp(file_path.stat().st_mtime).date()
files_with_dates.append((file_path, file_date))
files_to_keep = apply_retention_policy(files_with_dates, policy)
for file_path, _ in files_with_dates:
if file_path not in files_to_keep:
try:
file_path.unlink()
app_logger.info("[archive_exports][State] Removed by retention policy: %s", file_path.name)
except OSError as e:
app_logger.error("[archive_exports][Failure] Failed to remove %s: %s", file_path, e)
# [/DEF:archive_exports:Function]
# [DEF:apply_retention_policy:Function]
# @PURPOSE: (Helper) Применяет политику хранения к списку файлов, возвращая те, что нужно сохранить.
# @PRE: files_with_dates is a list of (Path, date) tuples.
# @POST: Returns a set of files to keep.
# @PARAM: files_with_dates (List[Tuple[Path, date]]) - Список файлов с датами.
# @PARAM: policy (RetentionPolicy) - Политика хранения.
# @RETURN: set - Множество путей к файлам, которые должны быть сохранены.
def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: RetentionPolicy) -> set:
with belief_scope("Apply retention policy"):
# Сортируем по дате (от новой к старой)
sorted_files = sorted(files_with_dates, key=lambda x: x[1], reverse=True)
# Словарь для хранения файлов по категориям
daily_files = []
weekly_files = []
monthly_files = []
today = date.today()
for file_path, file_date in sorted_files:
# Ежедневные
if (today - file_date).days < policy.daily:
daily_files.append(file_path)
# Еженедельные
elif (today - file_date).days < policy.weekly * 7:
weekly_files.append(file_path)
# Ежемесячные
elif (today - file_date).days < policy.monthly * 30:
monthly_files.append(file_path)
# Возвращаем множество файлов, которые нужно сохранить
files_to_keep = set()
files_to_keep.update(daily_files)
files_to_keep.update(weekly_files[:policy.weekly])
files_to_keep.update(monthly_files[:policy.monthly])
app_logger.debug("[apply_retention_policy][State] Keeping %d files according to retention policy", len(files_to_keep))
return files_to_keep
# [/DEF:apply_retention_policy:Function]
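# --- Worked example (hypothetical dates, defaults daily=7 / weekly=4 / monthly=12) ---
# A 3-day-old file lands in the daily bucket and is always kept; a 20-day-old file
# competes for one of the 4 weekly slots; a 100-day-old file for one of the 12 monthly
# slots; anything older than roughly monthly*30 days is dropped.
from datetime import timedelta
policy_demo = RetentionPolicy()
today_demo = date.today()
demo_files = [
    (Path("export_a.zip"), today_demo - timedelta(days=3)),    # daily -> kept
    (Path("export_b.zip"), today_demo - timedelta(days=20)),   # weekly slot
    (Path("export_c.zip"), today_demo - timedelta(days=100)),  # monthly slot
    (Path("export_d.zip"), today_demo - timedelta(days=400)),  # outside policy -> dropped
]
print({p.name for p in apply_retention_policy(demo_files, policy_demo)})
# --- end sketch ---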
# [DEF:save_and_unpack_dashboard:Function]
# @PURPOSE: Сохраняет бинарное содержимое ZIP-архива на диск и опционально распаковывает его.
# @PRE: zip_content должен быть байтами валидного ZIP-архива.
# @POST: ZIP-файл сохранен, и если unpack=True, он распакован в output_dir.
# @PARAM: zip_content (bytes) - Содержимое ZIP-архива.
# @PARAM: output_dir (Union[str, Path]) - Директория для сохранения.
# @PARAM: unpack (bool) - Флаг, нужно ли распаковывать архив.
# @PARAM: original_filename (Optional[str]) - Исходное имя файла для сохранения.
# @RETURN: Tuple[Path, Optional[Path]] - Путь к ZIP-файлу и, если применимо, путь к директории с распаковкой.
# @THROW: InvalidZipFormatError - При ошибке формата ZIP.
def save_and_unpack_dashboard(zip_content: bytes, output_dir: Union[str, Path], unpack: bool = False, original_filename: Optional[str] = None) -> Tuple[Path, Optional[Path]]:
with belief_scope("Save and unpack dashboard"):
app_logger.info("[save_and_unpack_dashboard][Enter] Processing dashboard. Unpack: %s", unpack)
try:
output_path = Path(output_dir)
output_path.mkdir(parents=True, exist_ok=True)
zip_name = sanitize_filename(original_filename) if original_filename else f"dashboard_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip"
zip_path = output_path / zip_name
zip_path.write_bytes(zip_content)
app_logger.info("[save_and_unpack_dashboard][State] Dashboard saved to: %s", zip_path)
if unpack:
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
zip_ref.extractall(output_path)
app_logger.info("[save_and_unpack_dashboard][State] Dashboard unpacked to: %s", output_path)
return zip_path, output_path
return zip_path, None
except zipfile.BadZipFile as e:
app_logger.error("[save_and_unpack_dashboard][Failure] Invalid ZIP archive: %s", e)
raise InvalidZipFormatError(f"Invalid ZIP file: {e}") from e
# [/DEF:save_and_unpack_dashboard:Function]
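# --- Usage sketch (illustrative, not part of the module) ---
# `zip_bytes` stands in for an export downloaded from the Superset API; the paths are placeholders.
zip_bytes = Path("downloads/dashboard_export_20260101_120000.zip").read_bytes()
zip_path, unpacked_dir = save_and_unpack_dashboard(
    zip_content=zip_bytes,
    output_dir="exports/my_dashboard",
    unpack=True,
    original_filename="dashboard_export_20260101_120000.zip",
)
print(zip_path, unpacked_dir)
# --- end sketch ---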
# [DEF:update_yamls:Function]
# @PURPOSE: Обновляет конфигурации в YAML-файлах, заменяя значения или применяя regex.
# @PRE: path должен быть существующей директорией.
# @POST: Все YAML файлы в директории обновлены согласно переданным параметрам.
# @RELATION: CALLS -> _update_yaml_file
# @THROW: FileNotFoundError - Если `path` не существует.
# @PARAM: db_configs (Optional[List[Dict]]) - Список конфигураций для замены.
# @PARAM: path (str) - Путь к директории с YAML файлами.
# @PARAM: regexp_pattern (Optional[LiteralString]) - Паттерн для поиска.
# @PARAM: replace_string (Optional[LiteralString]) - Строка для замены.
def update_yamls(db_configs: Optional[List[Dict[str, Any]]] = None, path: str = "dashboards", regexp_pattern: Optional[LiteralString] = None, replace_string: Optional[LiteralString] = None) -> None:
with belief_scope("Update YAML configurations"):
app_logger.info("[update_yamls][Enter] Starting YAML configuration update.")
dir_path = Path(path)
assert dir_path.is_dir(), f"Путь {path} не существует или не является директорией"
configs: List[Dict[str, Any]] = db_configs or []
for file_path in dir_path.rglob("*.yaml"):
_update_yaml_file(file_path, configs, regexp_pattern, replace_string)
# [/DEF:update_yamls:Function]
# [DEF:_update_yaml_file:Function]
# @PURPOSE: (Helper) Обновляет один YAML файл.
# @PRE: file_path должен быть объектом Path к существующему YAML файлу.
# @POST: Файл обновлен согласно переданным конфигурациям или регулярному выражению.
# @PARAM: file_path (Path) - Путь к файлу.
# @PARAM: db_configs (List[Dict]) - Конфигурации.
# @PARAM: regexp_pattern (Optional[str]) - Паттерн.
# @PARAM: replace_string (Optional[str]) - Замена.
def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_pattern: Optional[str], replace_string: Optional[str]) -> None:
with belief_scope(f"Update YAML file: {file_path}"):
# Читаем содержимое файла
try:
with open(file_path, 'r', encoding='utf-8') as f:
content = f.read()
except Exception as e:
app_logger.error("[_update_yaml_file][Failure] Failed to read %s: %s", file_path, e)
return
# Если задан pattern и replace_string, применяем замену по регулярному выражению
if regexp_pattern and replace_string:
try:
new_content = re.sub(regexp_pattern, replace_string, content)
if new_content != content:
with open(file_path, 'w', encoding='utf-8') as f:
f.write(new_content)
app_logger.info("[_update_yaml_file][State] Updated %s using regex pattern", file_path)
except Exception as e:
app_logger.error("[_update_yaml_file][Failure] Error applying regex to %s: %s", file_path, e)
# Если заданы конфигурации, заменяем значения (поддержка old/new)
if db_configs:
try:
# Прямой текстовый заменитель для старых/новых значений, чтобы сохранить структуру файла
modified_content = content
for cfg in db_configs:
# Ожидаем структуру: {'old': {...}, 'new': {...}}
old_cfg = cfg.get('old', {})
new_cfg = cfg.get('new', {})
for key, old_val in old_cfg.items():
if key in new_cfg:
new_val = new_cfg[key]
# Заменяем только точные совпадения старого значения в тексте YAML, используя ключ для контекста
if isinstance(old_val, str):
# Ищем паттерн: key: "value" или key: value
key_pattern = re.escape(key)
val_pattern = re.escape(old_val)
# Группы: 1=ключ+разделитель, 2=открывающая кавычка (опц), 3=значение, 4=закрывающая кавычка (опц)
pattern = rf'({key_pattern}\s*:\s*)(["\']?)({val_pattern})(["\']?)'
# [DEF:replacer:Function]
# @PURPOSE: Функция замены, сохраняющая кавычки если они были.
# @PRE: match должен быть объектом совпадения регулярного выражения.
# @POST: Возвращает строку с новым значением, сохраняя префикс и кавычки.
def replacer(match):
prefix = match.group(1)
quote_open = match.group(2)
quote_close = match.group(4)
return f"{prefix}{quote_open}{new_val}{quote_close}"
# [/DEF:replacer:Function]
modified_content = re.sub(pattern, replacer, modified_content)
app_logger.info("[_update_yaml_file][State] Replaced '%s' with '%s' for key %s in %s", old_val, new_val, key, file_path)
# Записываем обратно изменённый контент без парсинга YAML, сохраняем оригинальное форматирование
with open(file_path, 'w', encoding='utf-8') as f:
f.write(modified_content)
except Exception as e:
app_logger.error("[_update_yaml_file][Failure] Error performing raw replacement in %s: %s", file_path, e)
# [/DEF:_update_yaml_file:Function]
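# --- Usage sketch (illustrative, not part of the module) ---
# Shows the {'old': {...}, 'new': {...}} structure expected by update_yamls above; the
# key name and URIs are hypothetical examples of values found in exported YAML files.
demo_db_configs = [
    {
        "old": {"sqlalchemy_uri": "postgresql://superset@old-host:5432/analytics"},
        "new": {"sqlalchemy_uri": "postgresql://superset@new-host:5432/analytics"},
    }
]
update_yamls(db_configs=demo_db_configs, path="exports/my_dashboard/databases")
# --- end sketch ---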
# [DEF:create_dashboard_export:Function]
# @PURPOSE: Создает ZIP-архив из указанных исходных путей.
# @PRE: source_paths должен содержать существующие пути.
# @POST: ZIP-архив создан по пути zip_path.
# @PARAM: zip_path (Union[str, Path]) - Путь для сохранения ZIP архива.
# @PARAM: source_paths (List[Union[str, Path]]) - Список исходных путей для архивации.
# @PARAM: exclude_extensions (Optional[List[str]]) - Список расширений для исключения.
# @RETURN: bool - `True` при успехе, `False` при ошибке.
def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union[str, Path]], exclude_extensions: Optional[List[str]] = None) -> bool:
with belief_scope(f"Create dashboard export: {zip_path}"):
app_logger.info("[create_dashboard_export][Enter] Packing dashboard: %s -> %s", source_paths, zip_path)
try:
exclude_ext = [ext.lower() for ext in exclude_extensions or []]
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
for src_path_str in source_paths:
src_path = Path(src_path_str)
assert src_path.exists(), f"Путь не найден: {src_path}"
for item in src_path.rglob('*'):
if item.is_file() and item.suffix.lower() not in exclude_ext:
arcname = item.relative_to(src_path.parent)
zipf.write(item, arcname)
app_logger.info("[create_dashboard_export][Exit] Archive created: %s", zip_path)
return True
except (IOError, zipfile.BadZipFile, AssertionError) as e:
app_logger.error("[create_dashboard_export][Failure] Error: %s", e, exc_info=True)
return False
# [/DEF:create_dashboard_export:Function]
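# --- Usage sketch (illustrative, not part of the module) ---
# Packs an unpacked export directory back into a ZIP, skipping log/temp files; paths are placeholders.
ok = create_dashboard_export(
    zip_path="exports/dashboard_bundle.zip",
    source_paths=["exports/my_dashboard"],
    exclude_extensions=[".log", ".tmp"],
)
print("archive created" if ok else "packing failed")
# --- end sketch ---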
# [DEF:sanitize_filename:Function]
# @PURPOSE: Очищает строку от символов, недопустимых в именах файлов.
# @PRE: filename должен быть строкой.
# @POST: Возвращает строку без спецсимволов.
# @PARAM: filename (str) - Исходное имя файла.
# @RETURN: str - Очищенная строка.
def sanitize_filename(filename: str) -> str:
with belief_scope(f"Sanitize filename: {filename}"):
return re.sub(r'[\\/*?:"<>|]', "_", filename).strip()
# [/DEF:sanitize_filename:Function]
# [DEF:get_filename_from_headers:Function]
# @PURPOSE: Извлекает имя файла из HTTP заголовка 'Content-Disposition'.
# @PRE: headers должен быть словарем заголовков.
# @POST: Возвращает имя файла или None, если заголовок отсутствует.
# @PARAM: headers (dict) - Словарь HTTP заголовков.
# @RETURN: Optional[str] - The file name, or `None`.
def get_filename_from_headers(headers: dict) -> Optional[str]:
with belief_scope("Get filename from headers"):
content_disposition = headers.get("Content-Disposition", "")
if match := re.search(r'filename="?([^"]+)"?', content_disposition):
return match.group(1).strip()
return None
# [/DEF:get_filename_from_headers:Function]
# [DEF:consolidate_archive_folders:Function]
# @PURPOSE: Консолидирует директории архивов на основе общего слага в имени.
# @PRE: root_directory должен быть объектом Path к существующей директории.
# @POST: Директории с одинаковым префиксом объединены в одну.
# @THROW: AssertionError - If `root_directory` is not a Path to an existing directory.
# @PARAM: root_directory (Path) - Корневая директория для консолидации.
def consolidate_archive_folders(root_directory: Path) -> None:
with belief_scope(f"Consolidate archives in {root_directory}"):
assert isinstance(root_directory, Path), "root_directory must be a Path object."
assert root_directory.is_dir(), "root_directory must be an existing directory."
app_logger.info("[consolidate_archive_folders][Enter] Consolidating archives in %s", root_directory)
# Собираем все директории с архивами
archive_dirs = []
for item in root_directory.iterdir():
if item.is_dir():
# Проверяем, есть ли в директории ZIP-архивы
if any(item.glob("*.zip")):
archive_dirs.append(item)
# Группируем по слагу (части имени до первого '_')
slug_groups = {}
for dir_path in archive_dirs:
dir_name = dir_path.name
slug = dir_name.split('_')[0] if '_' in dir_name else dir_name
if slug not in slug_groups:
slug_groups[slug] = []
slug_groups[slug].append(dir_path)
# Для каждой группы консолидируем
for slug, dirs in slug_groups.items():
if len(dirs) <= 1:
continue
# Создаем целевую директорию
target_dir = root_directory / slug
target_dir.mkdir(exist_ok=True)
app_logger.info("[consolidate_archive_folders][State] Consolidating %d directories under %s", len(dirs), target_dir)
# Перемещаем содержимое
for source_dir in dirs:
if source_dir == target_dir:
continue
for item in source_dir.iterdir():
dest_item = target_dir / item.name
try:
# shutil.move handles both files and directories, so a single call covers both cases
shutil.move(str(item), str(dest_item))
except Exception as e:
app_logger.error("[consolidate_archive_folders][Failure] Failed to move %s to %s: %s", item, dest_item, e)
# Удаляем исходную директорию
try:
source_dir.rmdir()
app_logger.info("[consolidate_archive_folders][State] Removed source directory: %s", source_dir)
except Exception as e:
app_logger.error("[consolidate_archive_folders][Failure] Failed to remove source directory %s: %s", source_dir, e)
# [/DEF:consolidate_archive_folders:Function]
# [/DEF:backend.core.utils.fileio:Module]

View File

@@ -0,0 +1,53 @@
# [DEF:backend.src.core.utils.matching:Module]
#
# @SEMANTICS: fuzzy, matching, rapidfuzz, database, mapping
# @PURPOSE: Provides utility functions for fuzzy matching database names.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> rapidfuzz
#
# @INVARIANT: Confidence scores are returned as floats between 0.0 and 1.0.
# [SECTION: IMPORTS]
from rapidfuzz import fuzz, process
from typing import List, Dict
# [/SECTION]
# [DEF:suggest_mappings:Function]
# @PURPOSE: Suggests mappings between source and target databases using fuzzy matching.
# @PRE: source_databases and target_databases are lists of dictionaries with 'uuid' and 'database_name'.
# @POST: Returns a list of suggested mappings with confidence scores.
# @PARAM: source_databases (List[Dict]) - Databases from the source environment.
# @PARAM: target_databases (List[Dict]) - Databases from the target environment.
# @PARAM: threshold (int) - Minimum confidence score (0-100).
# @RETURN: List[Dict] - Suggested mappings.
def suggest_mappings(source_databases: List[Dict], target_databases: List[Dict], threshold: int = 60) -> List[Dict]:
"""
Suggest mappings between source and target databases using fuzzy matching.
"""
suggestions = []
if not target_databases:
return suggestions
target_names = [db['database_name'] for db in target_databases]
for s_db in source_databases:
# Use token_sort_ratio as decided in research.md
match = process.extractOne(
s_db['database_name'],
target_names,
scorer=fuzz.token_sort_ratio
)
if match:
name, score, index = match
if score >= threshold:
suggestions.append({
"source_db_uuid": s_db['uuid'],
"target_db_uuid": target_databases[index]['uuid'],
"confidence": score / 100.0
})
return suggestions
# [/DEF:suggest_mappings:Function]
# [/DEF:backend.src.core.utils.matching:Module]
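# --- Usage sketch (illustrative, not part of the module) ---
# Database names and UUIDs are made up; the confidence value is score/100 as above.
source_dbs = [{"uuid": "src-1", "database_name": "superset_analytics"}]
target_dbs = [
    {"uuid": "tgt-1", "database_name": "superset_analytics_replica"},
    {"uuid": "tgt-2", "database_name": "hr_reporting"},
]
print(suggest_mappings(source_dbs, target_dbs, threshold=60))
# Expected shape: [{"source_db_uuid": "src-1", "target_db_uuid": "tgt-1", "confidence": ...}]
# --- end sketch ---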

View File

@@ -1,232 +1,286 @@
# [DEF:superset_tool.utils.network:Module]
#
# @SEMANTICS: network, http, client, api, requests, session, authentication
# @PURPOSE: Инкапсулирует низкоуровневую HTTP-логику для взаимодействия с Superset API, включая аутентификацию, управление сессией, retry-логику и обработку ошибок.
# @LAYER: Infra
# @RELATION: DEPENDS_ON -> superset_tool.exceptions
# @RELATION: DEPENDS_ON -> superset_tool.utils.logger
# @RELATION: DEPENDS_ON -> requests
# @PUBLIC_API: APIClient
# [SECTION: IMPORTS]
from typing import Optional, Dict, Any, List, Union, cast
import json
import io
from pathlib import Path
import requests
from requests.adapters import HTTPAdapter
import urllib3
from urllib3.util.retry import Retry
from superset_tool.exceptions import AuthenticationError, NetworkError, DashboardNotFoundError, SupersetAPIError, PermissionDeniedError
from superset_tool.utils.logger import SupersetLogger
# [/SECTION]
# [DEF:APIClient:Class]
# @PURPOSE: Инкапсулирует HTTP-логику для работы с API, включая сессии, аутентификацию, и обработку запросов.
class APIClient:
DEFAULT_TIMEOUT = 30
# [DEF:APIClient.__init__:Function]
# @PURPOSE: Инициализирует API клиент с конфигурацией, сессией и логгером.
# @PARAM: config (Dict[str, Any]) - Конфигурация.
# @PARAM: verify_ssl (bool) - Проверять ли SSL.
# @PARAM: timeout (int) - Таймаут запросов.
# @PARAM: logger (Optional[SupersetLogger]) - Логгер.
def __init__(self, config: Dict[str, Any], verify_ssl: bool = True, timeout: int = DEFAULT_TIMEOUT, logger: Optional[SupersetLogger] = None):
self.logger = logger or SupersetLogger(name="APIClient")
self.logger.info("[APIClient.__init__][Entry] Initializing APIClient.")
self.base_url: str = config.get("base_url", "")
self.auth = config.get("auth")
self.request_settings = {"verify_ssl": verify_ssl, "timeout": timeout}
self.session = self._init_session()
self._tokens: Dict[str, str] = {}
self._authenticated = False
self.logger.info("[APIClient.__init__][Exit] APIClient initialized.")
# [/DEF:APIClient.__init__]
# [DEF:APIClient._init_session:Function]
# @PURPOSE: Создает и настраивает `requests.Session` с retry-логикой.
# @RETURN: requests.Session - Настроенная сессия.
def _init_session(self) -> requests.Session:
session = requests.Session()
retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
adapter = HTTPAdapter(max_retries=retries)
session.mount('http://', adapter)
session.mount('https://', adapter)
if not self.request_settings["verify_ssl"]:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
self.logger.warning("[_init_session][State] SSL verification disabled.")
session.verify = self.request_settings["verify_ssl"]
return session
# [/DEF:APIClient._init_session]
# [DEF:APIClient.authenticate:Function]
# @PURPOSE: Выполняет аутентификацию в Superset API и получает access и CSRF токены.
# @POST: `self._tokens` заполнен, `self._authenticated` установлен в `True`.
# @RETURN: Dict[str, str] - Словарь с токенами.
# @THROW: AuthenticationError, NetworkError - при ошибках.
def authenticate(self) -> Dict[str, str]:
self.logger.info("[authenticate][Enter] Authenticating to %s", self.base_url)
try:
login_url = f"{self.base_url}/security/login"
response = self.session.post(login_url, json=self.auth, timeout=self.request_settings["timeout"])
response.raise_for_status()
access_token = response.json()["access_token"]
csrf_url = f"{self.base_url}/security/csrf_token/"
csrf_response = self.session.get(csrf_url, headers={"Authorization": f"Bearer {access_token}"}, timeout=self.request_settings["timeout"])
csrf_response.raise_for_status()
self._tokens = {"access_token": access_token, "csrf_token": csrf_response.json()["result"]}
self._authenticated = True
self.logger.info("[authenticate][Exit] Authenticated successfully.")
return self._tokens
except requests.exceptions.HTTPError as e:
raise AuthenticationError(f"Authentication failed: {e}") from e
except (requests.exceptions.RequestException, KeyError) as e:
raise NetworkError(f"Network or parsing error during authentication: {e}") from e
# [/DEF:APIClient.authenticate]
@property
def headers(self) -> Dict[str, str]:
# [DEF:APIClient.headers:Function]
# @PURPOSE: Возвращает HTTP-заголовки для аутентифицированных запросов.
if not self._authenticated: self.authenticate()
return {
"Authorization": f"Bearer {self._tokens['access_token']}",
"X-CSRFToken": self._tokens.get("csrf_token", ""),
"Referer": self.base_url,
"Content-Type": "application/json"
}
# [/DEF:APIClient.headers]
# [DEF:APIClient.request:Function]
# @PURPOSE: Выполняет универсальный HTTP-запрос к API.
# @RETURN: `requests.Response` если `raw_response=True`, иначе `dict`.
# @THROW: SupersetAPIError, NetworkError и их подклассы.
# @PARAM: method (str) - HTTP метод.
# @PARAM: endpoint (str) - API эндпоинт.
# @PARAM: headers (Optional[Dict]) - Дополнительные заголовки.
# @PARAM: raw_response (bool) - Возвращать ли сырой ответ.
def request(self, method: str, endpoint: str, headers: Optional[Dict] = None, raw_response: bool = False, **kwargs) -> Union[requests.Response, Dict[str, Any]]:
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy()
if headers: _headers.update(headers)
try:
response = self.session.request(method, full_url, headers=_headers, **kwargs)
response.raise_for_status()
return response if raw_response else response.json()
except requests.exceptions.HTTPError as e:
self._handle_http_error(e, endpoint)
except requests.exceptions.RequestException as e:
self._handle_network_error(e, full_url)
# [/DEF:APIClient.request]
# [DEF:APIClient._handle_http_error:Function]
# @PURPOSE: (Helper) Преобразует HTTP ошибки в кастомные исключения.
# @PARAM: e (requests.exceptions.HTTPError) - Ошибка.
# @PARAM: endpoint (str) - Эндпоинт.
def _handle_http_error(self, e: requests.exceptions.HTTPError, endpoint: str):
status_code = e.response.status_code
if status_code == 404: raise DashboardNotFoundError(endpoint) from e
if status_code == 403: raise PermissionDeniedError() from e
if status_code == 401: raise AuthenticationError() from e
raise SupersetAPIError(f"API Error {status_code}: {e.response.text}") from e
# [/DEF:APIClient._handle_http_error]
# [DEF:APIClient._handle_network_error:Function]
# @PURPOSE: (Helper) Преобразует сетевые ошибки в `NetworkError`.
# @PARAM: e (requests.exceptions.RequestException) - Ошибка.
# @PARAM: url (str) - URL.
def _handle_network_error(self, e: requests.exceptions.RequestException, url: str):
if isinstance(e, requests.exceptions.Timeout): msg = "Request timeout"
elif isinstance(e, requests.exceptions.ConnectionError): msg = "Connection error"
else: msg = f"Unknown network error: {e}"
raise NetworkError(msg, url=url) from e
# [/DEF:APIClient._handle_network_error]
# [DEF:APIClient.upload_file:Function]
# @PURPOSE: Загружает файл на сервер через multipart/form-data.
# @RETURN: Ответ API в виде словаря.
# @THROW: SupersetAPIError, NetworkError, TypeError.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: file_info (Dict[str, Any]) - Информация о файле.
# @PARAM: extra_data (Optional[Dict]) - Дополнительные данные.
# @PARAM: timeout (Optional[int]) - Таймаут.
def upload_file(self, endpoint: str, file_info: Dict[str, Any], extra_data: Optional[Dict] = None, timeout: Optional[int] = None) -> Dict:
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy(); _headers.pop('Content-Type', None)
file_obj, file_name, form_field = file_info.get("file_obj"), file_info.get("file_name"), file_info.get("form_field", "file")
files_payload = {}
if isinstance(file_obj, (str, Path)):
with open(file_obj, 'rb') as f:
files_payload = {form_field: (file_name, f.read(), 'application/x-zip-compressed')}
elif isinstance(file_obj, io.BytesIO):
files_payload = {form_field: (file_name, file_obj.getvalue(), 'application/x-zip-compressed')}
else:
raise TypeError(f"Unsupported file_obj type: {type(file_obj)}")
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
# [/DEF:APIClient.upload_file]
# [DEF:APIClient._perform_upload:Function]
# @PURPOSE: (Helper) Выполняет POST запрос с файлом.
# @PARAM: url (str) - URL.
# @PARAM: files (Dict) - Файлы.
# @PARAM: data (Optional[Dict]) - Данные.
# @PARAM: headers (Dict) - Заголовки.
# @PARAM: timeout (Optional[int]) - Таймаут.
# @RETURN: Dict - Ответ.
def _perform_upload(self, url: str, files: Dict, data: Optional[Dict], headers: Dict, timeout: Optional[int]) -> Dict:
try:
response = self.session.post(url, files=files, data=data or {}, headers=headers, timeout=timeout or self.request_settings["timeout"])
response.raise_for_status()
# Добавляем логирование для отладки
if response.status_code == 200:
try:
return response.json()
except Exception as json_e:
self.logger.debug(f"[_perform_upload][Debug] Response is not valid JSON: {response.text[:200]}...")
raise SupersetAPIError(f"API error during upload: Response is not valid JSON: {json_e}") from json_e
return response.json()
except requests.exceptions.HTTPError as e:
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
except requests.exceptions.RequestException as e:
raise NetworkError(f"Network error during upload: {e}", url=url) from e
# [/DEF:APIClient._perform_upload]
# [DEF:APIClient.fetch_paginated_count:Function]
# @PURPOSE: Получает общее количество элементов для пагинации.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: query_params (Dict) - Параметры запроса.
# @PARAM: count_field (str) - Поле с количеством.
# @RETURN: int - Количество.
def fetch_paginated_count(self, endpoint: str, query_params: Dict, count_field: str = "count") -> int:
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query_params)}))
return response_json.get(count_field, 0)
# [/DEF:APIClient.fetch_paginated_count]
# [DEF:APIClient.fetch_paginated_data:Function]
# @PURPOSE: Автоматически собирает данные со всех страниц пагинированного эндпоинта.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: pagination_options (Dict[str, Any]) - Опции пагинации.
# @RETURN: List[Any] - Список данных.
def fetch_paginated_data(self, endpoint: str, pagination_options: Dict[str, Any]) -> List[Any]:
base_query, total_count = pagination_options["base_query"], pagination_options["total_count"]
results_field, page_size = pagination_options["results_field"], base_query.get('page_size')
assert page_size and page_size > 0, "'page_size' must be a positive number."
results = []
for page in range((total_count + page_size - 1) // page_size):
query = {**base_query, 'page': page}
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query)}))
results.extend(response_json.get(results_field, []))
return results
# [/DEF:APIClient.fetch_paginated_data]
# [/DEF:APIClient]
# [/DEF:superset_tool.utils.network]
# [DEF:backend.core.utils.network:Module]
#
# @SEMANTICS: network, http, client, api, requests, session, authentication
# @PURPOSE: Инкапсулирует низкоуровневую HTTP-логику для взаимодействия с Superset API, включая аутентификацию, управление сессией, retry-логику и обработку ошибок.
# @LAYER: Infra
# @RELATION: DEPENDS_ON -> backend.src.core.logger
# @RELATION: DEPENDS_ON -> requests
# @PUBLIC_API: APIClient
# [SECTION: IMPORTS]
from typing import Optional, Dict, Any, List, Union, cast
import json
import io
from pathlib import Path
import requests
from requests.adapters import HTTPAdapter
import urllib3
from urllib3.util.retry import Retry
from ..logger import logger as app_logger, belief_scope
# [/SECTION]
# [DEF:SupersetAPIError:Class]
class SupersetAPIError(Exception):
def __init__(self, message: str = "Superset API error", **context: Any):
self.context = context
super().__init__(f"[API_FAILURE] {message} | Context: {self.context}")
# [DEF:AuthenticationError:Class]
class AuthenticationError(SupersetAPIError):
def __init__(self, message: str = "Authentication failed", **context: Any):
super().__init__(message, type="authentication", **context)
# [DEF:PermissionDeniedError:Class]
class PermissionDeniedError(AuthenticationError):
def __init__(self, message: str = "Permission denied", **context: Any):
super().__init__(message, **context)
# [DEF:DashboardNotFoundError:Class]
class DashboardNotFoundError(SupersetAPIError):
def __init__(self, resource_id: Union[int, str], message: str = "Dashboard not found", **context: Any):
super().__init__(f"Dashboard '{resource_id}' {message}", subtype="not_found", resource_id=resource_id, **context)
# [DEF:NetworkError:Class]
class NetworkError(Exception):
def __init__(self, message: str = "Network connection failed", **context: Any):
self.context = context
super().__init__(f"[NETWORK_FAILURE] {message} | Context: {self.context}")
# [DEF:APIClient:Class]
# @PURPOSE: Инкапсулирует HTTP-логику для работы с API, включая сессии, аутентификацию, и обработку запросов.
class APIClient:
DEFAULT_TIMEOUT = 30
# [DEF:__init__:Function]
# @PURPOSE: Инициализирует API клиент с конфигурацией, сессией и логгером.
# @PARAM: config (Dict[str, Any]) - Конфигурация.
# @PARAM: verify_ssl (bool) - Проверять ли SSL.
# @PARAM: timeout (int) - Таймаут запросов.
# @PRE: config must contain 'base_url' and 'auth'.
# @POST: APIClient instance is initialized with a session.
def __init__(self, config: Dict[str, Any], verify_ssl: bool = True, timeout: int = DEFAULT_TIMEOUT):
with belief_scope("__init__"):
app_logger.info("[APIClient.__init__][Entry] Initializing APIClient.")
self.base_url: str = config.get("base_url", "")
self.auth = config.get("auth")
self.request_settings = {"verify_ssl": verify_ssl, "timeout": timeout}
self.session = self._init_session()
self._tokens: Dict[str, str] = {}
self._authenticated = False
app_logger.info("[APIClient.__init__][Exit] APIClient initialized.")
# [/DEF:__init__:Function]
# [DEF:_init_session:Function]
# @PURPOSE: Создает и настраивает `requests.Session` с retry-логикой.
# @PRE: self.request_settings must be initialized.
# @POST: Returns a configured requests.Session instance.
# @RETURN: requests.Session - Настроенная сессия.
def _init_session(self) -> requests.Session:
with belief_scope("_init_session"):
session = requests.Session()
retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
adapter = HTTPAdapter(max_retries=retries)
session.mount('http://', adapter)
session.mount('https://', adapter)
if not self.request_settings["verify_ssl"]:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
app_logger.warning("[_init_session][State] SSL verification disabled.")
session.verify = self.request_settings["verify_ssl"]
return session
# [/DEF:_init_session:Function]
# [DEF:authenticate:Function]
# @PURPOSE: Выполняет аутентификацию в Superset API и получает access и CSRF токены.
# @PRE: self.auth and self.base_url must be valid.
# @POST: `self._tokens` заполнен, `self._authenticated` установлен в `True`.
# @RETURN: Dict[str, str] - Словарь с токенами.
# @THROW: AuthenticationError, NetworkError - при ошибках.
def authenticate(self) -> Dict[str, str]:
with belief_scope("authenticate"):
app_logger.info("[authenticate][Enter] Authenticating to %s", self.base_url)
try:
login_url = f"{self.base_url}/security/login"
response = self.session.post(login_url, json=self.auth, timeout=self.request_settings["timeout"])
response.raise_for_status()
access_token = response.json()["access_token"]
csrf_url = f"{self.base_url}/security/csrf_token/"
csrf_response = self.session.get(csrf_url, headers={"Authorization": f"Bearer {access_token}"}, timeout=self.request_settings["timeout"])
csrf_response.raise_for_status()
self._tokens = {"access_token": access_token, "csrf_token": csrf_response.json()["result"]}
self._authenticated = True
app_logger.info("[authenticate][Exit] Authenticated successfully.")
return self._tokens
except requests.exceptions.HTTPError as e:
raise AuthenticationError(f"Authentication failed: {e}") from e
except (requests.exceptions.RequestException, KeyError) as e:
raise NetworkError(f"Network or parsing error during authentication: {e}") from e
# [/DEF:authenticate:Function]
@property
# [DEF:headers:Function]
# @PURPOSE: Возвращает HTTP-заголовки для аутентифицированных запросов.
# @PRE: APIClient is initialized and authenticated or can be authenticated.
# @POST: Returns headers including auth tokens.
def headers(self) -> Dict[str, str]:
with belief_scope("headers"):
if not self._authenticated: self.authenticate()
return {
"Authorization": f"Bearer {self._tokens['access_token']}",
"X-CSRFToken": self._tokens.get("csrf_token", ""),
"Referer": self.base_url,
"Content-Type": "application/json"
}
# [/DEF:headers:Function]
# [DEF:request:Function]
# @PURPOSE: Выполняет универсальный HTTP-запрос к API.
# @PARAM: method (str) - HTTP метод.
# @PARAM: endpoint (str) - API эндпоинт.
# @PARAM: headers (Optional[Dict]) - Дополнительные заголовки.
# @PARAM: raw_response (bool) - Возвращать ли сырой ответ.
# @PRE: method and endpoint must be strings.
# @POST: Returns response content or raw Response object.
# @RETURN: `requests.Response` если `raw_response=True`, иначе `dict`.
# @THROW: SupersetAPIError, NetworkError и их подклассы.
def request(self, method: str, endpoint: str, headers: Optional[Dict] = None, raw_response: bool = False, **kwargs) -> Union[requests.Response, Dict[str, Any]]:
with belief_scope("request"):
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy()
if headers: _headers.update(headers)
try:
response = self.session.request(method, full_url, headers=_headers, **kwargs)
response.raise_for_status()
return response if raw_response else response.json()
except requests.exceptions.HTTPError as e:
self._handle_http_error(e, endpoint)
except requests.exceptions.RequestException as e:
self._handle_network_error(e, full_url)
# [/DEF:request:Function]
# [DEF:_handle_http_error:Function]
# @PURPOSE: (Helper) Преобразует HTTP ошибки в кастомные исключения.
# @PARAM: e (requests.exceptions.HTTPError) - Ошибка.
# @PARAM: endpoint (str) - Эндпоинт.
# @PRE: e must be a valid HTTPError with a response.
# @POST: Raises a specific SupersetAPIError or subclass.
def _handle_http_error(self, e: requests.exceptions.HTTPError, endpoint: str):
with belief_scope("_handle_http_error"):
status_code = e.response.status_code
if status_code == 404: raise DashboardNotFoundError(endpoint) from e
if status_code == 403: raise PermissionDeniedError() from e
if status_code == 401: raise AuthenticationError() from e
raise SupersetAPIError(f"API Error {status_code}: {e.response.text}") from e
# [/DEF:_handle_http_error:Function]
# [DEF:_handle_network_error:Function]
# @PURPOSE: (Helper) Преобразует сетевые ошибки в `NetworkError`.
# @PARAM: e (requests.exceptions.RequestException) - Ошибка.
# @PARAM: url (str) - URL.
# @PRE: e must be a RequestException.
# @POST: Raises a NetworkError.
def _handle_network_error(self, e: requests.exceptions.RequestException, url: str):
with belief_scope("_handle_network_error"):
if isinstance(e, requests.exceptions.Timeout): msg = "Request timeout"
elif isinstance(e, requests.exceptions.ConnectionError): msg = "Connection error"
else: msg = f"Unknown network error: {e}"
raise NetworkError(msg, url=url) from e
# [/DEF:_handle_network_error:Function]
# [DEF:upload_file:Function]
# @PURPOSE: Загружает файл на сервер через multipart/form-data.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: file_info (Dict[str, Any]) - Информация о файле.
# @PARAM: extra_data (Optional[Dict]) - Дополнительные данные.
# @PARAM: timeout (Optional[int]) - Таймаут.
# @PRE: file_info must contain 'file_obj' and 'file_name'.
# @POST: File is uploaded and response returned.
# @RETURN: Ответ API в виде словаря.
# @THROW: SupersetAPIError, NetworkError, TypeError.
def upload_file(self, endpoint: str, file_info: Dict[str, Any], extra_data: Optional[Dict] = None, timeout: Optional[int] = None) -> Dict:
with belief_scope("upload_file"):
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy(); _headers.pop('Content-Type', None)
file_obj, file_name, form_field = file_info.get("file_obj"), file_info.get("file_name"), file_info.get("form_field", "file")
files_payload = {}
if isinstance(file_obj, (str, Path)):
with open(file_obj, 'rb') as f:
files_payload = {form_field: (file_name, f.read(), 'application/x-zip-compressed')}
elif isinstance(file_obj, io.BytesIO):
files_payload = {form_field: (file_name, file_obj.getvalue(), 'application/x-zip-compressed')}
else:
raise TypeError(f"Unsupported file_obj type: {type(file_obj)}")
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
# [/DEF:upload_file:Function]
# [DEF:_perform_upload:Function]
# @PURPOSE: (Helper) Выполняет POST запрос с файлом.
# @PARAM: url (str) - URL.
# @PARAM: files (Dict) - Файлы.
# @PARAM: data (Optional[Dict]) - Данные.
# @PARAM: headers (Dict) - Заголовки.
# @PARAM: timeout (Optional[int]) - Таймаут.
# @PRE: url, files, and headers must be provided.
# @POST: POST request is performed and JSON response returned.
# @RETURN: Dict - Ответ.
def _perform_upload(self, url: str, files: Dict, data: Optional[Dict], headers: Dict, timeout: Optional[int]) -> Dict:
with belief_scope("_perform_upload"):
try:
response = self.session.post(url, files=files, data=data or {}, headers=headers, timeout=timeout or self.request_settings["timeout"])
response.raise_for_status()
if response.status_code == 200:
try:
return response.json()
except Exception as json_e:
app_logger.debug(f"[_perform_upload][Debug] Response is not valid JSON: {response.text[:200]}...")
raise SupersetAPIError(f"API error during upload: Response is not valid JSON: {json_e}") from json_e
return response.json()
except requests.exceptions.HTTPError as e:
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
except requests.exceptions.RequestException as e:
raise NetworkError(f"Network error during upload: {e}", url=url) from e
# [/DEF:_perform_upload:Function]
# [DEF:fetch_paginated_count:Function]
# @PURPOSE: Получает общее количество элементов для пагинации.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: query_params (Dict) - Параметры запроса.
# @PARAM: count_field (str) - Поле с количеством.
# @PRE: query_params must be a dictionary.
# @POST: Returns total count of items.
# @RETURN: int - Количество.
def fetch_paginated_count(self, endpoint: str, query_params: Dict, count_field: str = "count") -> int:
with belief_scope("fetch_paginated_count"):
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query_params)}))
return response_json.get(count_field, 0)
# [/DEF:fetch_paginated_count:Function]
# [DEF:fetch_paginated_data:Function]
# @PURPOSE: Автоматически собирает данные со всех страниц пагинированного эндпоинта.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: pagination_options (Dict[str, Any]) - Опции пагинации.
# @PRE: pagination_options must contain 'base_query', 'total_count', 'results_field'.
# @POST: Returns all items across all pages.
# @RETURN: List[Any] - Список данных.
def fetch_paginated_data(self, endpoint: str, pagination_options: Dict[str, Any]) -> List[Any]:
with belief_scope("fetch_paginated_data"):
base_query, total_count = pagination_options["base_query"], pagination_options["total_count"]
results_field, page_size = pagination_options["results_field"], base_query.get('page_size')
assert page_size and page_size > 0, "'page_size' must be a positive number."
results = []
for page in range((total_count + page_size - 1) // page_size):
query = {**base_query, 'page': page}
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query)}))
results.extend(response_json.get(results_field, []))
return results
# [/DEF:fetch_paginated_data:Function]
# [/DEF:APIClient:Class]
# [/DEF:backend.core.utils.network:Module]
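# --- Connection sketch (illustrative, not part of the module) ---
# The URL and credentials are placeholders; base_url is assumed to already include the
# /api/v1 prefix, since authenticate() posts to "{base_url}/security/login". The auth
# payload mirrors a typical Superset login body.
config = {
    "base_url": "https://superset.example.com/api/v1",
    "auth": {"username": "admin", "password": "<secret>", "provider": "db", "refresh": True},
}
client = APIClient(config, verify_ssl=False, timeout=60)
client.authenticate()  # fills access and CSRF tokens
dashboards = client.request("GET", "/dashboard/", params={"q": json.dumps({"page_size": 25})})
print(dashboards.get("count"))
# --- end sketch ---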

View File

@@ -8,6 +8,9 @@ from pathlib import Path
from .core.plugin_loader import PluginLoader
from .core.task_manager import TaskManager
from .core.config_manager import ConfigManager
from .core.scheduler import SchedulerService
from .core.database import init_db
from .core.logger import logger, belief_scope
# Initialize singletons
# Use absolute path relative to this file to ensure plugins are found regardless of CWD
@@ -15,19 +18,63 @@ project_root = Path(__file__).parent.parent.parent
config_path = project_root / "config.json"
config_manager = ConfigManager(config_path=str(config_path))
# Initialize database before any other services that might use it
init_db()
# [DEF:get_config_manager:Function]
# @PURPOSE: Dependency injector for the ConfigManager.
# @PRE: Global config_manager must be initialized.
# @POST: Returns shared ConfigManager instance.
# @RETURN: ConfigManager - The shared config manager instance.
def get_config_manager() -> ConfigManager:
"""Dependency injector for the ConfigManager."""
return config_manager
with belief_scope("get_config_manager"):
return config_manager
# [/DEF:get_config_manager:Function]
plugin_dir = Path(__file__).parent / "plugins"
plugin_loader = PluginLoader(plugin_dir=str(plugin_dir))
logger.info(f"PluginLoader initialized with directory: {plugin_dir}")
logger.info(f"Available plugins: {[config.name for config in plugin_loader.get_all_plugin_configs()]}")
task_manager = TaskManager(plugin_loader)
logger.info("TaskManager initialized")
scheduler_service = SchedulerService(task_manager, config_manager)
logger.info("SchedulerService initialized")
# [DEF:get_plugin_loader:Function]
# @PURPOSE: Dependency injector for the PluginLoader.
# @PRE: Global plugin_loader must be initialized.
# @POST: Returns shared PluginLoader instance.
# @RETURN: PluginLoader - The shared plugin loader instance.
def get_plugin_loader() -> PluginLoader:
"""Dependency injector for the PluginLoader."""
return plugin_loader
with belief_scope("get_plugin_loader"):
return plugin_loader
# [/DEF:get_plugin_loader:Function]
# [DEF:get_task_manager:Function]
# @PURPOSE: Dependency injector for the TaskManager.
# @PRE: Global task_manager must be initialized.
# @POST: Returns shared TaskManager instance.
# @RETURN: TaskManager - The shared task manager instance.
def get_task_manager() -> TaskManager:
"""Dependency injector for the TaskManager."""
return task_manager
# [/DEF]
with belief_scope("get_task_manager"):
return task_manager
# [/DEF:get_task_manager:Function]
# [DEF:get_scheduler_service:Function]
# @PURPOSE: Dependency injector for the SchedulerService.
# @PRE: Global scheduler_service must be initialized.
# @POST: Returns shared SchedulerService instance.
# @RETURN: SchedulerService - The shared scheduler service instance.
def get_scheduler_service() -> SchedulerService:
"""Dependency injector for the SchedulerService."""
with belief_scope("get_scheduler_service"):
return scheduler_service
# [/DEF:get_scheduler_service:Function]
# [/DEF:Dependencies:Module]
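# --- Usage sketch (illustrative, not part of the module) ---
# The injectors above are plain callables; if the API layer uses FastAPI-style dependency
# injection, a route could consume them like this. The route path and the get_tasks()
# method name are hypothetical placeholders.
from fastapi import APIRouter, Depends

router = APIRouter()

@router.get("/tasks")
def list_tasks(manager: TaskManager = Depends(get_task_manager)):
    return manager.get_tasks()  # placeholder accessor; TaskManager's real API is not shown here
# --- end sketch ---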

View File

@@ -0,0 +1,34 @@
# [DEF:backend.src.models.connection:Module]
#
# @SEMANTICS: database, connection, configuration, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for external database connection configurations.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> sqlalchemy
#
# @INVARIANT: All primary keys are UUID strings.
# [SECTION: IMPORTS]
from sqlalchemy import Column, String, Integer, DateTime
from sqlalchemy.sql import func
from .mapping import Base
import uuid
# [/SECTION]
# [DEF:ConnectionConfig:Class]
# @PURPOSE: Stores credentials for external databases used for column mapping.
class ConnectionConfig(Base):
__tablename__ = "connection_configs"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
name = Column(String, nullable=False)
type = Column(String, nullable=False) # e.g., "postgres"
host = Column(String, nullable=True)
port = Column(Integer, nullable=True)
database = Column(String, nullable=True)
username = Column(String, nullable=True)
password = Column(String, nullable=True) # Encrypted/Obfuscated password
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
# [/DEF:ConnectionConfig:Class]
# [/DEF:backend.src.models.connection:Module]

View File

@@ -0,0 +1,28 @@
# [DEF:backend.src.models.dashboard:Module]
# @SEMANTICS: dashboard, model, metadata, migration
# @PURPOSE: Defines data models for dashboard metadata and selection.
# @LAYER: Model
# @RELATION: USED_BY -> backend.src.api.routes.migration
from pydantic import BaseModel
from typing import List
# [DEF:DashboardMetadata:Class]
# @PURPOSE: Represents a dashboard available for migration.
class DashboardMetadata(BaseModel):
id: int
title: str
last_modified: str
status: str
# [/DEF:DashboardMetadata:Class]
# [DEF:DashboardSelection:Class]
# @PURPOSE: Represents the user's selection of dashboards to migrate.
class DashboardSelection(BaseModel):
selected_ids: List[int]
source_env_id: str
target_env_id: str
replace_db_config: bool = False
# [/DEF:DashboardSelection:Class]
# [/DEF:backend.src.models.dashboard:Module]
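# --- Usage sketch (illustrative, not part of the module) ---
# Example selection payload; dashboard IDs and environment UUID strings are placeholders.
selection = DashboardSelection(
    selected_ids=[12, 15],
    source_env_id="source-env-uuid",
    target_env_id="target-env-uuid",
    replace_db_config=True,
)
print(selection)
# --- end sketch ---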

backend/src/models/git.py Normal file
View File

@@ -0,0 +1,73 @@
"""
[DEF:GitModels:Module]
Git-specific SQLAlchemy models for configuration and repository tracking.
@RELATION: specs/011-git-integration-dashboard/data-model.md
"""
import enum
from datetime import datetime
from sqlalchemy import Column, String, Integer, DateTime, Enum, ForeignKey, Boolean
from sqlalchemy.dialects.postgresql import UUID
import uuid
from src.core.database import Base
class GitProvider(str, enum.Enum):
GITHUB = "GITHUB"
GITLAB = "GITLAB"
GITEA = "GITEA"
class GitStatus(str, enum.Enum):
CONNECTED = "CONNECTED"
FAILED = "FAILED"
UNKNOWN = "UNKNOWN"
class SyncStatus(str, enum.Enum):
CLEAN = "CLEAN"
DIRTY = "DIRTY"
CONFLICT = "CONFLICT"
class GitServerConfig(Base):
"""
[DEF:GitServerConfig:Class]
Configuration for a Git server connection.
"""
__tablename__ = "git_server_configs"
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
name = Column(String(255), nullable=False)
provider = Column(Enum(GitProvider), nullable=False)
url = Column(String(255), nullable=False)
pat = Column(String(255), nullable=False) # PERSONAL ACCESS TOKEN
default_repository = Column(String(255), nullable=True)
status = Column(Enum(GitStatus), default=GitStatus.UNKNOWN)
last_validated = Column(DateTime, default=datetime.utcnow)
class GitRepository(Base):
"""
[DEF:GitRepository:Class]
Tracking for a local Git repository linked to a dashboard.
"""
__tablename__ = "git_repositories"
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
dashboard_id = Column(Integer, nullable=False, unique=True)
config_id = Column(String(36), ForeignKey("git_server_configs.id"), nullable=False)
remote_url = Column(String(255), nullable=False)
local_path = Column(String(255), nullable=False)
current_branch = Column(String(255), default="main")
sync_status = Column(Enum(SyncStatus), default=SyncStatus.CLEAN)
class DeploymentEnvironment(Base):
"""
[DEF:DeploymentEnvironment:Class]
Target Superset environments for dashboard deployment.
"""
__tablename__ = "deployment_environments"
id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
name = Column(String(255), nullable=False)
superset_url = Column(String(255), nullable=False)
superset_token = Column(String(255), nullable=False)
is_active = Column(Boolean, default=True)
# [/DEF:GitModels:Module]
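Illustrative usage, not part of the changeset: a sketch of creating a Git server config and linking a dashboard repository to it, assuming the SessionLocal factory from src.core.database used elsewhere in this changeset. All values are placeholders.
from src.core.database import SessionLocal  # assumed session factory
from src.models.git import GitServerConfig, GitRepository, GitProvider

with SessionLocal() as db:
    cfg = GitServerConfig(
        name="corp-gitea",                            # placeholder values
        provider=GitProvider.GITEA,
        url="https://git.example.com",
        pat="<personal-access-token>",
    )
    db.add(cfg)
    db.flush()                                        # populate cfg.id before referencing it
    db.add(GitRepository(
        dashboard_id=42,
        config_id=cfg.id,
        remote_url="https://git.example.com/bi/dashboard-42.git",
        local_path="backend/git_repos/42",
    ))
    db.commit()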

View File

@@ -0,0 +1,70 @@
# [DEF:backend.src.models.mapping:Module]
#
# @SEMANTICS: database, mapping, environment, migration, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for environment metadata and database mappings using SQLAlchemy.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> sqlalchemy
#
# @INVARIANT: All primary keys are UUID strings.
# @CONSTRAINT: source_env_id and target_env_id must be valid environment IDs.
# [SECTION: IMPORTS]
from sqlalchemy import Column, String, Boolean, DateTime, ForeignKey, Enum as SQLEnum
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import func
import uuid
import enum
# [/SECTION]
Base = declarative_base()
# [DEF:MigrationStatus:Class]
# @PURPOSE: Enumeration of possible migration job statuses.
class MigrationStatus(enum.Enum):
PENDING = "PENDING"
RUNNING = "RUNNING"
COMPLETED = "COMPLETED"
FAILED = "FAILED"
AWAITING_MAPPING = "AWAITING_MAPPING"
# [/DEF:MigrationStatus:Class]
# [DEF:Environment:Class]
# @PURPOSE: Represents a Superset instance environment.
class Environment(Base):
__tablename__ = "environments"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
name = Column(String, nullable=False)
url = Column(String, nullable=False)
credentials_id = Column(String, nullable=False)
# [/DEF:Environment:Class]
# [DEF:DatabaseMapping:Class]
# @PURPOSE: Represents a mapping between source and target databases.
class DatabaseMapping(Base):
__tablename__ = "database_mappings"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
source_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
target_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
source_db_uuid = Column(String, nullable=False)
target_db_uuid = Column(String, nullable=False)
source_db_name = Column(String, nullable=False)
target_db_name = Column(String, nullable=False)
engine = Column(String, nullable=True)
# [/DEF:DatabaseMapping:Class]
# [DEF:MigrationJob:Class]
# @PURPOSE: Represents a single migration execution job.
class MigrationJob(Base):
__tablename__ = "migration_jobs"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
source_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
target_env_id = Column(String, ForeignKey("environments.id"), nullable=False)
status = Column(SQLEnum(MigrationStatus), default=MigrationStatus.PENDING)
replace_db = Column(Boolean, default=False)
created_at = Column(DateTime(timezone=True), server_default=func.now())
# [/DEF:MigrationJob:Class]
# [/DEF:backend.src.models.mapping:Module]
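Illustrative usage, not part of the changeset: a sketch of registering two environments and one database mapping between them, assuming the SessionLocal factory used elsewhere in this changeset. URLs and UUIDs are placeholders.
from src.core.database import SessionLocal  # assumed session factory
from src.models.mapping import Environment, DatabaseMapping

with SessionLocal() as db:
    src = Environment(name="dev", url="https://superset-dev.example.com", credentials_id="cred-dev")
    tgt = Environment(name="prod", url="https://superset-prod.example.com", credentials_id="cred-prod")
    db.add_all([src, tgt])
    db.flush()                                        # assign UUID primary keys
    db.add(DatabaseMapping(
        source_env_id=src.id,
        target_env_id=tgt.id,
        source_db_uuid="src-db-uuid",                 # placeholder UUIDs
        target_db_uuid="tgt-db-uuid",
        source_db_name="analytics_dev",
        target_db_name="analytics_prod",
        engine="postgresql",
    ))
    db.commit()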

View File

@@ -0,0 +1,35 @@
# [DEF:backend.src.models.task:Module]
#
# @SEMANTICS: database, task, record, sqlalchemy, sqlite
# @PURPOSE: Defines the database schema for task execution records.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> sqlalchemy
#
# @INVARIANT: All primary keys are UUID strings.
# [SECTION: IMPORTS]
from sqlalchemy import Column, String, DateTime, JSON, ForeignKey
from sqlalchemy.sql import func
from .mapping import Base
import uuid
# [/SECTION]
# [DEF:TaskRecord:Class]
# @PURPOSE: Represents a persistent record of a task execution.
class TaskRecord(Base):
__tablename__ = "task_records"
id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
type = Column(String, nullable=False) # e.g., "backup", "migration"
status = Column(String, nullable=False) # Enum: "PENDING", "RUNNING", "SUCCESS", "FAILED"
environment_id = Column(String, ForeignKey("environments.id"), nullable=True)
started_at = Column(DateTime(timezone=True), nullable=True)
finished_at = Column(DateTime(timezone=True), nullable=True)
logs = Column(JSON, nullable=True) # Store structured logs as JSON
error = Column(String, nullable=True)
result = Column(JSON, nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now())
params = Column(JSON, nullable=True)
# [/DEF:TaskRecord:Class]
# [/DEF:backend.src.models.task:Module]
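Illustrative usage, not part of the changeset: a sketch of the typical TaskRecord lifecycle (create as PENDING, then mark finished), assuming the SessionLocal factory used elsewhere in this changeset.
from datetime import datetime, timezone
from src.core.database import SessionLocal  # assumed session factory
from src.models.task import TaskRecord

with SessionLocal() as db:
    record = TaskRecord(type="backup", status="PENDING", params={"env": "dev"})
    record.started_at = datetime.now(timezone.utc)
    db.add(record)
    db.commit()
    # ...after the task finishes:
    record.status = "SUCCESS"
    record.finished_at = datetime.now(timezone.utc)
    record.logs = [{"level": "INFO", "message": "Backup completed"}]
    db.commit()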

View File

@@ -11,10 +11,10 @@ from pathlib import Path
from requests.exceptions import RequestException
from ..core.plugin_base import PluginBase
from superset_tool.client import SupersetClient
from superset_tool.exceptions import SupersetAPIError
from superset_tool.utils.logger import SupersetLogger
from superset_tool.utils.fileio import (
from ..core.logger import belief_scope
from ..core.superset_client import SupersetClient
from ..core.utils.network import SupersetAPIError
from ..core.utils.fileio import (
save_and_unpack_dashboard,
archive_exports,
sanitize_filename,
@@ -22,33 +22,68 @@ from superset_tool.utils.fileio import (
remove_empty_directories,
RetentionPolicy
)
from superset_tool.utils.init_clients import setup_clients
from ..dependencies import get_config_manager
# [DEF:BackupPlugin:Class]
# @PURPOSE: Implementation of the backup plugin logic.
class BackupPlugin(PluginBase):
"""
A plugin to back up Superset dashboards.
"""
@property
# [DEF:id:Function]
# @PURPOSE: Returns the unique identifier for the backup plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string ID.
# @RETURN: str - "superset-backup"
def id(self) -> str:
return "superset-backup"
with belief_scope("id"):
return "superset-backup"
# [/DEF:id:Function]
@property
# [DEF:name:Function]
# @PURPOSE: Returns the human-readable name of the backup plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string name.
# @RETURN: str - Plugin name.
def name(self) -> str:
return "Superset Dashboard Backup"
with belief_scope("name"):
return "Superset Dashboard Backup"
# [/DEF:name:Function]
@property
# [DEF:description:Function]
# @PURPOSE: Returns a description of the backup plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string description.
# @RETURN: str - Plugin description.
def description(self) -> str:
return "Backs up all dashboards from a Superset instance."
with belief_scope("description"):
return "Backs up all dashboards from a Superset instance."
# [/DEF:description:Function]
@property
# [DEF:version:Function]
# @PURPOSE: Returns the version of the backup plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string version.
# @RETURN: str - "1.0.0"
def version(self) -> str:
return "1.0.0"
with belief_scope("version"):
return "1.0.0"
# [/DEF:version:Function]
# [DEF:get_schema:Function]
# @PURPOSE: Returns the JSON schema for backup plugin parameters.
# @PRE: Plugin instance exists.
# @POST: Returns dictionary schema.
# @RETURN: Dict[str, Any] - JSON schema.
def get_schema(self) -> Dict[str, Any]:
config_manager = get_config_manager()
envs = [e.name for e in config_manager.get_environments()]
with belief_scope("get_schema"):
config_manager = get_config_manager()
envs = [e.name for e in config_manager.get_environments()]
default_path = config_manager.get_config().settings.backup_path
return {
@@ -69,65 +104,86 @@ class BackupPlugin(PluginBase):
},
"required": ["env", "backup_path"],
}
# [/DEF:get_schema:Function]
# [DEF:execute:Function]
# @PURPOSE: Executes the dashboard backup logic.
# @PARAM: params (Dict[str, Any]) - Backup parameters (env, backup_path).
# @PRE: Target environment must be configured. params must be a dictionary.
# @POST: All dashboards are exported and archived.
async def execute(self, params: Dict[str, Any]):
env = params["env"]
backup_path = Path(params["backup_path"])
logger = SupersetLogger(log_dir=backup_path / "Logs", console=True)
logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")
try:
with belief_scope("execute"):
config_manager = get_config_manager()
if not config_manager.has_environments():
raise ValueError("No Superset environments configured. Please add an environment in Settings.")
env_id = params.get("environment_id")
# Resolve environment name if environment_id is provided
if env_id:
env_config = next((e for e in config_manager.get_environments() if e.id == env_id), None)
if env_config:
params["env"] = env_config.name
env = params.get("env")
if not env:
raise KeyError("env")
backup_path_str = params.get("backup_path") or config_manager.get_config().settings.backup_path
backup_path = Path(backup_path_str)
from ..core.logger import logger as app_logger
app_logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")
try:
config_manager = get_config_manager()
if not config_manager.has_environments():
raise ValueError("No Superset environments configured. Please add an environment in Settings.")
env_config = config_manager.get_environment(env)
if not env_config:
raise ValueError(f"Environment '{env}' not found in configuration.")
clients = setup_clients(logger, custom_envs=config_manager.get_environments())
client = clients.get(env)
if not client:
raise ValueError(f"Environment '{env}' not found in configuration.")
dashboard_count, dashboard_meta = client.get_dashboards()
logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")
client = SupersetClient(env_config)
dashboard_count, dashboard_meta = client.get_dashboards()
app_logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")
if dashboard_count == 0:
logger.info("[BackupPlugin][Exit] No dashboards to back up.")
return
if dashboard_count == 0:
app_logger.info("[BackupPlugin][Exit] No dashboards to back up.")
return
for db in dashboard_meta:
dashboard_id = db.get('id')
dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
if not dashboard_id:
continue
for db in dashboard_meta:
dashboard_id = db.get('id')
dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
if not dashboard_id:
continue
try:
dashboard_base_dir_name = sanitize_filename(f"{dashboard_title}")
dashboard_dir = backup_path / env.upper() / dashboard_base_dir_name
dashboard_dir.mkdir(parents=True, exist_ok=True)
try:
dashboard_base_dir_name = sanitize_filename(f"{dashboard_title}")
dashboard_dir = backup_path / env.upper() / dashboard_base_dir_name
dashboard_dir.mkdir(parents=True, exist_ok=True)
zip_content, filename = client.export_dashboard(dashboard_id)
zip_content, filename = client.export_dashboard(dashboard_id)
save_and_unpack_dashboard(
zip_content=zip_content,
original_filename=filename,
output_dir=dashboard_dir,
unpack=False,
logger=logger
)
save_and_unpack_dashboard(
zip_content=zip_content,
original_filename=filename,
output_dir=dashboard_dir,
unpack=False
)
archive_exports(str(dashboard_dir), policy=RetentionPolicy(), logger=logger)
archive_exports(str(dashboard_dir), policy=RetentionPolicy())
except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
logger.error(f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
continue
consolidate_archive_folders(backup_path / env.upper(), logger=logger)
remove_empty_directories(str(backup_path / env.upper()), logger=logger)
except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
app_logger.error(f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
continue
consolidate_archive_folders(backup_path / env.upper())
remove_empty_directories(str(backup_path / env.upper()))
logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")
app_logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")
except (RequestException, IOError, KeyError) as e:
logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
raise e
# [/DEF:BackupPlugin]
except (RequestException, IOError, KeyError) as e:
app_logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
raise e
# [/DEF:execute:Function]
# [/DEF:BackupPlugin:Class]
# [/DEF:BackupPlugin:Module]
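Illustrative usage, not part of the changeset: a direct invocation sketch of BackupPlugin.execute. The import path, environment name, and backup path are hypothetical; a matching environment must already exist in ConfigManager.
import asyncio
from src.plugins.backup_plugin import BackupPlugin  # import path assumed

async def run_backup():
    plugin = BackupPlugin()
    # "backup_path" may be omitted; execute() then falls back to settings.backup_path.
    await plugin.execute({"env": "dev", "backup_path": "/backups/superset"})

asyncio.run(run_backup())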

View File

@@ -0,0 +1,187 @@
# [DEF:DebugPluginModule:Module]
# @SEMANTICS: plugin, debug, api, database, superset
# @PURPOSE: Implements a plugin for system diagnostics and debugging Superset API responses.
# @LAYER: Plugins
# @RELATION: Inherits from PluginBase. Uses SupersetClient from core.
# @CONSTRAINT: Must use belief_scope for logging.
# [SECTION: IMPORTS]
from typing import Dict, Any, Optional
from ..core.plugin_base import PluginBase
from ..core.superset_client import SupersetClient
from ..core.logger import logger, belief_scope
# [/SECTION]
# [DEF:DebugPlugin:Class]
# @PURPOSE: Plugin for system diagnostics and debugging.
class DebugPlugin(PluginBase):
"""
Plugin for system diagnostics and debugging.
"""
@property
# [DEF:id:Function]
# @PURPOSE: Returns the unique identifier for the debug plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string ID.
# @RETURN: str - "system-debug"
def id(self) -> str:
with belief_scope("id"):
return "system-debug"
# [/DEF:id:Function]
@property
# [DEF:name:Function]
# @PURPOSE: Returns the human-readable name of the debug plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string name.
# @RETURN: str - Plugin name.
def name(self) -> str:
with belief_scope("name"):
return "System Debug"
# [/DEF:name:Function]
@property
# [DEF:description:Function]
# @PURPOSE: Returns a description of the debug plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string description.
# @RETURN: str - Plugin description.
def description(self) -> str:
with belief_scope("description"):
return "Run system diagnostics and debug Superset API responses."
# [/DEF:description:Function]
@property
# [DEF:version:Function]
# @PURPOSE: Returns the version of the debug plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string version.
# @RETURN: str - "1.0.0"
def version(self) -> str:
with belief_scope("version"):
return "1.0.0"
# [/DEF:version:Function]
# [DEF:get_schema:Function]
# @PURPOSE: Returns the JSON schema for the debug plugin parameters.
# @PRE: Plugin instance exists.
# @POST: Returns dictionary schema.
# @RETURN: Dict[str, Any] - JSON schema.
def get_schema(self) -> Dict[str, Any]:
with belief_scope("get_schema"):
return {
"type": "object",
"properties": {
"action": {
"type": "string",
"title": "Action",
"enum": ["test-db-api", "get-dataset-structure"],
"default": "test-db-api"
},
"env": {
"type": "string",
"title": "Environment",
"description": "The Superset environment (for dataset structure)."
},
"dataset_id": {
"type": "integer",
"title": "Dataset ID",
"description": "The ID of the dataset (for dataset structure)."
},
"source_env": {
"type": "string",
"title": "Source Environment",
"description": "Source env for DB API test."
},
"target_env": {
"type": "string",
"title": "Target Environment",
"description": "Target env for DB API test."
}
},
"required": ["action"]
}
# [/DEF:get_schema:Function]
# [DEF:execute:Function]
# @PURPOSE: Executes the debug logic.
# @PARAM: params (Dict[str, Any]) - Debug parameters.
# @PRE: action must be provided in params.
# @POST: Debug action is executed and results returned.
# @RETURN: Dict[str, Any] - Execution results.
async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
with belief_scope("execute"):
action = params.get("action")
if action == "test-db-api":
return await self._test_db_api(params)
elif action == "get-dataset-structure":
return await self._get_dataset_structure(params)
else:
raise ValueError(f"Unknown action: {action}")
# [/DEF:execute:Function]
# [DEF:_test_db_api:Function]
# @PURPOSE: Tests database API connectivity for source and target environments.
# @PRE: source_env and target_env params exist in params.
# @POST: Returns DB counts for both envs.
# @PARAM: params (Dict) - Plugin parameters.
# @RETURN: Dict - Comparison results.
async def _test_db_api(self, params: Dict[str, Any]) -> Dict[str, Any]:
with belief_scope("_test_db_api"):
source_env_name = params.get("source_env")
target_env_name = params.get("target_env")
if not source_env_name or not target_env_name:
raise ValueError("source_env and target_env are required for test-db-api")
from ..dependencies import get_config_manager
config_manager = get_config_manager()
results = {}
for name in [source_env_name, target_env_name]:
env_config = config_manager.get_environment(name)
if not env_config:
raise ValueError(f"Environment '{name}' not found.")
client = SupersetClient(env_config)
client.authenticate()
count, dbs = client.get_databases()
results[name] = {
"count": count,
"databases": dbs
}
return results
# [/DEF:_test_db_api:Function]
# [DEF:_get_dataset_structure:Function]
# @PURPOSE: Retrieves the structure of a dataset.
# @PRE: env and dataset_id params exist in params.
# @POST: Returns dataset JSON structure.
# @PARAM: params (Dict) - Plugin parameters.
# @RETURN: Dict - Dataset structure.
async def _get_dataset_structure(self, params: Dict[str, Any]) -> Dict[str, Any]:
with belief_scope("_get_dataset_structure"):
env_name = params.get("env")
dataset_id = params.get("dataset_id")
if not env_name or dataset_id is None:
raise ValueError("env and dataset_id are required for get-dataset-structure")
from ..dependencies import get_config_manager
config_manager = get_config_manager()
env_config = config_manager.get_environment(env_name)
if not env_config:
raise ValueError(f"Environment '{env_name}' not found.")
client = SupersetClient(env_config)
client.authenticate()
dataset_response = client.get_dataset(dataset_id)
return dataset_response.get('result') or {}
# [/DEF:_get_dataset_structure:Function]
# [/DEF:DebugPlugin:Class]
# [/DEF:DebugPluginModule:Module]
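Illustrative usage, not part of the changeset: a sketch of running the test-db-api action of DebugPlugin directly. The import path and environment names are placeholders and assume both environments are configured.
import asyncio
from src.plugins.debug_plugin import DebugPlugin  # import path assumed

async def compare_databases():
    plugin = DebugPlugin()
    result = await plugin.execute({
        "action": "test-db-api",
        "source_env": "dev",
        "target_env": "prod",
    })
    for env_name, info in result.items():
        print(env_name, info["count"])  # database count per environment

asyncio.run(compare_databases())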

View File

@@ -0,0 +1,345 @@
# [DEF:backend.src.plugins.git_plugin:Module]
#
# @SEMANTICS: git, plugin, dashboard, version_control, sync, deploy
# @PURPOSE: Provides a plugin for versioning and deploying Superset dashboards.
# @LAYER: Plugin
# @RELATION: INHERITS_FROM -> src.core.plugin_base.PluginBase
# @RELATION: USES -> src.services.git_service.GitService
# @RELATION: USES -> src.core.superset_client.SupersetClient
# @RELATION: USES -> src.core.config_manager.ConfigManager
#
# @INVARIANT: All Git operations must go through GitService.
# @CONSTRAINT: The plugin works only with unpacked Superset YAML exports.
# [SECTION: IMPORTS]
import os
import io
import shutil
import zipfile
from pathlib import Path
from typing import Dict, Any, Optional
from src.core.plugin_base import PluginBase
from src.services.git_service import GitService
from src.core.logger import logger, belief_scope
from src.core.config_manager import ConfigManager
from src.core.superset_client import SupersetClient
# [/SECTION]
# [DEF:GitPlugin:Class]
# @PURPOSE: Implementation of the Git Integration plugin for dashboard version management.
class GitPlugin(PluginBase):
# [DEF:__init__:Function]
# @PURPOSE: Initializes the plugin and its dependencies.
# @POST: git_service and config_manager are initialized.
def __init__(self):
with belief_scope("GitPlugin.__init__"):
logger.info("[GitPlugin.__init__][Entry] Initializing GitPlugin.")
self.git_service = GitService()
# Robust config path resolution:
# 1. Try absolute path from src/dependencies.py style if possible
# 2. Try relative paths based on common execution patterns
if os.path.exists("../config.json"):
config_path = "../config.json"
elif os.path.exists("config.json"):
config_path = "config.json"
else:
# Fallback to the one initialized in dependencies if we can import it
try:
from src.dependencies import config_manager
self.config_manager = config_manager
logger.info("[GitPlugin.__init__][Exit] GitPlugin initialized using shared config_manager.")
return
except Exception:
config_path = "config.json"
self.config_manager = ConfigManager(config_path)
logger.info(f"[GitPlugin.__init__][Exit] GitPlugin initialized with {config_path}")
# [/DEF:__init__:Function]
@property
def id(self) -> str:
return "git-integration"
@property
def name(self) -> str:
return "Git Integration"
@property
def description(self) -> str:
return "Version control for Superset dashboards"
@property
def version(self) -> str:
return "0.1.0"
# [DEF:get_schema:Function]
# @PURPOSE: Returns the JSON schema of parameters for executing plugin tasks.
# @RETURN: Dict[str, Any] - Parameter schema.
def get_schema(self) -> Dict[str, Any]:
with belief_scope("GitPlugin.get_schema"):
return {
"type": "object",
"properties": {
"operation": {"type": "string", "enum": ["sync", "deploy", "history"]},
"dashboard_id": {"type": "integer"},
"environment_id": {"type": "string"},
"source_env_id": {"type": "string"}
},
"required": ["operation", "dashboard_id"]
}
# [/DEF:get_schema:Function]
# [DEF:initialize:Function]
# @PURPOSE: Performs initial plugin setup.
# @POST: The plugin is ready to execute tasks.
async def initialize(self):
with belief_scope("GitPlugin.initialize"):
logger.info("[GitPlugin.initialize][Action] Initializing Git Integration Plugin logic.")
# [DEF:execute:Function]
# @PURPOSE: Main entry point for executing plugin tasks.
# @PRE: task_data contains 'operation' and 'dashboard_id'.
# @POST: Returns the result of the requested operation.
# @PARAM: task_data (Dict[str, Any]) - Task data.
# @RETURN: Dict[str, Any] - Status and message.
# @RELATION: CALLS -> self._handle_sync
# @RELATION: CALLS -> self._handle_deploy
async def execute(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
with belief_scope("GitPlugin.execute"):
operation = task_data.get("operation")
dashboard_id = task_data.get("dashboard_id")
logger.info(f"[GitPlugin.execute][Entry] Executing operation: {operation} for dashboard {dashboard_id}")
if operation == "sync":
source_env_id = task_data.get("source_env_id")
result = await self._handle_sync(dashboard_id, source_env_id)
elif operation == "deploy":
env_id = task_data.get("environment_id")
result = await self._handle_deploy(dashboard_id, env_id)
elif operation == "history":
result = {"status": "success", "message": "History available via API"}
else:
logger.error(f"[GitPlugin.execute][Coherence:Failed] Unknown operation: {operation}")
raise ValueError(f"Unknown operation: {operation}")
logger.info(f"[GitPlugin.execute][Exit] Operation {operation} completed.")
return result
# [/DEF:execute:Function]
# [DEF:_handle_sync:Function]
# @PURPOSE: Exports a dashboard from Superset and unpacks it into the Git repository.
# @PRE: A repository for the dashboard must already exist.
# @POST: Files in the repository are updated to the dashboard's current state in Superset.
# @PARAM: dashboard_id (int) - Dashboard ID.
# @PARAM: source_env_id (Optional[str]) - Source environment ID.
# @RETURN: Dict[str, str] - Sync result.
# @SIDE_EFFECT: Modifies files in the repository's local working directory.
# @RELATION: CALLS -> src.services.git_service.GitService.get_repo
# @RELATION: CALLS -> src.core.superset_client.SupersetClient.export_dashboard
async def _handle_sync(self, dashboard_id: int, source_env_id: Optional[str] = None) -> Dict[str, str]:
with belief_scope("GitPlugin._handle_sync"):
try:
# 1. Get the repository
repo = self.git_service.get_repo(dashboard_id)
repo_path = Path(repo.working_dir)
logger.info(f"[_handle_sync][Action] Target repo path: {repo_path}")
# 2. Set up the Superset client
env = self._get_env(source_env_id)
client = SupersetClient(env)
client.authenticate()
# 3. Export the dashboard
logger.info(f"[_handle_sync][Action] Exporting dashboard {dashboard_id} from {env.name}")
zip_bytes, _ = client.export_dashboard(dashboard_id)
# 4. Unpack the export, flattening the directory structure
logger.info(f"[_handle_sync][Action] Unpacking export to {repo_path}")
# Directories/files we expect in a Superset export
managed_dirs = ["dashboards", "charts", "datasets", "databases"]
managed_files = ["metadata.yaml"]
# Remove stale data before unpacking so no "ghost" files remain
for d in managed_dirs:
d_path = repo_path / d
if d_path.exists() and d_path.is_dir():
shutil.rmtree(d_path)
for f in managed_files:
f_path = repo_path / f
if f_path.exists():
f_path.unlink()
with zipfile.ZipFile(io.BytesIO(zip_bytes)) as zf:
# Superset exports everything into a dashboard_export_<timestamp>/ subfolder;
# we need to detect that folder name
namelist = zf.namelist()
if not namelist:
raise ValueError("Export ZIP is empty")
root_folder = namelist[0].split('/')[0]
logger.info(f"[_handle_sync][Action] Detected root folder in ZIP: {root_folder}")
for member in zf.infolist():
if member.filename.startswith(root_folder + "/") and len(member.filename) > len(root_folder) + 1:
# Strip the root folder prefix
relative_path = member.filename[len(root_folder)+1:]
target_path = repo_path / relative_path
if member.is_dir():
target_path.mkdir(parents=True, exist_ok=True)
else:
target_path.parent.mkdir(parents=True, exist_ok=True)
with zf.open(member) as source, open(target_path, "wb") as target:
shutil.copyfileobj(source, target)
# 5. Automatically stage changes (no commit, so the user can review the diff)
try:
repo.git.add(A=True)
logger.info(f"[_handle_sync][Action] Changes staged in git")
except Exception as ge:
logger.warning(f"[_handle_sync][Action] Failed to stage changes: {ge}")
logger.info(f"[_handle_sync][Coherence:OK] Dashboard {dashboard_id} synced successfully.")
return {"status": "success", "message": "Dashboard synced and flattened in local repository"}
except Exception as e:
logger.error(f"[_handle_sync][Coherence:Failed] Sync failed: {e}")
raise
# [/DEF:_handle_sync:Function]
# [DEF:_handle_deploy:Function]
# @PURPOSE: Packs the repository into a ZIP and imports it into the target Superset environment.
# @PRE: environment_id must correspond to a configured environment.
# @POST: The dashboard is imported into the target Superset instance.
# @PARAM: dashboard_id (int) - Dashboard ID.
# @PARAM: env_id (str) - Target environment ID.
# @RETURN: Dict[str, Any] - Deployment result.
# @SIDE_EFFECT: Creates and removes a temporary ZIP file.
# @RELATION: CALLS -> src.core.superset_client.SupersetClient.import_dashboard
async def _handle_deploy(self, dashboard_id: int, env_id: str) -> Dict[str, Any]:
with belief_scope("GitPlugin._handle_deploy"):
try:
if not env_id:
raise ValueError("Target environment ID required for deployment")
# 1. Get the repository
repo = self.git_service.get_repo(dashboard_id)
repo_path = Path(repo.working_dir)
# 2. Pack into a ZIP
logger.info(f"[_handle_deploy][Action] Packing repository {repo_path} for deployment.")
zip_buffer = io.BytesIO()
# Superset expects a root directory in the ZIP (e.g., dashboard_export_20240101T000000/)
root_dir_name = f"dashboard_export_{dashboard_id}"
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zf:
for root, dirs, files in os.walk(repo_path):
if ".git" in dirs:
dirs.remove(".git")
for file in files:
if file == ".git" or file.endswith(".zip"): continue
file_path = Path(root) / file
# Prepend the root directory name to the archive path
arcname = Path(root_dir_name) / file_path.relative_to(repo_path)
zf.write(file_path, arcname)
zip_buffer.seek(0)
# 3. Set up the Superset client
env = self.config_manager.get_environment(env_id)
if not env:
raise ValueError(f"Environment {env_id} not found")
client = SupersetClient(env)
client.authenticate()
# 4. Import
temp_zip_path = repo_path / f"deploy_{dashboard_id}.zip"
logger.info(f"[_handle_deploy][Action] Saving temporary zip to {temp_zip_path}")
with open(temp_zip_path, "wb") as f:
f.write(zip_buffer.getvalue())
try:
logger.info(f"[_handle_deploy][Action] Importing dashboard to {env.name}")
result = client.import_dashboard(temp_zip_path)
logger.info(f"[_handle_deploy][Coherence:OK] Deployment successful for dashboard {dashboard_id}.")
return {"status": "success", "message": f"Dashboard deployed to {env.name}", "details": result}
finally:
if temp_zip_path.exists():
os.remove(temp_zip_path)
except Exception as e:
logger.error(f"[_handle_deploy][Coherence:Failed] Deployment failed: {e}")
raise
# [/DEF:_handle_deploy:Function]
# [DEF:_get_env:Function]
# @PURPOSE: Helper method for resolving the environment configuration.
# @PARAM: env_id (Optional[str]) - Environment ID.
# @RETURN: Environment - Environment configuration object.
def _get_env(self, env_id: Optional[str] = None):
with belief_scope("GitPlugin._get_env"):
logger.info(f"[_get_env][Entry] Fetching environment for ID: {env_id}")
# Priority 1: ConfigManager (config.json)
if env_id:
env = self.config_manager.get_environment(env_id)
if env:
logger.info(f"[_get_env][Exit] Found environment by ID in ConfigManager: {env.name}")
return env
# Priority 2: Database (DeploymentEnvironment)
from src.core.database import SessionLocal
from src.models.git import DeploymentEnvironment
db = SessionLocal()
try:
if env_id:
db_env = db.query(DeploymentEnvironment).filter(DeploymentEnvironment.id == env_id).first()
else:
# If no ID, try to find active or any environment in DB
db_env = db.query(DeploymentEnvironment).filter(DeploymentEnvironment.is_active == True).first()
if not db_env:
db_env = db.query(DeploymentEnvironment).first()
if db_env:
logger.info(f"[_get_env][Exit] Found environment in DB: {db_env.name}")
from src.core.config_models import Environment
# Use token as password for SupersetClient
return Environment(
id=db_env.id,
name=db_env.name,
url=db_env.superset_url,
username="admin",
password=db_env.superset_token,
verify_ssl=True
)
finally:
db.close()
# Priority 3: ConfigManager Default (if no env_id provided)
envs = self.config_manager.get_environments()
if envs:
if env_id:
# If env_id was provided but not found in DB or specifically by ID in config,
# but we have other envs, maybe it's one of them?
env = next((e for e in envs if e.id == env_id), None)
if env:
logger.info(f"[_get_env][Exit] Found environment {env_id} in ConfigManager list")
return env
if not env_id:
logger.info(f"[_get_env][Exit] Using first environment from ConfigManager: {envs[0].name}")
return envs[0]
logger.error(f"[_get_env][Coherence:Failed] No environments configured (searched config.json and DB). env_id={env_id}")
raise ValueError("No environments configured. Please add a Superset Environment in Settings.")
# [/DEF:_get_env:Function]
# [/DEF:GitPlugin:Class]
# [/DEF:backend.src.plugins.git_plugin:Module]
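Illustrative usage, not part of the changeset: a sketch of driving GitPlugin through its two main operations for one dashboard. The import path and environment IDs are placeholders; a repository for the dashboard must already be initialized via GitService.
import asyncio
from src.plugins.git_plugin import GitPlugin  # import path assumed

async def sync_then_deploy(dashboard_id: int):
    plugin = GitPlugin()
    # Pull the current state of the dashboard from the source environment into the repo.
    await plugin.execute({"operation": "sync", "dashboard_id": dashboard_id,
                          "source_env_id": "dev-env-id"})
    # Pack the repo and import it into the target environment.
    await plugin.execute({"operation": "deploy", "dashboard_id": dashboard_id,
                          "environment_id": "prod-env-id"})

asyncio.run(sync_then_deploy(42))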

View File

@@ -0,0 +1,195 @@
# [DEF:MapperPluginModule:Module]
# @SEMANTICS: plugin, mapper, datasets, postgresql, excel
# @PURPOSE: Implements a plugin for mapping dataset columns using external database connections or Excel files.
# @LAYER: Plugins
# @RELATION: Inherits from PluginBase. Uses DatasetMapper from superset_tool.
# @CONSTRAINT: Must use belief_scope for logging.
# [SECTION: IMPORTS]
from typing import Dict, Any, Optional
from ..core.plugin_base import PluginBase
from ..core.superset_client import SupersetClient
from ..core.logger import logger, belief_scope
from ..core.database import SessionLocal
from ..models.connection import ConnectionConfig
from ..core.utils.dataset_mapper import DatasetMapper
# [/SECTION]
# [DEF:MapperPlugin:Class]
# @PURPOSE: Plugin for mapping dataset column verbose names.
class MapperPlugin(PluginBase):
"""
Plugin for mapping dataset column verbose names.
"""
@property
# [DEF:id:Function]
# @PURPOSE: Returns the unique identifier for the mapper plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string ID.
# @RETURN: str - "dataset-mapper"
def id(self) -> str:
with belief_scope("id"):
return "dataset-mapper"
# [/DEF:id:Function]
@property
# [DEF:name:Function]
# @PURPOSE: Returns the human-readable name of the mapper plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string name.
# @RETURN: str - Plugin name.
def name(self) -> str:
with belief_scope("name"):
return "Dataset Mapper"
# [/DEF:name:Function]
@property
# [DEF:description:Function]
# @PURPOSE: Returns a description of the mapper plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string description.
# @RETURN: str - Plugin description.
def description(self) -> str:
with belief_scope("description"):
return "Map dataset column verbose names using PostgreSQL comments or Excel files."
# [/DEF:description:Function]
@property
# [DEF:version:Function]
# @PURPOSE: Returns the version of the mapper plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string version.
# @RETURN: str - "1.0.0"
def version(self) -> str:
with belief_scope("version"):
return "1.0.0"
# [/DEF:version:Function]
# [DEF:get_schema:Function]
# @PURPOSE: Returns the JSON schema for the mapper plugin parameters.
# @PRE: Plugin instance exists.
# @POST: Returns dictionary schema.
# @RETURN: Dict[str, Any] - JSON schema.
def get_schema(self) -> Dict[str, Any]:
with belief_scope("get_schema"):
return {
"type": "object",
"properties": {
"env": {
"type": "string",
"title": "Environment",
"description": "The Superset environment (e.g., 'dev')."
},
"dataset_id": {
"type": "integer",
"title": "Dataset ID",
"description": "The ID of the dataset to update."
},
"source": {
"type": "string",
"title": "Mapping Source",
"enum": ["postgres", "excel"],
"default": "postgres"
},
"connection_id": {
"type": "string",
"title": "Saved Connection",
"description": "The ID of a saved database connection (for postgres source)."
},
"table_name": {
"type": "string",
"title": "Table Name",
"description": "Target table name in PostgreSQL."
},
"table_schema": {
"type": "string",
"title": "Table Schema",
"description": "Target table schema in PostgreSQL.",
"default": "public"
},
"excel_path": {
"type": "string",
"title": "Excel Path",
"description": "Path to the Excel file (for excel source)."
}
},
"required": ["env", "dataset_id", "source"]
}
# [/DEF:get_schema:Function]
# [DEF:execute:Function]
# @PURPOSE: Executes the dataset mapping logic.
# @PARAM: params (Dict[str, Any]) - Mapping parameters.
# @PRE: Params contain valid 'env', 'dataset_id', and 'source'. params must be a dictionary.
# @POST: Updates the dataset in Superset.
# @RETURN: Dict[str, Any] - Execution status.
async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
with belief_scope("execute"):
env_name = params.get("env")
dataset_id = params.get("dataset_id")
source = params.get("source")
if not env_name or dataset_id is None or not source:
logger.error("[MapperPlugin.execute][State] Missing required parameters.")
raise ValueError("Missing required parameters: env, dataset_id, source")
# Get config and initialize client
from ..dependencies import get_config_manager
config_manager = get_config_manager()
env_config = config_manager.get_environment(env_name)
if not env_config:
logger.error(f"[MapperPlugin.execute][State] Environment '{env_name}' not found.")
raise ValueError(f"Environment '{env_name}' not found in configuration.")
client = SupersetClient(env_config)
client.authenticate()
postgres_config = None
if source == "postgres":
connection_id = params.get("connection_id")
if not connection_id:
logger.error("[MapperPlugin.execute][State] connection_id is required for postgres source.")
raise ValueError("connection_id is required for postgres source.")
# Load connection from DB
db = SessionLocal()
try:
conn_config = db.query(ConnectionConfig).filter(ConnectionConfig.id == connection_id).first()
if not conn_config:
logger.error(f"[MapperPlugin.execute][State] Connection {connection_id} not found.")
raise ValueError(f"Connection {connection_id} not found.")
postgres_config = {
'dbname': conn_config.database,
'user': conn_config.username,
'password': conn_config.password,
'host': conn_config.host,
'port': str(conn_config.port) if conn_config.port else '5432'
}
finally:
db.close()
logger.info(f"[MapperPlugin.execute][Action] Starting mapping for dataset {dataset_id} in {env_name}")
mapper = DatasetMapper()
try:
mapper.run_mapping(
superset_client=client,
dataset_id=dataset_id,
source=source,
postgres_config=postgres_config,
excel_path=params.get("excel_path"),
table_name=params.get("table_name"),
table_schema=params.get("table_schema") or "public"
)
logger.info(f"[MapperPlugin.execute][Success] Mapping completed for dataset {dataset_id}")
return {"status": "success", "dataset_id": dataset_id}
except Exception as e:
logger.error(f"[MapperPlugin.execute][Failure] Mapping failed: {e}")
raise
# [/DEF:execute:Function]
# [/DEF:MapperPlugin:Class]
# [/DEF:MapperPluginModule:Module]
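Illustrative usage, not part of the changeset: a sketch of invoking MapperPlugin with the postgres source. The import path, dataset ID, connection ID, and table names are placeholders; the connection ID must reference a saved ConnectionConfig row.
import asyncio
from src.plugins.mapper_plugin import MapperPlugin  # import path assumed

async def map_columns():
    plugin = MapperPlugin()
    await plugin.execute({
        "env": "dev",
        "dataset_id": 101,                          # placeholder dataset ID
        "source": "postgres",
        "connection_id": "<saved-connection-uuid>",  # placeholder ConnectionConfig ID
        "table_name": "sales",
        "table_schema": "public",
    })

asyncio.run(map_columns())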

View File

@@ -12,35 +12,73 @@ import zipfile
import re
from ..core.plugin_base import PluginBase
from superset_tool.client import SupersetClient
from superset_tool.utils.init_clients import setup_clients
from superset_tool.utils.fileio import create_temp_file, update_yamls, create_dashboard_export
from ..core.logger import belief_scope
from ..core.superset_client import SupersetClient
from ..core.utils.fileio import create_temp_file, update_yamls, create_dashboard_export
from ..dependencies import get_config_manager
from superset_tool.utils.logger import SupersetLogger
from ..core.migration_engine import MigrationEngine
from ..core.database import SessionLocal
from ..models.mapping import DatabaseMapping, Environment
# [DEF:MigrationPlugin:Class]
# @PURPOSE: Implementation of the migration plugin logic.
class MigrationPlugin(PluginBase):
"""
A plugin to migrate Superset dashboards between environments.
"""
@property
# [DEF:id:Function]
# @PURPOSE: Returns the unique identifier for the migration plugin.
# @PRE: None.
# @POST: Returns "superset-migration".
# @RETURN: str - "superset-migration"
def id(self) -> str:
return "superset-migration"
with belief_scope("id"):
return "superset-migration"
# [/DEF:id:Function]
@property
# [DEF:name:Function]
# @PURPOSE: Returns the human-readable name of the migration plugin.
# @PRE: None.
# @POST: Returns the plugin name.
# @RETURN: str - Plugin name.
def name(self) -> str:
return "Superset Dashboard Migration"
with belief_scope("name"):
return "Superset Dashboard Migration"
# [/DEF:name:Function]
@property
# [DEF:description:Function]
# @PURPOSE: Returns a description of the migration plugin.
# @PRE: None.
# @POST: Returns the plugin description.
# @RETURN: str - Plugin description.
def description(self) -> str:
return "Migrates dashboards between Superset environments."
with belief_scope("description"):
return "Migrates dashboards between Superset environments."
# [/DEF:description:Function]
@property
# [DEF:version:Function]
# @PURPOSE: Returns the version of the migration plugin.
# @PRE: None.
# @POST: Returns "1.0.0".
# @RETURN: str - "1.0.0"
def version(self) -> str:
return "1.0.0"
with belief_scope("version"):
return "1.0.0"
# [/DEF:version:Function]
# [DEF:get_schema:Function]
# @PURPOSE: Returns the JSON schema for migration plugin parameters.
# @PRE: Config manager is available.
# @POST: Returns a valid JSON schema dictionary.
# @RETURN: Dict[str, Any] - JSON schema.
def get_schema(self) -> Dict[str, Any]:
config_manager = get_config_manager()
with belief_scope("get_schema"):
config_manager = get_config_manager()
envs = [e.name for e in config_manager.get_environments()]
return {
@@ -82,50 +120,174 @@ class MigrationPlugin(PluginBase):
},
"required": ["from_env", "to_env", "dashboard_regex"],
}
# [/DEF:get_schema:Function]
# [DEF:execute:Function]
# @PURPOSE: Executes the dashboard migration logic.
# @PARAM: params (Dict[str, Any]) - Migration parameters.
# @PRE: Source and target environments must be configured.
# @POST: Selected dashboards are migrated.
async def execute(self, params: Dict[str, Any]):
from_env = params["from_env"]
to_env = params["to_env"]
dashboard_regex = params["dashboard_regex"]
with belief_scope("MigrationPlugin.execute"):
source_env_id = params.get("source_env_id")
target_env_id = params.get("target_env_id")
selected_ids = params.get("selected_ids")
# Legacy support or alternative params
from_env_name = params.get("from_env")
to_env_name = params.get("to_env")
dashboard_regex = params.get("dashboard_regex")
replace_db_config = params.get("replace_db_config", False)
from_db_id = params.get("from_db_id")
to_db_id = params.get("to_db_id")
logger = SupersetLogger(log_dir=Path.cwd() / "logs", console=True)
logger.info(f"[MigrationPlugin][Entry] Starting migration from {from_env} to {to_env}.")
# [DEF:MigrationPlugin.execute:Action]
# @PURPOSE: Execute the migration logic with proper task logging.
task_id = params.get("_task_id")
from ..dependencies import get_task_manager
tm = get_task_manager()
class TaskLoggerProxy:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the proxy logger.
# @PRE: None.
# @POST: Instance is initialized.
def __init__(self):
with belief_scope("__init__"):
# Initialize parent with dummy values since we override methods
pass
# [/DEF:__init__:Function]
# [DEF:debug:Function]
# @PURPOSE: Logs a debug message to the task manager.
# @PRE: msg is a string.
# @POST: Log is added to task manager if task_id exists.
def debug(self, msg, *args, extra=None, **kwargs):
with belief_scope("debug"):
if task_id: tm._add_log(task_id, "DEBUG", msg, extra or {})
# [/DEF:debug:Function]
# [DEF:info:Function]
# @PURPOSE: Logs an info message to the task manager.
# @PRE: msg is a string.
# @POST: Log is added to task manager if task_id exists.
def info(self, msg, *args, extra=None, **kwargs):
with belief_scope("info"):
if task_id: tm._add_log(task_id, "INFO", msg, extra or {})
# [/DEF:info:Function]
# [DEF:warning:Function]
# @PURPOSE: Logs a warning message to the task manager.
# @PRE: msg is a string.
# @POST: Log is added to task manager if task_id exists.
def warning(self, msg, *args, extra=None, **kwargs):
with belief_scope("warning"):
if task_id: tm._add_log(task_id, "WARNING", msg, extra or {})
# [/DEF:warning:Function]
# [DEF:error:Function]
# @PURPOSE: Logs an error message to the task manager.
# @PRE: msg is a string.
# @POST: Log is added to task manager if task_id exists.
def error(self, msg, *args, extra=None, **kwargs):
with belief_scope("error"):
if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
# [/DEF:error:Function]
# [DEF:critical:Function]
# @PURPOSE: Logs a critical message to the task manager.
# @PRE: msg is a string.
# @POST: Log is added to task manager if task_id exists.
def critical(self, msg, *args, extra=None, **kwargs):
with belief_scope("critical"):
if task_id: tm._add_log(task_id, "ERROR", msg, extra or {})
# [/DEF:critical:Function]
# [DEF:exception:Function]
# @PURPOSE: Logs an exception message to the task manager.
# @PRE: msg is a string.
# @POST: Log is added to task manager if task_id exists.
def exception(self, msg, *args, **kwargs):
with belief_scope("exception"):
if task_id: tm._add_log(task_id, "ERROR", msg, {"exception": True})
# [/DEF:exception:Function]
logger = TaskLoggerProxy()
logger.info(f"[MigrationPlugin][Entry] Starting migration task.")
logger.info(f"[MigrationPlugin][Action] Params: {params}")
try:
config_manager = get_config_manager()
all_clients = setup_clients(logger, custom_envs=config_manager.get_environments())
from_c = all_clients.get(from_env)
to_c = all_clients.get(to_env)
with belief_scope("execute"):
config_manager = get_config_manager()
environments = config_manager.get_environments()
# Resolve environments
src_env = None
tgt_env = None
if source_env_id:
src_env = next((e for e in environments if e.id == source_env_id), None)
elif from_env_name:
src_env = next((e for e in environments if e.name == from_env_name), None)
if target_env_id:
tgt_env = next((e for e in environments if e.id == target_env_id), None)
elif to_env_name:
tgt_env = next((e for e in environments if e.name == to_env_name), None)
if not src_env or not tgt_env:
raise ValueError(f"Could not resolve source or target environment. Source: {source_env_id or from_env_name}, Target: {target_env_id or to_env_name}")
from_env_name = src_env.name
to_env_name = tgt_env.name
logger.info(f"[MigrationPlugin][State] Resolved environments: {from_env_name} -> {to_env_name}")
from_c = SupersetClient(src_env)
to_c = SupersetClient(tgt_env)
if not from_c or not to_c:
raise ValueError(f"One or both environments ('{from_env}', '{to_env}') not found in configuration.")
raise ValueError(f"Clients not initialized for environments: {from_env_name}, {to_env_name}")
_, all_dashboards = from_c.get_dashboards()
regex_str = str(dashboard_regex)
dashboards_to_migrate = [
d for d in all_dashboards if re.search(regex_str, d["dashboard_title"], re.IGNORECASE)
]
if not dashboards_to_migrate:
logger.warning("[MigrationPlugin][State] No dashboards found matching the regex.")
dashboards_to_migrate = []
if selected_ids:
dashboards_to_migrate = [d for d in all_dashboards if d["id"] in selected_ids]
elif dashboard_regex:
regex_str = str(dashboard_regex)
dashboards_to_migrate = [
d for d in all_dashboards if re.search(regex_str, d["dashboard_title"], re.IGNORECASE)
]
else:
logger.warning("[MigrationPlugin][State] No selection criteria provided (selected_ids or dashboard_regex).")
return
db_config_replacement = None
if not dashboards_to_migrate:
logger.warning("[MigrationPlugin][State] No dashboards found matching criteria.")
return
# Fetch mappings from database
db_mapping = {}
if replace_db_config:
if from_db_id is None or to_db_id is None:
raise ValueError("Source and target database IDs are required when replacing database configuration.")
from_db = from_c.get_database(int(from_db_id))
to_db = to_c.get_database(int(to_db_id))
old_result = from_db.get("result", {})
new_result = to_db.get("result", {})
db_config_replacement = {
"old": {"database_name": old_result.get("database_name"), "uuid": old_result.get("uuid"), "id": str(from_db.get("id"))},
"new": {"database_name": new_result.get("database_name"), "uuid": new_result.get("uuid"), "id": str(to_db.get("id"))}
}
db = SessionLocal()
try:
# Find environment IDs by name
src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
if src_env and tgt_env:
mappings = db.query(DatabaseMapping).filter(
DatabaseMapping.source_env_id == src_env.id,
DatabaseMapping.target_env_id == tgt_env.id
).all()
db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
logger.info(f"[MigrationPlugin][State] Loaded {len(db_mapping)} database mappings.")
finally:
db.close()
engine = MigrationEngine()
for dash in dashboards_to_migrate:
dash_id, dash_slug, title = dash["id"], dash.get("slug"), dash["dashboard_title"]
@@ -133,26 +295,93 @@ class MigrationPlugin(PluginBase):
try:
exported_content, _ = from_c.export_dashboard(dash_id)
with create_temp_file(content=exported_content, dry_run=True, suffix=".zip", logger=logger) as tmp_zip_path:
if not db_config_replacement:
to_c.import_dashboard(file_name=tmp_zip_path, dash_id=dash_id, dash_slug=dash_slug)
else:
with create_temp_file(suffix=".dir", logger=logger) as tmp_unpack_dir:
with zipfile.ZipFile(tmp_zip_path, "r") as zip_ref:
zip_ref.extractall(tmp_unpack_dir)
update_yamls(db_configs=[db_config_replacement], path=str(tmp_unpack_dir))
with create_temp_file(suffix=".zip", dry_run=True, logger=logger) as tmp_new_zip:
create_dashboard_export(zip_path=tmp_new_zip, source_paths=[str(p) for p in Path(tmp_unpack_dir).glob("**/*")])
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
# Always transform to strip databases to avoid password errors
with create_temp_file(suffix=".zip", dry_run=True, logger=logger) as tmp_new_zip:
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
if not success and replace_db_config:
# Signal missing mapping and wait (only if we care about mappings)
if task_id:
logger.info(f"[MigrationPlugin][Action] Pausing for missing mapping in task {task_id}")
# In a real scenario, we'd pass the missing DB info to the frontend
# For this task, we'll just simulate the wait
await tm.wait_for_resolution(task_id)
# After resolution, retry transformation with updated mappings
# (Mappings would be updated in task.params by resolve_task)
db = SessionLocal()
try:
src_env = db.query(Environment).filter(Environment.name == from_env_name).first()
tgt_env = db.query(Environment).filter(Environment.name == to_env_name).first()
mappings = db.query(DatabaseMapping).filter(
DatabaseMapping.source_env_id == src_env.id,
DatabaseMapping.target_env_id == tgt_env.id
).all()
db_mapping = {m.source_db_uuid: m.target_db_uuid for m in mappings}
finally:
db.close()
success = engine.transform_zip(str(tmp_zip_path), str(tmp_new_zip), db_mapping, strip_databases=False)
if success:
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
else:
logger.error(f"[MigrationPlugin][Failure] Failed to transform ZIP for dashboard {title}")
logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported.")
except Exception as exc:
# Check for password error
error_msg = str(exc)
# The error message from Superset is often a JSON string inside a string.
# We need to robustly detect the password requirement.
# Typical error: "Error importing dashboard: databases/PostgreSQL.yaml: {'_schema': ['Must provide a password for the database']}"
if "Must provide a password for the database" in error_msg:
# Extract database name
# Try to find "databases/DBNAME.yaml" pattern
import re
db_name = "unknown"
match = re.search(r"databases/([^.]+)\.yaml", error_msg)
if match:
db_name = match.group(1)
else:
# Fallback: try to find 'database 'NAME'' pattern
match_alt = re.search(r"database '([^']+)'", error_msg)
if match_alt:
db_name = match_alt.group(1)
logger.warning(f"[MigrationPlugin][Action] Detected missing password for database: {db_name}")
if task_id:
input_request = {
"type": "database_password",
"databases": [db_name],
"error_message": error_msg
}
tm.await_input(task_id, input_request)
# Wait for user input
await tm.wait_for_input(task_id)
# Resume with passwords
task = tm.get_task(task_id)
passwords = task.params.get("passwords", {})
# Retry import with password
if passwords:
logger.info(f"[MigrationPlugin][Action] Retrying import for {title} with provided passwords.")
to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug, passwords=passwords)
logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported after password injection.")
# Clear passwords from params after use for security
if "passwords" in task.params:
del task.params["passwords"]
continue
logger.error(f"[MigrationPlugin][Failure] Failed to migrate dashboard {title}: {exc}", exc_info=True)
logger.info("[MigrationPlugin][Exit] Migration finished.")
logger.info("[MigrationPlugin][Exit] Migration finished.")
except Exception as e:
logger.critical(f"[MigrationPlugin][Failure] Fatal error during migration: {e}", exc_info=True)
raise e
# [/DEF:MigrationPlugin]
# [/DEF:MigrationPlugin.execute:Action]
# [/DEF:execute:Function]
# [/DEF:MigrationPlugin:Class]
# [/DEF:MigrationPlugin:Module]

View File

@@ -0,0 +1,202 @@
# [DEF:SearchPluginModule:Module]
# @SEMANTICS: plugin, search, datasets, regex, superset
# @PURPOSE: Implements a plugin for searching text patterns across all datasets in a specific Superset environment.
# @LAYER: Plugins
# @RELATION: Inherits from PluginBase. Uses SupersetClient from core.
# @CONSTRAINT: Must use belief_scope for logging.
# [SECTION: IMPORTS]
import re
from typing import Dict, Any, List, Optional
from ..core.plugin_base import PluginBase
from ..core.superset_client import SupersetClient
from ..core.logger import logger, belief_scope
# [/SECTION]
# [DEF:SearchPlugin:Class]
# @PURPOSE: Plugin for searching text patterns in Superset datasets.
class SearchPlugin(PluginBase):
"""
Plugin for searching text patterns in Superset datasets.
"""
@property
# [DEF:id:Function]
# @PURPOSE: Returns the unique identifier for the search plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string ID.
# @RETURN: str - "search-datasets"
def id(self) -> str:
with belief_scope("id"):
return "search-datasets"
# [/DEF:id:Function]
@property
# [DEF:name:Function]
# @PURPOSE: Returns the human-readable name of the search plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string name.
# @RETURN: str - Plugin name.
def name(self) -> str:
with belief_scope("name"):
return "Search Datasets"
# [/DEF:name:Function]
@property
# [DEF:description:Function]
# @PURPOSE: Returns a description of the search plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string description.
# @RETURN: str - Plugin description.
def description(self) -> str:
with belief_scope("description"):
return "Search for text patterns across all datasets in a specific environment."
# [/DEF:description:Function]
@property
# [DEF:version:Function]
# @PURPOSE: Returns the version of the search plugin.
# @PRE: Plugin instance exists.
# @POST: Returns string version.
# @RETURN: str - "1.0.0"
def version(self) -> str:
with belief_scope("version"):
return "1.0.0"
# [/DEF:version:Function]
# [DEF:get_schema:Function]
# @PURPOSE: Returns the JSON schema for the search plugin parameters.
# @PRE: Plugin instance exists.
# @POST: Returns dictionary schema.
# @RETURN: Dict[str, Any] - JSON schema.
def get_schema(self) -> Dict[str, Any]:
with belief_scope("get_schema"):
return {
"type": "object",
"properties": {
"env": {
"type": "string",
"title": "Environment",
"description": "The Superset environment to search in (e.g., 'dev', 'prod')."
},
"query": {
"type": "string",
"title": "Search Query (Regex)",
"description": "The regex pattern to search for."
}
},
"required": ["env", "query"]
}
# [/DEF:get_schema:Function]
# [DEF:execute:Function]
# @PURPOSE: Executes the dataset search logic.
# @PARAM: params (Dict[str, Any]) - Search parameters.
# @PRE: Params contain valid 'env' and 'query'.
# @POST: Returns a dictionary with count and results list.
# @RETURN: Dict[str, Any] - Search results.
async def execute(self, params: Dict[str, Any]) -> Dict[str, Any]:
with belief_scope("SearchPlugin.execute", f"params={params}"):
env_name = params.get("env")
search_query = params.get("query")
if not env_name or not search_query:
logger.error("[SearchPlugin.execute][State] Missing required parameters.")
raise ValueError("Missing required parameters: env, query")
# Get config and initialize client
from ..dependencies import get_config_manager
config_manager = get_config_manager()
env_config = config_manager.get_environment(env_name)
if not env_config:
logger.error(f"[SearchPlugin.execute][State] Environment '{env_name}' not found.")
raise ValueError(f"Environment '{env_name}' not found in configuration.")
client = SupersetClient(env_config)
client.authenticate()
logger.info(f"[SearchPlugin.execute][Action] Searching for pattern: '{search_query}' in environment: {env_name}")
try:
# Ported logic from search_script.py
_, datasets = client.get_datasets(query={"columns": ["id", "table_name", "sql", "database", "columns"]})
if not datasets:
logger.warning("[SearchPlugin.execute][State] No datasets found.")
return {"count": 0, "results": []}
pattern = re.compile(search_query, re.IGNORECASE)
results = []
for dataset in datasets:
dataset_id = dataset.get('id')
dataset_name = dataset.get('table_name', 'Unknown')
if not dataset_id:
continue
for field, value in dataset.items():
value_str = str(value)
if pattern.search(value_str):
match_obj = pattern.search(value_str)
results.append({
"dataset_id": dataset_id,
"dataset_name": dataset_name,
"field": field,
"match_context": self._get_context(value_str, match_obj.group() if match_obj else ""),
"full_value": value_str
})
logger.info(f"[SearchPlugin.execute][Success] Found matches in {len(results)} locations.")
return {
"count": len(results),
"results": results
}
except re.error as e:
logger.error(f"[SearchPlugin.execute][Failure] Invalid regex pattern: {e}")
raise ValueError(f"Invalid regex pattern: {e}")
except Exception as e:
logger.error(f"[SearchPlugin.execute][Failure] Error during search: {e}")
raise
# [/DEF:execute:Function]
# [DEF:_get_context:Function]
# @PURPOSE: Extracts a small context around the match for display.
# @PARAM: text (str) - The full text to extract context from.
# @PARAM: match_text (str) - The matched text pattern.
# @PARAM: context_lines (int) - Number of lines of context to include.
# @PRE: text and match_text must be strings.
# @POST: Returns context string.
# @RETURN: str - Extracted context.
def _get_context(self, text: str, match_text: str, context_lines: int = 1) -> str:
"""
Extracts a small context around the match for display.
"""
with belief_scope("_get_context"):
if not match_text:
return text[:100] + "..." if len(text) > 100 else text
lines = text.splitlines()
match_line_index = -1
for i, line in enumerate(lines):
if match_text in line:
match_line_index = i
break
if match_line_index != -1:
start = max(0, match_line_index - context_lines)
end = min(len(lines), match_line_index + context_lines + 1)
context = []
for i in range(start, end):
line_content = lines[i]
if i == match_line_index:
context.append(f"==> {line_content}")
else:
context.append(f" {line_content}")
return "\n".join(context)
return text[:100] + "..." if len(text) > 100 else text
# [/DEF:_get_context:Function]
# [/DEF:SearchPlugin:Class]
# [/DEF:SearchPluginModule:Module]

View File

@@ -0,0 +1,380 @@
# [DEF:backend.src.services.git_service:Module]
#
# @SEMANTICS: git, service, gitpython, repository, version_control
# @PURPOSE: Core Git logic using GitPython to manage dashboard repositories.
# @LAYER: Service
# @RELATION: INHERITS_FROM -> None
# @RELATION: USED_BY -> src.api.routes.git
# @RELATION: USED_BY -> src.plugins.git_plugin
#
# @INVARIANT: All Git operations must be performed on a valid local directory.
import os
import shutil
import httpx
from git import Repo, RemoteProgress
from fastapi import HTTPException
from typing import List, Optional
from datetime import datetime
from src.core.logger import logger, belief_scope
from src.models.git import GitProvider
# [DEF:GitService:Class]
# @PURPOSE: Wrapper for GitPython operations with semantic logging and error handling.
class GitService:
"""
Wrapper for GitPython operations.
"""
# [DEF:__init__:Function]
# @PURPOSE: Initializes the GitService with a base path for repositories.
# @PARAM: base_path (str) - Root directory for all Git clones.
def __init__(self, base_path: str = "backend/git_repos"):
with belief_scope("GitService.__init__"):
self.base_path = base_path
if not os.path.exists(self.base_path):
os.makedirs(self.base_path)
# [/DEF:__init__:Function]
# [DEF:_get_repo_path:Function]
# @PURPOSE: Resolves the local filesystem path for a dashboard's repository.
# @PARAM: dashboard_id (int)
# @RETURN: str
def _get_repo_path(self, dashboard_id: int) -> str:
return os.path.join(self.base_path, str(dashboard_id))
# [/DEF:_get_repo_path:Function]
# [DEF:init_repo:Function]
# @PURPOSE: Initialize or clone a repository for a dashboard.
# @PARAM: dashboard_id (int)
# @PARAM: remote_url (str)
# @PARAM: pat (str) - Personal Access Token for authentication.
# @RETURN: Repo - GitPython Repo object.
def init_repo(self, dashboard_id: int, remote_url: str, pat: str) -> Repo:
with belief_scope("GitService.init_repo"):
repo_path = self._get_repo_path(dashboard_id)
# Inject PAT into remote URL if needed
if pat and "://" in remote_url:
proto, rest = remote_url.split("://", 1)
auth_url = f"{proto}://oauth2:{pat}@{rest}"
else:
auth_url = remote_url
if os.path.exists(repo_path):
logger.info(f"[init_repo][Action] Opening existing repo at {repo_path}")
return Repo(repo_path)
logger.info(f"[init_repo][Action] Cloning {remote_url} to {repo_path}")
return Repo.clone_from(auth_url, repo_path)
# [/DEF:init_repo:Function]
# [DEF:get_repo:Function]
# @PURPOSE: Get Repo object for a dashboard.
# @PRE: Repository must exist on disk.
# @RETURN: Repo
def get_repo(self, dashboard_id: int) -> Repo:
with belief_scope("GitService.get_repo"):
repo_path = self._get_repo_path(dashboard_id)
if not os.path.exists(repo_path):
logger.error(f"[get_repo][Coherence:Failed] Repository for dashboard {dashboard_id} does not exist")
raise HTTPException(status_code=404, detail=f"Repository for dashboard {dashboard_id} not found")
try:
return Repo(repo_path)
except Exception as e:
logger.error(f"[get_repo][Coherence:Failed] Failed to open repository at {repo_path}: {e}")
raise HTTPException(status_code=500, detail="Failed to open local Git repository")
# [/DEF:get_repo:Function]
# [DEF:list_branches:Function]
# @PURPOSE: List all branches for a dashboard's repository.
# @RETURN: List[dict]
def list_branches(self, dashboard_id: int) -> List[dict]:
with belief_scope("GitService.list_branches"):
repo = self.get_repo(dashboard_id)
logger.info(f"[list_branches][Action] Listing branches for {dashboard_id}. Refs: {repo.refs}")
branches = []
# Add existing refs
for ref in repo.refs:
try:
# Strip prefixes for UI
name = ref.name.replace('refs/heads/', '').replace('refs/remotes/origin/', '')
# Avoid duplicates (e.g. local and remote with same name)
if any(b['name'] == name for b in branches):
continue
branches.append({
"name": name,
"commit_hash": ref.commit.hexsha if hasattr(ref, 'commit') else "0000000",
"is_remote": ref.is_remote() if hasattr(ref, 'is_remote') else False,
"last_updated": datetime.fromtimestamp(ref.commit.committed_date) if hasattr(ref, 'commit') else datetime.utcnow()
})
except Exception as e:
logger.warning(f"[list_branches][Action] Skipping ref {ref}: {e}")
# Ensure the current active branch is in the list even if it has no commits or refs
try:
active_name = repo.active_branch.name
if not any(b['name'] == active_name for b in branches):
branches.append({
"name": active_name,
"commit_hash": "0000000",
"is_remote": False,
"last_updated": datetime.utcnow()
})
except Exception as e:
logger.warning(f"[list_branches][Action] Could not determine active branch: {e}")
# If everything else failed and list is still empty, add default
if not branches:
branches.append({
"name": "main",
"commit_hash": "0000000",
"is_remote": False,
"last_updated": datetime.utcnow()
})
return branches
# [/DEF:list_branches:Function]
# [DEF:create_branch:Function]
# @PURPOSE: Create a new branch from an existing one.
# @PARAM: name (str) - New branch name.
# @PARAM: from_branch (str) - Source branch.
def create_branch(self, dashboard_id: int, name: str, from_branch: str = "main"):
with belief_scope("GitService.create_branch"):
repo = self.get_repo(dashboard_id)
logger.info(f"[create_branch][Action] Creating branch {name} from {from_branch}")
# Handle empty repository case (no commits)
if not repo.heads and not repo.remotes:
logger.warning(f"[create_branch][Action] Repository is empty. Creating initial commit to enable branching.")
readme_path = os.path.join(repo.working_dir, "README.md")
if not os.path.exists(readme_path):
with open(readme_path, "w") as f:
f.write(f"# Dashboard {dashboard_id}\nGit repository for Superset dashboard integration.")
repo.index.add(["README.md"])
repo.index.commit("Initial commit")
# Verify source branch exists
try:
repo.commit(from_branch)
except Exception:
logger.warning(f"[create_branch][Action] Source branch {from_branch} not found, using HEAD")
from_branch = repo.head
try:
new_branch = repo.create_head(name, from_branch)
return new_branch
except Exception as e:
logger.error(f"[create_branch][Coherence:Failed] {e}")
raise
# [/DEF:create_branch:Function]
# [DEF:checkout_branch:Function]
# @PURPOSE: Switch to a specific branch.
def checkout_branch(self, dashboard_id: int, name: str):
with belief_scope("GitService.checkout_branch"):
repo = self.get_repo(dashboard_id)
logger.info(f"[checkout_branch][Action] Checking out branch {name}")
repo.git.checkout(name)
# [/DEF:checkout_branch:Function]
# [DEF:commit_changes:Function]
# @PURPOSE: Stage and commit changes.
# @PARAM: message (str) - Commit message.
# @PARAM: files (List[str]) - Optional list of specific files to stage.
def commit_changes(self, dashboard_id: int, message: str, files: List[str] = None):
with belief_scope("GitService.commit_changes"):
repo = self.get_repo(dashboard_id)
# Check if there are any changes to commit
if not repo.is_dirty(untracked_files=True) and not files:
logger.info(f"[commit_changes][Action] No changes to commit for dashboard {dashboard_id}")
return
if files:
logger.info(f"[commit_changes][Action] Staging files: {files}")
repo.index.add(files)
else:
logger.info("[commit_changes][Action] Staging all changes")
repo.git.add(A=True)
repo.index.commit(message)
logger.info(f"[commit_changes][Coherence:OK] Committed changes with message: {message}")
# [/DEF:commit_changes:Function]
# [DEF:push_changes:Function]
# @PURPOSE: Push local commits to remote.
def push_changes(self, dashboard_id: int):
with belief_scope("GitService.push_changes"):
repo = self.get_repo(dashboard_id)
# Ensure we have something to push
if not repo.heads:
logger.warning(f"[push_changes][Coherence:Failed] No local branches to push for dashboard {dashboard_id}")
return
try:
origin = repo.remote(name='origin')
except ValueError:
logger.error(f"[push_changes][Coherence:Failed] Remote 'origin' not found for dashboard {dashboard_id}")
raise HTTPException(status_code=400, detail="Remote 'origin' not configured")
# Push the active branch with an explicit refspec so no upstream is required
try:
current_branch = repo.active_branch
logger.info(f"[push_changes][Action] Pushing branch {current_branch.name} to origin")
push_info = origin.push(refspec=f'{current_branch.name}:{current_branch.name}')
for info in push_info:
if info.flags & info.ERROR:
logger.error(f"[push_changes][Coherence:Failed] Error pushing ref {info.remote_ref_string}: {info.summary}")
raise Exception(f"Git push error for {info.remote_ref_string}: {info.summary}")
except Exception as e:
logger.error(f"[push_changes][Coherence:Failed] Failed to push changes: {e}")
raise HTTPException(status_code=500, detail=f"Git push failed: {str(e)}")
# [/DEF:push_changes:Function]
# [DEF:pull_changes:Function]
# @PURPOSE: Pull changes from remote.
def pull_changes(self, dashboard_id: int):
with belief_scope("GitService.pull_changes"):
repo = self.get_repo(dashboard_id)
try:
origin = repo.remote(name='origin')
logger.info("[pull_changes][Action] Pulling changes from origin")
fetch_info = origin.pull()
for info in fetch_info:
if info.flags & info.ERROR:
logger.error(f"[pull_changes][Coherence:Failed] Error pulling ref {info.ref}: {info.note}")
raise Exception(f"Git pull error for {info.ref}: {info.note}")
except ValueError:
logger.error(f"[pull_changes][Coherence:Failed] Remote 'origin' not found for dashboard {dashboard_id}")
raise HTTPException(status_code=400, detail="Remote 'origin' not configured")
except Exception as e:
logger.error(f"[pull_changes][Coherence:Failed] Failed to pull changes: {e}")
raise HTTPException(status_code=500, detail=f"Git pull failed: {str(e)}")
# [/DEF:pull_changes:Function]
# [DEF:get_status:Function]
# @PURPOSE: Get current repository status (dirty files, untracked, etc.)
# @RETURN: dict
def get_status(self, dashboard_id: int) -> dict:
with belief_scope("GitService.get_status"):
repo = self.get_repo(dashboard_id)
# Handle empty repository (no commits)
has_commits = False
try:
repo.head.commit
has_commits = True
except Exception:
has_commits = False
return {
"is_dirty": repo.is_dirty(untracked_files=True),
"untracked_files": repo.untracked_files,
"modified_files": [item.a_path for item in repo.index.diff(None)],
"staged_files": [item.a_path for item in repo.index.diff("HEAD")] if has_commits else [],
"current_branch": repo.active_branch.name
}
# [/DEF:get_status:Function]
# [DEF:get_diff:Function]
# @PURPOSE: Generate diff for a file or the whole repository.
# @PARAM: file_path (str) - Optional specific file.
# @PARAM: staged (bool) - Whether to show staged changes.
# @RETURN: str
def get_diff(self, dashboard_id: int, file_path: str = None, staged: bool = False) -> str:
with belief_scope("GitService.get_diff"):
repo = self.get_repo(dashboard_id)
diff_args = []
if staged:
diff_args.append("--staged")
if file_path:
return repo.git.diff(*diff_args, "--", file_path)
return repo.git.diff(*diff_args)
# [/DEF:get_diff:Function]
# [DEF:get_commit_history:Function]
# @PURPOSE: Retrieve commit history for a repository.
# @PARAM: limit (int) - Max number of commits to return.
# @RETURN: List[dict]
def get_commit_history(self, dashboard_id: int, limit: int = 50) -> List[dict]:
with belief_scope("GitService.get_commit_history"):
repo = self.get_repo(dashboard_id)
commits = []
try:
# Check if there are any commits at all
if not repo.heads and not repo.remotes:
return []
for commit in repo.iter_commits(max_count=limit):
commits.append({
"hash": commit.hexsha,
"author": commit.author.name,
"email": commit.author.email,
"timestamp": datetime.fromtimestamp(commit.committed_date),
"message": commit.message.strip(),
"files_changed": list(commit.stats.files.keys())
})
except Exception as e:
logger.warning(f"[get_commit_history][Action] Could not retrieve commit history for dashboard {dashboard_id}: {e}")
return []
return commits
# [/DEF:get_commit_history:Function]
# [DEF:test_connection:Function]
# @PURPOSE: Test connection to Git provider using PAT.
# @PARAM: provider (GitProvider)
# @PARAM: url (str)
# @PARAM: pat (str)
# @RETURN: bool
async def test_connection(self, provider: GitProvider, url: str, pat: str) -> bool:
with belief_scope("GitService.test_connection"):
# Check for offline mode or local-only URLs
if ".local" in url or "localhost" in url:
logger.info("[test_connection][Action] Local/Offline mode detected for URL")
return True
if not url.startswith(('http://', 'https://')):
logger.error(f"[test_connection][Coherence:Failed] Invalid URL protocol: {url}")
return False
if not pat or not pat.strip():
logger.error("[test_connection][Coherence:Failed] Git PAT is missing or empty")
return False
pat = pat.strip()
try:
async with httpx.AsyncClient() as client:
if provider == GitProvider.GITHUB:
headers = {"Authorization": f"token {pat}"}
api_url = "https://api.github.com/user" if "github.com" in url else f"{url.rstrip('/')}/api/v3/user"
resp = await client.get(api_url, headers=headers)
elif provider == GitProvider.GITLAB:
headers = {"PRIVATE-TOKEN": pat}
api_url = f"{url.rstrip('/')}/api/v4/user"
resp = await client.get(api_url, headers=headers)
elif provider == GitProvider.GITEA:
headers = {"Authorization": f"token {pat}"}
api_url = f"{url.rstrip('/')}/api/v1/user"
resp = await client.get(api_url, headers=headers)
else:
return False
if resp.status_code != 200:
logger.error(f"[test_connection][Coherence:Failed] Git connection test failed for {provider} at {api_url}. Status: {resp.status_code}")
return resp.status_code == 200
except Exception as e:
logger.error(f"[test_connection][Coherence:Failed] Error testing git connection: {e}")
return False
# [/DEF:test_connection:Function]
# [/DEF:GitService:Class]
# [/DEF:backend.src.services.git_service:Module]

View File

@@ -0,0 +1,71 @@
# [DEF:backend.src.services.mapping_service:Module]
#
# @SEMANTICS: service, mapping, fuzzy-matching, superset
# @PURPOSE: Orchestrates database fetching and fuzzy matching suggestions.
# @LAYER: Service
# @RELATION: DEPENDS_ON -> backend.src.core.superset_client
# @RELATION: DEPENDS_ON -> backend.src.core.utils.matching
#
# @INVARIANT: Suggestions are based on database names.
# [SECTION: IMPORTS]
from typing import List, Dict
from backend.src.core.logger import belief_scope
from backend.src.core.superset_client import SupersetClient
from backend.src.core.utils.matching import suggest_mappings
# [/SECTION]
# [DEF:MappingService:Class]
# @PURPOSE: Service for handling database mapping logic.
class MappingService:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the mapping service with a config manager.
# @PRE: config_manager is provided.
# @PARAM: config_manager (ConfigManager) - The configuration manager.
# @POST: Service is initialized.
def __init__(self, config_manager):
with belief_scope("MappingService.__init__"):
self.config_manager = config_manager
# [/DEF:__init__:Function]
# [DEF:_get_client:Function]
# @PURPOSE: Helper to get an initialized SupersetClient for an environment.
# @PARAM: env_id (str) - The ID of the environment.
# @PRE: environment must exist in config.
# @POST: Returns an initialized SupersetClient.
# @RETURN: SupersetClient - Initialized client.
def _get_client(self, env_id: str) -> SupersetClient:
with belief_scope("MappingService._get_client", f"env_id={env_id}"):
envs = self.config_manager.get_environments()
env = next((e for e in envs if e.id == env_id), None)
if not env:
raise ValueError(f"Environment {env_id} not found")
return SupersetClient(env)
# [/DEF:_get_client:Function]
# [DEF:get_suggestions:Function]
# @PURPOSE: Fetches databases from both environments and returns fuzzy matching suggestions.
# @PARAM: source_env_id (str) - Source environment ID.
# @PARAM: target_env_id (str) - Target environment ID.
# @PRE: Both environments must be accessible.
# @POST: Returns fuzzy-matched database suggestions.
# @RETURN: List[Dict] - Suggested mappings.
async def get_suggestions(self, source_env_id: str, target_env_id: str) -> List[Dict]:
with belief_scope("MappingService.get_suggestions", f"source={source_env_id}, target={target_env_id}"):
"""
Get suggested mappings between two environments.
"""
source_client = self._get_client(source_env_id)
target_client = self._get_client(target_env_id)
source_dbs = source_client.get_databases_summary()
target_dbs = target_client.get_databases_summary()
return suggest_mappings(source_dbs, target_dbs)
# [/DEF:get_suggestions:Function]
# [/DEF:MappingService:Class]
# [/DEF:backend.src.services.mapping_service:Module]

BIN
backend/tasks.db Normal file

Binary file not shown.

View File

@@ -0,0 +1,59 @@
import pytest
from src.core.logger import belief_scope, logger
# [DEF:test_belief_scope_logs_entry_action_exit:Function]
# @PURPOSE: Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs.
# @PRE: belief_scope is available. caplog fixture is used.
# @POST: Logs are verified to contain Entry, Action, and Exit tags.
def test_belief_scope_logs_entry_action_exit(caplog):
"""Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs."""
caplog.set_level("INFO")
with belief_scope("TestFunction"):
logger.info("Doing something important")
# Check that the logs contain the expected patterns
log_messages = [record.message for record in caplog.records]
assert any("[TestFunction][Entry]" in msg for msg in log_messages), "Entry log not found"
assert any("[TestFunction][Action] Doing something important" in msg for msg in log_messages), "Action log not found"
assert any("[TestFunction][Exit]" in msg for msg in log_messages), "Exit log not found"
# [/DEF:test_belief_scope_logs_entry_action_exit:Function]
# [DEF:test_belief_scope_error_handling:Function]
# @PURPOSE: Test that belief_scope logs Coherence:Failed on exception.
# @PRE: belief_scope is available. caplog fixture is used.
# @POST: Logs are verified to contain Coherence:Failed tag.
def test_belief_scope_error_handling(caplog):
"""Test that belief_scope logs Coherence:Failed on exception."""
caplog.set_level("INFO")
with pytest.raises(ValueError):
with belief_scope("FailingFunction"):
raise ValueError("Something went wrong")
log_messages = [record.message for record in caplog.records]
assert any("[FailingFunction][Entry]" in msg for msg in log_messages), "Entry log not found"
assert any("[FailingFunction][Coherence:Failed]" in msg for msg in log_messages), "Failed coherence log not found"
# Exit should not be logged on failure
assert not any("[FailingFunction][Exit]" in msg for msg in log_messages), "Exit log should not appear on failure"
# [/DEF:test_belief_scope_error_handling:Function]
# [DEF:test_belief_scope_success_coherence:Function]
# @PURPOSE: Test that belief_scope logs Coherence:OK on success.
# @PRE: belief_scope is available. caplog fixture is used.
# @POST: Logs are verified to contain Coherence:OK tag.
def test_belief_scope_success_coherence(caplog):
"""Test that belief_scope logs Coherence:OK on success."""
caplog.set_level("INFO")
with belief_scope("SuccessFunction"):
pass
log_messages = [record.message for record in caplog.records]
assert any("[SuccessFunction][Coherence:OK]" in msg for msg in log_messages), "Success coherence log not found"
# [/DEF:test_belief_scope_success_coherence:Function]

View File

@@ -1,49 +1,23 @@
import pytest
from superset_tool.models import SupersetConfig
from src.core.config_models import Environment
from src.core.logger import belief_scope
def test_superset_config_url_normalization():
auth = {
"provider": "db",
"username": "admin",
"password": "password",
"refresh": "token"
}
# Test with /api/v1 already present
config = SupersetConfig(
env="dev",
base_url="http://localhost:8088/api/v1",
auth=auth
)
assert config.base_url == "http://localhost:8088/api/v1"
# Test without /api/v1
config = SupersetConfig(
env="dev",
base_url="http://localhost:8088",
auth=auth
)
assert config.base_url == "http://localhost:8088/api/v1"
# Test with trailing slash
config = SupersetConfig(
env="dev",
base_url="http://localhost:8088/",
auth=auth
)
assert config.base_url == "http://localhost:8088/api/v1"
def test_superset_config_invalid_url():
auth = {
"provider": "db",
"username": "admin",
"password": "password",
"refresh": "token"
}
with pytest.raises(ValueError, match="Must start with http:// or https://"):
SupersetConfig(
env="dev",
base_url="localhost:8088",
auth=auth
# [DEF:test_environment_model:Function]
# @PURPOSE: Tests that Environment model correctly stores values.
# @PRE: Environment class is available.
# @POST: Values are verified.
def test_environment_model():
with belief_scope("test_environment_model"):
env = Environment(
id="test-id",
name="test-env",
url="http://localhost:8088/api/v1",
username="admin",
password="password"
)
assert env.id == "test-id"
assert env.name == "test-env"
assert env.url == "http://localhost:8088/api/v1"
# [/DEF:test_environment_model:Function]

View File

@@ -1,163 +0,0 @@
# [DEF:backup_script:Module]
#
# @SEMANTICS: backup, superset, automation, dashboard
# @PURPOSE: This module is responsible for automated backup of Superset dashboards.
# @LAYER: App
# @RELATION: DEPENDS_ON -> superset_tool.client
# @RELATION: DEPENDS_ON -> superset_tool.utils
# @PUBLIC_API: BackupConfig, backup_dashboards, main
# [SECTION: IMPORTS]
import logging
import sys
from pathlib import Path
from dataclasses import dataclass,field
from requests.exceptions import RequestException
from superset_tool.client import SupersetClient
from superset_tool.exceptions import SupersetAPIError
from superset_tool.utils.logger import SupersetLogger
from superset_tool.utils.fileio import (
save_and_unpack_dashboard,
archive_exports,
sanitize_filename,
consolidate_archive_folders,
remove_empty_directories,
RetentionPolicy
)
from superset_tool.utils.init_clients import setup_clients
# [/SECTION]
# [DEF:BackupConfig:DataClass]
# @PURPOSE: Stores the configuration for the backup process.
@dataclass
class BackupConfig:
"""Configuration for the backup process."""
consolidate: bool = True
rotate_archive: bool = True
clean_folders: bool = True
retention_policy: RetentionPolicy = field(default_factory=RetentionPolicy)
# [/DEF:BackupConfig]
# [DEF:backup_dashboards:Function]
# @PURPOSE: Backs up all available dashboards for the given client and environment, skipping export errors.
# @PRE: `client` must be an initialized `SupersetClient` instance.
# @PRE: `env_name` must be a string identifying the environment.
# @PRE: `backup_root` must be a valid path to the backup root directory.
# @POST: Dashboards are exported and saved. Export errors are logged and do not stop the script.
# @RELATION: CALLS -> client.get_dashboards
# @RELATION: CALLS -> client.export_dashboard
# @RELATION: CALLS -> save_and_unpack_dashboard
# @RELATION: CALLS -> archive_exports
# @RELATION: CALLS -> consolidate_archive_folders
# @RELATION: CALLS -> remove_empty_directories
# @PARAM: client (SupersetClient) - Client for accessing the Superset API.
# @PARAM: env_name (str) - Environment name (e.g., 'PROD').
# @PARAM: backup_root (Path) - Root directory for storing backups.
# @PARAM: logger (SupersetLogger) - Logger instance.
# @PARAM: config (BackupConfig) - Backup process configuration.
# @RETURN: bool - `True` if all dashboards were exported without critical errors, `False` otherwise.
def backup_dashboards(
client: SupersetClient,
env_name: str,
backup_root: Path,
logger: SupersetLogger,
config: BackupConfig
) -> bool:
logger.info(f"[backup_dashboards][Entry] Starting backup for {env_name}.")
try:
dashboard_count, dashboard_meta = client.get_dashboards()
logger.info(f"[backup_dashboards][Progress] Found {dashboard_count} dashboards to export in {env_name}.")
if dashboard_count == 0:
return True
success_count = 0
for db in dashboard_meta:
dashboard_id = db.get('id')
dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
if not dashboard_id:
continue
try:
dashboard_base_dir_name = sanitize_filename(f"{dashboard_title}")
dashboard_dir = backup_root / env_name / dashboard_base_dir_name
dashboard_dir.mkdir(parents=True, exist_ok=True)
zip_content, filename = client.export_dashboard(dashboard_id)
save_and_unpack_dashboard(
zip_content=zip_content,
original_filename=filename,
output_dir=dashboard_dir,
unpack=False,
logger=logger
)
if config.rotate_archive:
archive_exports(str(dashboard_dir), policy=config.retention_policy, logger=logger)
success_count += 1
except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
logger.error(f"[backup_dashboards][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
continue
if config.consolidate:
consolidate_archive_folders(backup_root / env_name , logger=logger)
if config.clean_folders:
remove_empty_directories(str(backup_root / env_name), logger=logger)
logger.info(f"[backup_dashboards][CoherenceCheck:Passed] Backup logic completed.")
return success_count == dashboard_count
except (RequestException, IOError) as e:
logger.critical(f"[backup_dashboards][Failure] Fatal error during backup for {env_name}: {e}", exc_info=True)
return False
# [/DEF:backup_dashboards]
# [DEF:main:Function]
# @PURPOSE: Main entry point for running the backup process.
# @RELATION: CALLS -> setup_clients
# @RELATION: CALLS -> backup_dashboards
# @RETURN: int - Exit code (0 - success, 1 - error).
def main() -> int:
log_dir = Path("P:\\Superset\\010 Бекапы\\Logs")
logger = SupersetLogger(log_dir=log_dir, level=logging.INFO, console=True)
logger.info("[main][Entry] Starting Superset backup process.")
exit_code = 0
try:
clients = setup_clients(logger)
superset_backup_repo = Path("P:\\Superset\\010 Бекапы")
superset_backup_repo.mkdir(parents=True, exist_ok=True)
results = {}
environments = ['dev', 'sbx', 'prod', 'preprod']
backup_config = BackupConfig(rotate_archive=True)
for env in environments:
try:
results[env] = backup_dashboards(
clients[env],
env.upper(),
superset_backup_repo,
logger=logger,
config=backup_config
)
except Exception as env_error:
logger.critical(f"[main][Failure] Critical error for environment {env}: {env_error}", exc_info=True)
results[env] = False
if not all(results.values()):
exit_code = 1
except (RequestException, IOError) as e:
logger.critical(f"[main][Failure] Fatal error in main execution: {e}", exc_info=True)
exit_code = 1
logger.info("[main][Exit] Superset backup process finished.")
return exit_code
# [/DEF:main]
if __name__ == "__main__":
sys.exit(main())
# [/DEF:backup_script]

View File

@@ -0,0 +1,55 @@
slice_name: "FI-0083 \u0421\u0442\u0430\u0442\u0438\u0441\u0442\u0438\u043A\u0430\
\ \u043F\u043E \u0414\u0417/\u041F\u0414\u0417"
description: null
certified_by: null
certification_details: null
viz_type: pivot_table_v2
params:
datasource: 859__table
viz_type: pivot_table_v2
slice_id: 4019
groupbyColumns:
- dt
groupbyRows:
- counterparty_search_name
- attribute
time_grain_sqla: P1M
temporal_columns_lookup:
dt: true
metrics:
- m_debt_amount
- m_overdue_amount
metricsLayout: COLUMNS
adhoc_filters:
- clause: WHERE
comparator: No filter
expressionType: SIMPLE
operator: TEMPORAL_RANGE
subject: dt
row_limit: '90000'
order_desc: false
aggregateFunction: Sum
combineMetric: true
valueFormat: SMART_NUMBER
date_format: smart_date
rowOrder: key_a_to_z
colOrder: key_a_to_z
value_font_size: 12
header_font_size: 12
label_align: left
column_config:
m_debt_amount:
d3NumberFormat: ',d'
m_overdue_amount:
d3NumberFormat: ',d'
conditional_formatting: []
extra_form_data: {}
dashboards:
- 184
query_context: '{"datasource":{"id":859,"type":"table"},"force":false,"queries":[{"filters":[{"col":"dt","op":"TEMPORAL_RANGE","val":"No
filter"}],"extras":{"having":"","where":""},"applied_time_extras":{},"columns":[{"timeGrain":"P1M","columnType":"BASE_AXIS","sqlExpression":"dt","label":"dt","expressionType":"SQL"},"counterparty_search_name","attribute"],"metrics":["m_debt_amount","m_overdue_amount"],"orderby":[["m_debt_amount",true]],"annotation_layers":[],"row_limit":90000,"series_limit":0,"order_desc":false,"url_params":{},"custom_params":{},"custom_form_data":{}}],"form_data":{"datasource":"859__table","viz_type":"pivot_table_v2","slice_id":4019,"groupbyColumns":["dt"],"groupbyRows":["counterparty_search_name","attribute"],"time_grain_sqla":"P1M","temporal_columns_lookup":{"dt":true},"metrics":["m_debt_amount","m_overdue_amount"],"metricsLayout":"COLUMNS","adhoc_filters":[{"clause":"WHERE","comparator":"No
filter","expressionType":"SIMPLE","operator":"TEMPORAL_RANGE","subject":"dt"}],"row_limit":"90000","order_desc":false,"aggregateFunction":"Sum","combineMetric":true,"valueFormat":"SMART_NUMBER","date_format":"smart_date","rowOrder":"key_a_to_z","colOrder":"key_a_to_z","value_font_size":12,"header_font_size":12,"label_align":"left","column_config":{"m_debt_amount":{"d3NumberFormat":",d"},"m_overdue_amount":{"d3NumberFormat":",d"}},"conditional_formatting":[],"extra_form_data":{},"dashboards":[184],"force":false,"result_format":"json","result_type":"full"},"result_format":"json","result_type":"full"}'
cache_timeout: null
uuid: 9c293065-73e2-4d9b-a175-d188ff8ef575
version: 1.0.0
dataset_uuid: 9e645dc0-da25-4f61-9465-6e649b0bc4b1

View File

@@ -0,0 +1,13 @@
database_name: Prod Clickhouse
sqlalchemy_uri: clickhousedb+connect://viz_superset_click_prod:XXXXXXXXXX@rgm-s-khclk.hq.root.ad:443/dm
cache_timeout: null
expose_in_sqllab: true
allow_run_async: false
allow_ctas: false
allow_cvas: false
allow_dml: true
allow_file_upload: false
extra:
allows_virtual_table_explore: true
uuid: 97aced68-326a-4094-b381-27980560efa9
version: 1.0.0

View File

@@ -0,0 +1,119 @@
table_name: "FI-0080-06 \u041A\u0430\u043B\u0435\u043D\u0434\u0430\u0440\u044C (\u041E\
\u0431\u0449\u0438\u0439 \u0441\u043F\u0440\u0430\u0432\u043E\u0447\u043D\u0438\u043A\
)"
main_dttm_col: null
description: null
default_endpoint: null
offset: 0
cache_timeout: null
schema: dm_view
sql: "-- [HEADER]\r\n-- [\u041D\u0410\u0417\u041D\u0410\u0427\u0415\u041D\u0418\u0415\
]: \u041F\u043E\u043B\u0443\u0447\u0435\u043D\u0438\u0435 \u0434\u0438\u0430\u043F\
\u0430\u0437\u043E\u043D\u0430 \u0434\u0430\u0442 \u0434\u043B\u044F \u043E\u0442\
\u0447\u0435\u0442\u0430 \u043E \u0437\u0430\u0434\u043E\u043B\u0436\u0435\u043D\
\u043D\u043E\u0441\u0442\u044F\u0445 \u043F\u043E \u043E\u0431\u043E\u0440\u043E\
\u0442\u043D\u044B\u043C \u0441\u0440\u0435\u0434\u0441\u0442\u0432\u0430\u043C\r\
\n-- [\u041A\u041B\u042E\u0427\u0415\u0412\u042B\u0415 \u041A\u041E\u041B\u041E\u041D\
\u041A\u0418]:\r\n-- - from_dt_txt: \u041D\u0430\u0447\u0430\u043B\u044C\u043D\
\u0430\u044F \u0434\u0430\u0442\u0430 \u0432 \u0444\u043E\u0440\u043C\u0430\u0442\
\u0435 DD.MM.YYYY\r\n-- - to_dt_txt: \u041A\u043E\u043D\u0435\u0447\u043D\u0430\
\u044F \u0434\u0430\u0442\u0430 \u0432 \u0444\u043E\u0440\u043C\u0430\u0442\u0435\
\ DD.MM.YYYY\r\n-- [JINJA \u041F\u0410\u0420\u0410\u041C\u0415\u0422\u0420\u042B\
]:\r\n-- - {{ filter_values(\"yes_no_check\") }}: \u0424\u0438\u043B\u044C\u0442\
\u0440 \"\u0414\u0430/\u041D\u0435\u0442\" \u0434\u043B\u044F \u043E\u0433\u0440\
\u0430\u043D\u0438\u0447\u0435\u043D\u0438\u044F \u0432\u044B\u0431\u043E\u0440\u043A\
\u0438 \u043F\u043E \u0434\u0430\u0442\u0435\r\n-- [\u041B\u041E\u0413\u0418\u041A\
\u0410]: \u041E\u043F\u0440\u0435\u0434\u0435\u043B\u044F\u0435\u0442 \u043F\u043E\
\u0440\u043E\u0433\u043E\u0432\u0443\u044E \u0434\u0430\u0442\u0443 \u0432 \u0437\
\u0430\u0432\u0438\u0441\u0438\u043C\u043E\u0441\u0442\u0438 \u043E\u0442 \u0442\
\u0435\u043A\u0443\u0449\u0435\u0433\u043E \u0434\u043D\u044F \u043C\u0435\u0441\
\u044F\u0446\u0430 \u0438 \u0444\u0438\u043B\u044C\u0442\u0440\u0443\u0435\u0442\
\ \u0434\u0430\u043D\u043D\u044B\u0435\r\n\r\nWITH date_threshold AS (\r\n SELECT\
\ \r\n -- \u041E\u043F\u0440\u0435\u0434\u0435\u043B\u044F\u0435\u043C \u043F\
\u043E\u0440\u043E\u0433\u043E\u0432\u0443\u044E \u0434\u0430\u0442\u0443 \u0432\
\ \u0437\u0430\u0432\u0438\u0441\u0438\u043C\u043E\u0441\u0442\u0438 \u043E\u0442\
\ \u0442\u0435\u043A\u0443\u0449\u0435\u0433\u043E \u0434\u043D\u044F \r\n \
\ CASE \r\n WHEN toDayOfMonth(now()) <= 10 THEN \r\n \
\ toStartOfMonth(dateSub(MONTH, 1, now())) \r\n ELSE \r\n \
\ toStartOfMonth(now()) \r\n END AS cutoff_date \r\n),\r\nfiltered_dates\
\ AS (\r\n SELECT \r\n dt,\r\n formatDateTime(dt, '%d.%m.%Y') AS\
\ from_dt_txt,\r\n formatDateTime(dt, '%d.%m.%Y') AS to_dt_txt\r\n \
\ --dt as from_dt_txt,\r\n -- dt as to_dt_txt\r\n FROM dm_view.account_debt_for_working_capital_final\r\
\n WHERE 1=1\r\n -- \u0411\u0435\u0437\u043E\u043F\u0430\u0441\u043D\u0430\
\u044F \u043F\u0440\u043E\u0432\u0435\u0440\u043A\u0430 \u0444\u0438\u043B\u044C\
\u0442\u0440\u0430\r\n {% if filter_values(\"yes_no_check\") | length !=\
\ 0 %}\r\n {% if filter_values(\"yes_no_check\")[0] == \"\u0414\u0430\
\" %}\r\n AND dt < (SELECT cutoff_date FROM date_threshold)\r\n \
\ {% endif %}\r\n {% endif %}\r\n)\r\nSELECT \r\ndt,\r\n from_dt_txt,\r\
\n to_dt_txt,\r\n formatDateTime(toLastDayOfMonth(dt), '%d.%m.%Y') as last_day_of_month_dt_txt\r\
\nFROM \r\n filtered_dates\r\nGROUP BY \r\n dt, from_dt_txt, to_dt_txt\r\n\
ORDER BY \r\n dt DESC"
params: null
template_params: null
filter_select_enabled: true
fetch_values_predicate: null
extra: null
normalize_columns: false
uuid: fca62707-6947-4440-a16b-70cb6a5cea5b
metrics:
- metric_name: max_date
verbose_name: max_date
metric_type: count
expression: max(dt)
description: null
d3format: null
currency: null
extra:
warning_markdown: ''
warning_text: null
columns:
- column_name: from_dt_txt
verbose_name: null
is_dttm: true
is_active: true
type: String
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: '%Y'
extra: {}
- column_name: dt
verbose_name: null
is_dttm: true
is_active: true
type: Date
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra: {}
- column_name: last_day_of_month_dt_txt
verbose_name: null
is_dttm: false
is_active: true
type: String
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra: {}
- column_name: to_dt_txt
verbose_name: null
is_dttm: true
is_active: true
type: String
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra: {}
version: 1.0.0
database_uuid: 97aced68-326a-4094-b381-27980560efa9

View File

@@ -0,0 +1,190 @@
table_name: "FI-0090 \u0421\u0442\u0430\u0442\u0438\u0441\u0442\u0438\u043A\u0430\
\ \u043F\u043E \u0414\u0417/\u041F\u0414\u0417"
main_dttm_col: dt
description: null
default_endpoint: null
offset: 0
cache_timeout: null
schema: dm_view
sql: "-- [JINJA_BLOCK] \u0426\u0435\u043D\u0442\u0440\u0430\u043B\u0438\u0437\u043E\
\u0432\u0430\u043D\u043D\u043E\u0435 \u043E\u043F\u0440\u0435\u0434\u0435\u043B\u0435\
\u043D\u0438\u0435 \u0432\u0441\u0435\u0445 Jinja \u043F\u0435\u0440\u0435\u043C\
\u0435\u043D\u043D\u044B\u0445\r\n{% set raw_to = filter_values('last_day_of_month_dt_txt')[0]\
\ \r\n if filter_values('last_day_of_month_dt_txt') else '01.05.2025'\
\ %}\r\n\r\n{# \u0440\u0430\u0437\u0431\u0438\u0432\u0430\u0435\u043C \xABDD.MM.YYYY\xBB\
\ \u043D\u0430 \u0447\u0430\u0441\u0442\u0438 #}\r\n{% set to_parts = raw_to.split('.')\
\ %}\r\n\r\n{# \u0441\u043E\u0431\u0438\u0440\u0430\u0435\u043C ISO\u2011\u0441\u0442\
\u0440\u043E\u043A\u0443 \xABYYYY-MM-DD\xBB #}\r\n{% set to_dt = to_parts[2] \
\ ~ '-' ~ to_parts[1] ~ '-' ~ to_parts[0] %}\r\n\r\nwith \r\ncp_relations_type\
\ AS (\r\n select * from ( SELECT \r\n ctd.counterparty_code AS counterparty_code,\r\
\n min(dt_from) as dt_from,\r\n max(dt_to) as dt_to,\r\n crt.relation_type_code\
\ || ' ' || crt.relation_type_name AS relation_type_code_name\r\n FROM\r\n \
\ dm_view.counterparty_td ctd\r\n JOIN dm_view.counterparty_relation_type_texts\
\ crt \r\n ON ctd.relation_type_code = crt.relation_type_code\r\n GROUP\
\ BY\r\n ctd.counterparty_code, ctd.counterparty_full_name,\r\n crt.relation_type_code,crt.relation_type_name)\r\
\n WHERE \r\n dt_from <= toDate('{{to_dt }}') AND \r\n \
\ dt_to >= toDate('{{to_dt }}')\r\n ),\r\nt_debt as \r\n(SELECT dt, \r\n\
counterparty_search_name,\r\ncp_relations_type.relation_type_code_name as relation_type_code_name,\r\
\nunit_balance_code || ' ' || unit_balance_name as unit_balance_code_name,\r\n'1.\
\ \u0421\u0443\u043C\u043C\u0430' as attribute,\r\nsum(debt_balance_subposition_no_revaluation_usd_amount)\
\ as debt_amount,\r\nsumIf(debt_balance_subposition_no_revaluation_usd_amount,dt_overdue\
\ < dt) as overdue_amount\r\nfrom dm_view.account_debt_for_working_capital t_debt\r\
\njoin cp_relations_type ON\r\ncp_relations_type.counterparty_code = t_debt.counterparty_code\r\
\nwhere dt = toLastDayOfMonth(dt)\r\nand match(general_ledger_account_code,'((62)|(60)|(76))')\r\
\nand debit_or_credit = 'S'\r\nand account_type = 'D'\r\nand dt between addMonths(toDate('{{to_dt\
\ }}'),-12) and toDate('{{to_dt }}')\r\ngroup by dt, counterparty_search_name,unit_balance_code_name,relation_type_code_name\r\
\n),\r\n\r\nt_transaction_count_base as \r\n(\r\nselect *,\r\ncp_relations_type.relation_type_code_name\
\ as relation_type_code_name,\r\nunit_balance_code || ' ' || unit_balance_name as\
\ unit_balance_code_name,\r\n case when dt_overdue<dt_clearing then\r\n \
\ dateDiff(day, dt_overdue, dt_clearing) \r\n else 0\r\n end\
\ as overdue_days\r\nfrom dm_view.accounting_documents_leading_to_debt t_docs\r\n\
join cp_relations_type ON\r\ncp_relations_type.counterparty_code = t_docs.counterparty_code\r\
\nwhere 1=1\r\n\r\nand match(general_ledger_account_code,'((62)|(60)|(76))')\r\n\
and debit_or_credit = 'S'\r\nand account_type = 'D'\r\n)\r\n\r\nselect * from t_debt\r\
\n\r\nunion all \r\n\r\nselect toLastDayOfMonth(dt_debt) as dt, \r\ncounterparty_search_name,\r\
\nrelation_type_code_name,\r\nunit_balance_code_name,\r\n'2. \u043A\u043E\u043B\u0438\
\u0447\u0435\u0441\u0442\u0432\u043E \u0442\u0440\u0430\u043D\u0437\u0430\u043A\u0446\
\u0438\u0439 \u0432 \u043C\u0435\u0441\u044F\u0446' as attribute,\r\ncount(1) as\
\ debt_amount,\r\nnull as overdue_amount\r\nfrom t_transaction_count_base\r\nwhere\
\ dt_debt between addMonths(toDate('{{to_dt }}'),-12) and toDate('{{to_dt }}')\r\
\ngroup by toLastDayOfMonth(dt_debt), \r\ncounterparty_search_name,\r\nrelation_type_code_name,\r\
\nunit_balance_code_name,attribute\r\n\r\nunion all \r\n\r\nselect toLastDayOfMonth(dt_clearing)\
\ as dt, \r\ncounterparty_search_name,\r\nrelation_type_code_name,\r\nunit_balance_code_name,\r\
\n'2. \u043A\u043E\u043B\u0438\u0447\u0435\u0441\u0442\u0432\u043E \u0442\u0440\u0430\
\u043D\u0437\u0430\u043A\u0446\u0438\u0439 \u0432 \u043C\u0435\u0441\u044F\u0446\
' as attribute,\r\nnull as debt_amount,\r\ncount(1) as overdue_amount\r\nfrom t_transaction_count_base\r\
\nwhere dt_clearing between addMonths(toDate('{{to_dt }}'),-12) and toDate('{{to_dt\
\ }}')\r\nand overdue_days > 0\r\ngroup by toLastDayOfMonth(dt_clearing), \r\ncounterparty_search_name,\r\
\nrelation_type_code_name,\r\nunit_balance_code_name,attribute\r\n\r\nunion all\
\ \r\n\r\nselect toLastDayOfMonth(dt_clearing) as dt, \r\ncounterparty_search_name,\r\
\nrelation_type_code_name,\r\nunit_balance_code_name,\r\nmultiIf(\r\noverdue_days\
\ < 30,'3. \u0434\u043E 30',\r\noverdue_days between 30 and 60, '4. \u043E\u0442\
\ 30 \u0434\u043E 60',\r\noverdue_days between 61 and 90, '5. \u043E\u0442 61 \u0434\
\u043E 90',\r\noverdue_days>90,'6. \u0431\u043E\u043B\u0435\u0435 90 \u0434\u043D\
',\r\nnull\r\n)\r\n as attribute,\r\nnull as debt_amount,\r\ncount(1) as overdue_amount\r\
\nfrom t_transaction_count_base\r\nwhere dt_clearing between addMonths(toDate('{{to_dt\
\ }}'),-12) and toDate('{{to_dt }}')\r\nand overdue_days > 0\r\ngroup by toLastDayOfMonth(dt_clearing),\
\ \r\ncounterparty_search_name,\r\nrelation_type_code_name,\r\nattribute,unit_balance_code_name,attribute\r\
\n"
params: null
template_params: null
filter_select_enabled: true
fetch_values_predicate: null
extra: null
normalize_columns: false
uuid: 9e645dc0-da25-4f61-9465-6e649b0bc4b1
metrics:
- metric_name: m_debt_amount
verbose_name: "\u0414\u0417, $"
metric_type: count
expression: sum(debt_amount)
description: null
d3format: null
currency: null
extra:
warning_markdown: ''
warning_text: null
- metric_name: m_overdue_amount
verbose_name: "\u041F\u0414\u0417, $"
metric_type: null
expression: sum(overdue_amount)
description: null
d3format: null
currency: null
extra:
warning_markdown: ''
warning_text: null
columns:
- column_name: debt_amount
verbose_name: null
is_dttm: false
is_active: true
type: Nullable(Decimal(38, 2))
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra:
warning_markdown: null
- column_name: overdue_amount
verbose_name: null
is_dttm: false
is_active: true
type: Nullable(Decimal(38, 2))
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra:
warning_markdown: null
- column_name: dt
verbose_name: null
is_dttm: true
is_active: true
type: Nullable(Date)
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra:
warning_markdown: null
- column_name: unit_balance_code_name
verbose_name: null
is_dttm: false
is_active: true
type: Nullable(String)
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra:
warning_markdown: null
- column_name: relation_type_code_name
verbose_name: null
is_dttm: false
is_active: true
type: Nullable(String)
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra:
warning_markdown: null
- column_name: counterparty_search_name
verbose_name: null
is_dttm: false
is_active: true
type: Nullable(String)
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra:
warning_markdown: null
- column_name: attribute
verbose_name: null
is_dttm: false
is_active: true
type: Nullable(String)
advanced_data_type: null
groupby: true
filterable: true
expression: null
description: null
python_date_format: null
extra:
warning_markdown: null
version: 1.0.0
database_uuid: 97aced68-326a-4094-b381-27980560efa9

View File

@@ -0,0 +1,3 @@
version: 1.0.0
type: Dashboard
timestamp: '2026-01-14T11:21:08.078620+00:00'

View File

@@ -1,79 +0,0 @@
# [DEF:debug_db_api:Module]
#
# @SEMANTICS: debug, api, database, script
# @PURPOSE: Script for debugging the structure of the database API response.
# @LAYER: App
# @RELATION: DEPENDS_ON -> superset_tool.client
# @RELATION: DEPENDS_ON -> superset_tool.utils
# @PUBLIC_API: debug_database_api
# [SECTION: IMPORTS]
import json
import logging
from superset_tool.client import SupersetClient
from superset_tool.utils.init_clients import setup_clients
from superset_tool.utils.logger import SupersetLogger
# [/SECTION]
# [DEF:debug_database_api:Function]
# @PURPOSE: Debugs the structure of the database API response.
# @RELATION: CALLS -> setup_clients
# @RELATION: CALLS -> client.get_databases
def debug_database_api():
logger = SupersetLogger(name="debug_db_api", level=logging.DEBUG)
# Initialize the clients
clients = setup_clients(logger)
# Log JWT bearer tokens for each client
for env_name, client in clients.items():
try:
# Ensure authentication (access token fetched via headers property)
_ = client.headers
token = client.network._tokens.get("access_token")
logger.info(f"[debug_database_api][Token] Bearer token for {env_name}: {token}")
except Exception as exc:
logger.error(f"[debug_database_api][Token] Failed to retrieve token for {env_name}: {exc}", exc_info=True)
# Check the available environments
print("Available environments:")
for env_name, client in clients.items():
print(f" {env_name}: {client.config.base_url}")
# Pick two environments for testing
if len(clients) < 2:
print("Not enough environments for testing")
return
env_names = list(clients.keys())[:2]
from_env, to_env = env_names[0], env_names[1]
from_client = clients[from_env]
to_client = clients[to_env]
print(f"\nТестируем API для окружений: {from_env} -> {to_env}")
try:
# Get the list of databases from the first environment
print(f"\nFetching the database list from {from_env}:")
count, dbs = from_client.get_databases()
print(f"Found {count} databases")
print("Full API response:")
print(json.dumps({"count": count, "result": dbs}, indent=2, ensure_ascii=False))
# Get the list of databases from the second environment
print(f"\nFetching the database list from {to_env}:")
count, dbs = to_client.get_databases()
print(f"Found {count} databases")
print("Full API response:")
print(json.dumps({"count": count, "result": dbs}, indent=2, ensure_ascii=False))
except Exception as e:
print(f"Error while testing the API: {e}")
import traceback
traceback.print_exc()
# [/DEF:debug_database_api]
if __name__ == "__main__":
debug_database_api()
# [/DEF:debug_db_api]

42
docs/migration_mapping.md Normal file
View File

@@ -0,0 +1,42 @@
# Database Mapping in Migration
This document describes how to use the database mapping feature during Superset dashboard migrations.
## Overview
When migrating dashboards between different Superset environments (e.g., from Dev to Prod), the underlying databases often have different UUIDs even if they represent the same data source. The Database Mapping feature allows you to define these relationships so that migrated assets automatically point to the correct database in the target environment.
## How it Works
1. **Fuzzy Matching**: The system automatically suggests mappings by comparing database names between environments using the RapidFuzz library.
2. **Persistence**: Mappings are stored in a local SQLite database (`mappings.db`) and are reused for future migrations between the same environment pair.
3. **Asset Interception**: During migration, the system intercepts the Superset export ZIP archive, modifies the `database_uuid` in the dataset YAML files, and re-packages the archive before importing it to the target.
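The interception step above can be illustrated with a minimal sketch: the export archive is read in memory, `database_uuid` lines in the dataset YAML files are rewritten according to the saved mappings, and the archive is re-packed. The helper below is illustrative only; its name, the `uuid_map` argument, and the path check are assumptions rather than the project's actual code.
```python
# Minimal sketch of the interception step, NOT the project's actual implementation.
import io
import re
import zipfile

def rewrite_database_uuids(zip_bytes: bytes, uuid_map: dict) -> bytes:
    """Return a new export archive with dataset database_uuid values remapped."""
    out_buf = io.BytesIO()
    with zipfile.ZipFile(io.BytesIO(zip_bytes)) as src, \
         zipfile.ZipFile(out_buf, "w", zipfile.ZIP_DEFLATED) as dst:
        for item in src.infolist():
            data = src.read(item.filename)
            # Dataset definitions live in YAML files under a datasets/ folder.
            if "/datasets/" in item.filename and item.filename.endswith(".yaml"):
                text = data.decode("utf-8")
                for source_uuid, target_uuid in uuid_map.items():
                    text = re.sub(
                        rf"^database_uuid: {re.escape(source_uuid)}$",
                        f"database_uuid: {target_uuid}",
                        text,
                        flags=re.MULTILINE,
                    )
                data = text.encode("utf-8")
            dst.writestr(item, data)
    return out_buf.getvalue()
```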
## Usage Instructions
### 1. Define Mappings
1. Navigate to the **Database Mapping** tab in the application.
2. Select your **Source** and **Target** environments.
3. Click **Fetch Databases & Suggestions**.
4. Review the suggested mappings (highlighted in green).
5. If a suggestion is incorrect or missing, use the dropdown in the "Target Database" column to select the correct one.
6. Mappings are saved automatically when you select a target database.
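The suggestions reviewed in steps 4-5 come from name-based fuzzy matching (RapidFuzz, as described under "How it Works"). A rough sketch of how such scoring might be produced, assuming each database is a dict with `database_name` and `uuid` keys; the real `suggest_mappings` helper may use different fields and thresholds:
```python
# Illustration only; field names and the 80-point cutoff are assumptions.
from rapidfuzz import fuzz, process

def suggest_by_name(source_dbs, target_dbs, min_score=80):
    """Suggest source->target pairs whose database names are sufficiently similar."""
    targets_by_name = {db["database_name"]: db for db in target_dbs}
    suggestions = []
    for src in source_dbs:
        match = process.extractOne(
            src["database_name"],
            list(targets_by_name.keys()),
            scorer=fuzz.token_sort_ratio,
            score_cutoff=min_score,
        )
        if match:
            name, score, _ = match
            suggestions.append({
                "source_uuid": src.get("uuid"),
                "target_uuid": targets_by_name[name].get("uuid"),
                "score": score,
            })
    return suggestions
```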
### 2. Run Migration with Database Replacement
1. Go to the **Migration** dashboard.
2. Select the **Source** and **Target** environments.
3. Select the dashboards or datasets you want to migrate.
4. Enable the **Replace Database (Apply Mappings)** toggle.
5. Click **Start Migration**.
### 3. Handling Missing Mappings
If the migration engine encounters a database that has no defined mapping, the process will pause, and a modal will appear prompting you to select a target database on-the-fly. Once selected, the mapping is saved, and the migration continues.
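Conceptually, the pause-and-resume flow looks like the sketch below. Every name here (`task_manager`, `pause`, `wait_for_resolution`) is a placeholder for illustration; the actual mechanism is the AWAITING_MAPPING task state handled by the migration plugin.
```python
# Conceptual sketch only; these names are placeholders, not the project's API.
async def resolve_mapping(db_uuid: str, mappings: dict, task_manager, task_id: str) -> str:
    """Return the target database UUID, pausing the task if no mapping exists yet."""
    if db_uuid in mappings:
        return mappings[db_uuid]
    # Pause the migration so the UI can prompt the user to pick a target database.
    await task_manager.pause(task_id, reason="AWAITING_MAPPING", payload={"database_uuid": db_uuid})
    # Resumes once the user has chosen a target; the new mapping is persisted for reuse.
    return await task_manager.wait_for_resolution(task_id, db_uuid)
```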
## Troubleshooting
- **Mapping not applied**: Ensure the "Replace Database" toggle is enabled.
- **Wrong database in target**: Check the mapping table for the specific environment pair and correct any errors.
- **Connection errors**: Ensure both Superset environments are reachable and credentials are correct in Settings.

7
frontend/.eslintignore Normal file
View File

@@ -0,0 +1,7 @@
node_modules/
dist/
build/
.svelte-kit/
.vite/
coverage/
*.min.js

9
frontend/.prettierignore Normal file
View File

@@ -0,0 +1,9 @@
node_modules/
dist/
build/
.svelte-kit/
.vite/
coverage/
package-lock.json
yarn.lock
pnpm-lock.yaml

View File

@@ -1,186 +0,0 @@
// this file is generated — do not edit it
/// <reference types="@sveltejs/kit" />
/**
* Environment variables [loaded by Vite](https://vitejs.dev/guide/env-and-mode.html#env-files) from `.env` files and `process.env`. Like [`$env/dynamic/private`](https://svelte.dev/docs/kit/$env-dynamic-private), this module cannot be imported into client-side code. This module only includes variables that _do not_ begin with [`config.kit.env.publicPrefix`](https://svelte.dev/docs/kit/configuration#env) _and do_ start with [`config.kit.env.privatePrefix`](https://svelte.dev/docs/kit/configuration#env) (if configured).
*
* _Unlike_ [`$env/dynamic/private`](https://svelte.dev/docs/kit/$env-dynamic-private), the values exported from this module are statically injected into your bundle at build time, enabling optimisations like dead code elimination.
*
* ```ts
* import { API_KEY } from '$env/static/private';
* ```
*
* Note that all environment variables referenced in your code should be declared (for example in an `.env` file), even if they don't have a value until the app is deployed:
*
* ```
* MY_FEATURE_FLAG=""
* ```
*
* You can override `.env` values from the command line like so:
*
* ```sh
* MY_FEATURE_FLAG="enabled" npm run dev
* ```
*/
declare module '$env/static/private' {
export const LESSOPEN: string;
export const USER: string;
export const npm_config_user_agent: string;
export const npm_node_execpath: string;
export const SHLVL: string;
export const npm_config_noproxy: string;
export const HOME: string;
export const OLDPWD: string;
export const npm_package_json: string;
export const PS1: string;
export const npm_config_userconfig: string;
export const npm_config_local_prefix: string;
export const DBUS_SESSION_BUS_ADDRESS: string;
export const WSL_DISTRO_NAME: string;
export const COLOR: string;
export const WAYLAND_DISPLAY: string;
export const LOGNAME: string;
export const NAME: string;
export const WSL_INTEROP: string;
export const PULSE_SERVER: string;
export const _: string;
export const npm_config_prefix: string;
export const npm_config_npm_version: string;
export const TERM: string;
export const npm_config_cache: string;
export const npm_config_node_gyp: string;
export const PATH: string;
export const NODE: string;
export const npm_package_name: string;
export const XDG_RUNTIME_DIR: string;
export const DISPLAY: string;
export const LANG: string;
export const VIRTUAL_ENV_PROMPT: string;
export const LS_COLORS: string;
export const npm_lifecycle_script: string;
export const SHELL: string;
export const npm_package_version: string;
export const npm_lifecycle_event: string;
export const GOOGLE_CLOUD_PROJECT: string;
export const LESSCLOSE: string;
export const VIRTUAL_ENV: string;
export const npm_config_globalconfig: string;
export const npm_config_init_module: string;
export const PWD: string;
export const npm_execpath: string;
export const XDG_DATA_DIRS: string;
export const npm_config_global_prefix: string;
export const npm_command: string;
export const WSL2_GUI_APPS_ENABLED: string;
export const HOSTTYPE: string;
export const WSLENV: string;
export const INIT_CWD: string;
export const EDITOR: string;
export const NODE_ENV: string;
}
/**
* Similar to [`$env/static/private`](https://svelte.dev/docs/kit/$env-static-private), except that it only includes environment variables that begin with [`config.kit.env.publicPrefix`](https://svelte.dev/docs/kit/configuration#env) (which defaults to `PUBLIC_`), and can therefore safely be exposed to client-side code.
*
* Values are replaced statically at build time.
*
* ```ts
* import { PUBLIC_BASE_URL } from '$env/static/public';
* ```
*/
declare module '$env/static/public' {
export const PUBLIC_WS_URL: string;
}
/**
* This module provides access to runtime environment variables, as defined by the platform you're running on. For example if you're using [`adapter-node`](https://github.com/sveltejs/kit/tree/main/packages/adapter-node) (or running [`vite preview`](https://svelte.dev/docs/kit/cli)), this is equivalent to `process.env`. This module only includes variables that _do not_ begin with [`config.kit.env.publicPrefix`](https://svelte.dev/docs/kit/configuration#env) _and do_ start with [`config.kit.env.privatePrefix`](https://svelte.dev/docs/kit/configuration#env) (if configured).
*
* This module cannot be imported into client-side code.
*
* ```ts
* import { env } from '$env/dynamic/private';
* console.log(env.DEPLOYMENT_SPECIFIC_VARIABLE);
* ```
*
* > [!NOTE] In `dev`, `$env/dynamic` always includes environment variables from `.env`. In `prod`, this behavior will depend on your adapter.
*/
declare module '$env/dynamic/private' {
export const env: {
LESSOPEN: string;
USER: string;
npm_config_user_agent: string;
npm_node_execpath: string;
SHLVL: string;
npm_config_noproxy: string;
HOME: string;
OLDPWD: string;
npm_package_json: string;
PS1: string;
npm_config_userconfig: string;
npm_config_local_prefix: string;
DBUS_SESSION_BUS_ADDRESS: string;
WSL_DISTRO_NAME: string;
COLOR: string;
WAYLAND_DISPLAY: string;
LOGNAME: string;
NAME: string;
WSL_INTEROP: string;
PULSE_SERVER: string;
_: string;
npm_config_prefix: string;
npm_config_npm_version: string;
TERM: string;
npm_config_cache: string;
npm_config_node_gyp: string;
PATH: string;
NODE: string;
npm_package_name: string;
XDG_RUNTIME_DIR: string;
DISPLAY: string;
LANG: string;
VIRTUAL_ENV_PROMPT: string;
LS_COLORS: string;
npm_lifecycle_script: string;
SHELL: string;
npm_package_version: string;
npm_lifecycle_event: string;
GOOGLE_CLOUD_PROJECT: string;
LESSCLOSE: string;
VIRTUAL_ENV: string;
npm_config_globalconfig: string;
npm_config_init_module: string;
PWD: string;
npm_execpath: string;
XDG_DATA_DIRS: string;
npm_config_global_prefix: string;
npm_command: string;
WSL2_GUI_APPS_ENABLED: string;
HOSTTYPE: string;
WSLENV: string;
INIT_CWD: string;
EDITOR: string;
NODE_ENV: string;
[key: `PUBLIC_${string}`]: undefined;
[key: `${string}`]: string | undefined;
}
}
/**
* Similar to [`$env/dynamic/private`](https://svelte.dev/docs/kit/$env-dynamic-private), but only includes variables that begin with [`config.kit.env.publicPrefix`](https://svelte.dev/docs/kit/configuration#env) (which defaults to `PUBLIC_`), and can therefore safely be exposed to client-side code.
*
* Note that public dynamic environment variables must all be sent from the server to the client, causing larger network requests — when possible, use `$env/static/public` instead.
*
* ```ts
* import { env } from '$env/dynamic/public';
* console.log(env.PUBLIC_DEPLOYMENT_SPECIFIC_VARIABLE);
* ```
*/
declare module '$env/dynamic/public' {
export const env: {
PUBLIC_WS_URL: string;
[key: `PUBLIC_${string}`]: string | undefined;
}
}

View File

@@ -1,31 +0,0 @@
export { matchers } from './matchers.js';
export const nodes = [
() => import('./nodes/0'),
() => import('./nodes/1'),
() => import('./nodes/2'),
() => import('./nodes/3')
];
export const server_loads = [];
export const dictionary = {
"/": [2],
"/settings": [3]
};
export const hooks = {
handleError: (({ error }) => { console.error(error) }),
reroute: (() => {}),
transport: {}
};
export const decoders = Object.fromEntries(Object.entries(hooks.transport).map(([k, v]) => [k, v.decode]));
export const encoders = Object.fromEntries(Object.entries(hooks.transport).map(([k, v]) => [k, v.encode]));
export const hash = false;
export const decode = (type, value) => decoders[type](value);
export { default as root } from '../root.js';

View File

@@ -1 +0,0 @@
export const matchers = {};

View File

@@ -1,3 +0,0 @@
import * as universal from "../../../../src/routes/+layout.ts";
export { universal };
export { default as component } from "../../../../src/routes/+layout.svelte";

View File

@@ -1 +0,0 @@
export { default as component } from "../../../../src/routes/+error.svelte";

View File

@@ -1,3 +0,0 @@
import * as universal from "../../../../src/routes/+page.ts";
export { universal };
export { default as component } from "../../../../src/routes/+page.svelte";


@@ -1,3 +0,0 @@
import * as universal from "../../../../src/routes/settings/+page.ts";
export { universal };
export { default as component } from "../../../../src/routes/settings/+page.svelte";


@@ -1,31 +0,0 @@
export { matchers } from './matchers.js';
export const nodes = [
() => import('./nodes/0'),
() => import('./nodes/1'),
() => import('./nodes/2'),
() => import('./nodes/3')
];
export const server_loads = [];
export const dictionary = {
"/": [2],
"/settings": [3]
};
export const hooks = {
handleError: (({ error }) => { console.error(error) }),
reroute: (() => {}),
transport: {}
};
export const decoders = Object.fromEntries(Object.entries(hooks.transport).map(([k, v]) => [k, v.decode]));
export const encoders = Object.fromEntries(Object.entries(hooks.transport).map(([k, v]) => [k, v.encode]));
export const hash = false;
export const decode = (type, value) => decoders[type](value);
export { default as root } from '../root.js';


@@ -1 +0,0 @@
export const matchers = {};


@@ -1,3 +0,0 @@
import * as universal from "../../../../src/routes/+layout.ts";
export { universal };
export { default as component } from "../../../../src/routes/+layout.svelte";


@@ -1 +0,0 @@
export { default as component } from "../../../../src/routes/+error.svelte";


@@ -1,3 +0,0 @@
import * as universal from "../../../../src/routes/+page.ts";
export { universal };
export { default as component } from "../../../../src/routes/+page.svelte";


@@ -1,3 +0,0 @@
import * as universal from "../../../../src/routes/settings/+page.ts";
export { universal };
export { default as component } from "../../../../src/routes/settings/+page.svelte";


@@ -1,3 +0,0 @@
import { asClassComponent } from 'svelte/legacy';
import Root from './root.svelte';
export default asClassComponent(Root);


@@ -1,68 +0,0 @@
<!-- This file is generated by @sveltejs/kit — do not edit it! -->
<svelte:options runes={true} />
<script>
import { setContext, onMount, tick } from 'svelte';
import { browser } from '$app/environment';
// stores
let { stores, page, constructors, components = [], form, data_0 = null, data_1 = null } = $props();
if (!browser) {
// svelte-ignore state_referenced_locally
setContext('__svelte__', stores);
}
if (browser) {
$effect.pre(() => stores.page.set(page));
} else {
// svelte-ignore state_referenced_locally
stores.page.set(page);
}
$effect(() => {
stores;page;constructors;components;form;data_0;data_1;
stores.page.notify();
});
let mounted = $state(false);
let navigated = $state(false);
let title = $state(null);
onMount(() => {
const unsubscribe = stores.page.subscribe(() => {
if (mounted) {
navigated = true;
tick().then(() => {
title = document.title || 'untitled page';
});
}
});
mounted = true;
return unsubscribe;
});
const Pyramid_1=$derived(constructors[1])
</script>
{#if constructors[1]}
{@const Pyramid_0 = constructors[0]}
<!-- svelte-ignore binding_property_non_reactive -->
<Pyramid_0 bind:this={components[0]} data={data_0} {form} params={page.params}>
<!-- svelte-ignore binding_property_non_reactive -->
<Pyramid_1 bind:this={components[1]} data={data_1} {form} params={page.params} />
</Pyramid_0>
{:else}
{@const Pyramid_0 = constructors[0]}
<!-- svelte-ignore binding_property_non_reactive -->
<Pyramid_0 bind:this={components[0]} data={data_0} {form} params={page.params} />
{/if}
{#if mounted}
<div id="svelte-announcer" aria-live="assertive" aria-atomic="true" style="position: absolute; left: 0; top: 0; clip: rect(0 0 0 0); clip-path: inset(50%); overflow: hidden; white-space: nowrap; width: 1px; height: 1px">
{#if navigated}
{title}
{/if}
</div>
{/if}


@@ -1,53 +0,0 @@
import root from '../root.js';
import { set_building, set_prerendering } from '__sveltekit/environment';
import { set_assets } from '$app/paths/internal/server';
import { set_manifest, set_read_implementation } from '__sveltekit/server';
import { set_private_env, set_public_env } from '../../../node_modules/@sveltejs/kit/src/runtime/shared-server.js';
export const options = {
app_template_contains_nonce: false,
async: false,
csp: {"mode":"auto","directives":{"upgrade-insecure-requests":false,"block-all-mixed-content":false},"reportOnly":{"upgrade-insecure-requests":false,"block-all-mixed-content":false}},
csrf_check_origin: true,
csrf_trusted_origins: [],
embedded: false,
env_public_prefix: 'PUBLIC_',
env_private_prefix: '',
hash_routing: false,
hooks: null, // added lazily, via `get_hooks`
preload_strategy: "modulepreload",
root,
service_worker: false,
service_worker_options: undefined,
templates: {
app: ({ head, body, assets, nonce, env }) => "<!DOCTYPE html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<link rel=\"icon\" href=\"" + assets + "/favicon.png\" />\n\t\t<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n\t\t" + head + "\n\t</head>\n\t<body data-sveltekit-preload-data=\"hover\">\n\t\t<div style=\"display: contents\">" + body + "</div>\n\t</body>\n</html>\n",
error: ({ status, message }) => "<!doctype html>\n<html lang=\"en\">\n\t<head>\n\t\t<meta charset=\"utf-8\" />\n\t\t<title>" + message + "</title>\n\n\t\t<style>\n\t\t\tbody {\n\t\t\t\t--bg: white;\n\t\t\t\t--fg: #222;\n\t\t\t\t--divider: #ccc;\n\t\t\t\tbackground: var(--bg);\n\t\t\t\tcolor: var(--fg);\n\t\t\t\tfont-family:\n\t\t\t\t\tsystem-ui,\n\t\t\t\t\t-apple-system,\n\t\t\t\t\tBlinkMacSystemFont,\n\t\t\t\t\t'Segoe UI',\n\t\t\t\t\tRoboto,\n\t\t\t\t\tOxygen,\n\t\t\t\t\tUbuntu,\n\t\t\t\t\tCantarell,\n\t\t\t\t\t'Open Sans',\n\t\t\t\t\t'Helvetica Neue',\n\t\t\t\t\tsans-serif;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tjustify-content: center;\n\t\t\t\theight: 100vh;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t.error {\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t\tmax-width: 32rem;\n\t\t\t\tmargin: 0 1rem;\n\t\t\t}\n\n\t\t\t.status {\n\t\t\t\tfont-weight: 200;\n\t\t\t\tfont-size: 3rem;\n\t\t\t\tline-height: 1;\n\t\t\t\tposition: relative;\n\t\t\t\ttop: -0.05rem;\n\t\t\t}\n\n\t\t\t.message {\n\t\t\t\tborder-left: 1px solid var(--divider);\n\t\t\t\tpadding: 0 0 0 1rem;\n\t\t\t\tmargin: 0 0 0 1rem;\n\t\t\t\tmin-height: 2.5rem;\n\t\t\t\tdisplay: flex;\n\t\t\t\talign-items: center;\n\t\t\t}\n\n\t\t\t.message h1 {\n\t\t\t\tfont-weight: 400;\n\t\t\t\tfont-size: 1em;\n\t\t\t\tmargin: 0;\n\t\t\t}\n\n\t\t\t@media (prefers-color-scheme: dark) {\n\t\t\t\tbody {\n\t\t\t\t\t--bg: #222;\n\t\t\t\t\t--fg: #ddd;\n\t\t\t\t\t--divider: #666;\n\t\t\t\t}\n\t\t\t}\n\t\t</style>\n\t</head>\n\t<body>\n\t\t<div class=\"error\">\n\t\t\t<span class=\"status\">" + status + "</span>\n\t\t\t<div class=\"message\">\n\t\t\t\t<h1>" + message + "</h1>\n\t\t\t</div>\n\t\t</div>\n\t</body>\n</html>\n"
},
version_hash: "1eogxsl"
};
export async function get_hooks() {
let handle;
let handleFetch;
let handleError;
let handleValidationError;
let init;
let reroute;
let transport;
return {
handle,
handleFetch,
handleError,
handleValidationError,
init,
reroute,
transport
};
}
export { set_assets, set_building, set_manifest, set_prerendering, set_private_env, set_public_env, set_read_implementation };


@@ -1,42 +0,0 @@
// this file is generated — do not edit it
declare module "svelte/elements" {
export interface HTMLAttributes<T> {
'data-sveltekit-keepfocus'?: true | '' | 'off' | undefined | null;
'data-sveltekit-noscroll'?: true | '' | 'off' | undefined | null;
'data-sveltekit-preload-code'?:
| true
| ''
| 'eager'
| 'viewport'
| 'hover'
| 'tap'
| 'off'
| undefined
| null;
'data-sveltekit-preload-data'?: true | '' | 'hover' | 'tap' | 'off' | undefined | null;
'data-sveltekit-reload'?: true | '' | 'off' | undefined | null;
'data-sveltekit-replacestate'?: true | '' | 'off' | undefined | null;
}
}
export {};
declare module "$app/types" {
export interface AppTypes {
RouteId(): "/" | "/settings";
RouteParams(): {
};
LayoutParams(): {
"/": Record<string, never>;
"/settings": Record<string, never>
};
Pathname(): "/" | "/settings" | "/settings/";
ResolvedPathname(): `${"" | `/${string}`}${ReturnType<AppTypes['Pathname']>}`;
Asset(): string & {};
}
}


@@ -1,162 +0,0 @@
{
".svelte-kit/generated/client-optimized/app.js": {
"file": "_app/immutable/entry/app.BXnpILpp.js",
"name": "entry/app",
"src": ".svelte-kit/generated/client-optimized/app.js",
"isEntry": true,
"imports": [
"_BtL0wB3H.js",
"_cv2LK44M.js",
"_BxZpmA7Z.js",
"_vVxDbqKK.js"
],
"dynamicImports": [
".svelte-kit/generated/client-optimized/nodes/0.js",
".svelte-kit/generated/client-optimized/nodes/1.js",
".svelte-kit/generated/client-optimized/nodes/2.js",
".svelte-kit/generated/client-optimized/nodes/3.js"
]
},
".svelte-kit/generated/client-optimized/nodes/0.js": {
"file": "_app/immutable/nodes/0.DZdF_zz-.js",
"name": "nodes/0",
"src": ".svelte-kit/generated/client-optimized/nodes/0.js",
"isEntry": true,
"isDynamicEntry": true,
"imports": [
"_cv2LK44M.js",
"_CRLlKr96.js",
"_BtL0wB3H.js",
"_xdjHc-A2.js",
"_DXE57cnx.js",
"_Dbod7Wv8.js"
],
"css": [
"_app/immutable/assets/0.RZHRvmcL.css"
]
},
".svelte-kit/generated/client-optimized/nodes/1.js": {
"file": "_app/immutable/nodes/1.Bh-fCbID.js",
"name": "nodes/1",
"src": ".svelte-kit/generated/client-optimized/nodes/1.js",
"isEntry": true,
"isDynamicEntry": true,
"imports": [
"_cv2LK44M.js",
"_CRLlKr96.js",
"_BtL0wB3H.js",
"_DXE57cnx.js"
]
},
".svelte-kit/generated/client-optimized/nodes/2.js": {
"file": "_app/immutable/nodes/2.BmiXdPHI.js",
"name": "nodes/2",
"src": ".svelte-kit/generated/client-optimized/nodes/2.js",
"isEntry": true,
"isDynamicEntry": true,
"imports": [
"_DyPeVqDG.js",
"_cv2LK44M.js",
"_CRLlKr96.js",
"_BtL0wB3H.js",
"_vVxDbqKK.js",
"_Dbod7Wv8.js",
"_BxZpmA7Z.js",
"_xdjHc-A2.js"
]
},
".svelte-kit/generated/client-optimized/nodes/3.js": {
"file": "_app/immutable/nodes/3.guWMyWpk.js",
"name": "nodes/3",
"src": ".svelte-kit/generated/client-optimized/nodes/3.js",
"isEntry": true,
"isDynamicEntry": true,
"imports": [
"_DyPeVqDG.js",
"_cv2LK44M.js",
"_CRLlKr96.js",
"_BtL0wB3H.js",
"_vVxDbqKK.js",
"_Dbod7Wv8.js"
]
},
"_BtL0wB3H.js": {
"file": "_app/immutable/chunks/BtL0wB3H.js",
"name": "index"
},
"_BxZpmA7Z.js": {
"file": "_app/immutable/chunks/BxZpmA7Z.js",
"name": "index-client",
"imports": [
"_BtL0wB3H.js"
]
},
"_CRLlKr96.js": {
"file": "_app/immutable/chunks/CRLlKr96.js",
"name": "legacy",
"imports": [
"_BtL0wB3H.js"
]
},
"_D0iaTcAo.js": {
"file": "_app/immutable/chunks/D0iaTcAo.js",
"name": "entry",
"imports": [
"_BtL0wB3H.js",
"_BxZpmA7Z.js"
]
},
"_DXE57cnx.js": {
"file": "_app/immutable/chunks/DXE57cnx.js",
"name": "stores",
"imports": [
"_D0iaTcAo.js"
]
},
"_Dbod7Wv8.js": {
"file": "_app/immutable/chunks/Dbod7Wv8.js",
"name": "toasts",
"imports": [
"_BtL0wB3H.js"
]
},
"_DyPeVqDG.js": {
"file": "_app/immutable/chunks/DyPeVqDG.js",
"name": "api",
"imports": [
"_BtL0wB3H.js",
"_Dbod7Wv8.js"
]
},
"_cv2LK44M.js": {
"file": "_app/immutable/chunks/cv2LK44M.js",
"name": "disclose-version",
"imports": [
"_BtL0wB3H.js"
]
},
"_vVxDbqKK.js": {
"file": "_app/immutable/chunks/vVxDbqKK.js",
"name": "props",
"imports": [
"_BtL0wB3H.js",
"_cv2LK44M.js"
]
},
"_xdjHc-A2.js": {
"file": "_app/immutable/chunks/xdjHc-A2.js",
"name": "class",
"imports": [
"_BtL0wB3H.js"
]
},
"node_modules/@sveltejs/kit/src/runtime/client/entry.js": {
"file": "_app/immutable/entry/start.BHAeOrfR.js",
"name": "entry/start",
"src": "node_modules/@sveltejs/kit/src/runtime/client/entry.js",
"isEntry": true,
"imports": [
"_D0iaTcAo.js"
]
}
}


@@ -1 +0,0 @@
{"version":"1766262590857"}


@@ -1 +0,0 @@
export const env={"PUBLIC_WS_URL":"ws://localhost:8000"}


@@ -1,180 +0,0 @@
{
".svelte-kit/generated/server/internal.js": {
"file": "internal.js",
"name": "internal",
"src": ".svelte-kit/generated/server/internal.js",
"isEntry": true,
"imports": [
"_internal.js",
"_environment.js"
]
},
"_api.js": {
"file": "chunks/api.js",
"name": "api",
"imports": [
"_toasts.js"
]
},
"_environment.js": {
"file": "chunks/environment.js",
"name": "environment"
},
"_equality.js": {
"file": "chunks/equality.js",
"name": "equality"
},
"_exports.js": {
"file": "chunks/exports.js",
"name": "exports"
},
"_false.js": {
"file": "chunks/false.js",
"name": "false"
},
"_index.js": {
"file": "chunks/index.js",
"name": "index",
"imports": [
"_equality.js"
]
},
"_index2.js": {
"file": "chunks/index2.js",
"name": "index",
"imports": [
"_false.js",
"_equality.js"
]
},
"_internal.js": {
"file": "chunks/internal.js",
"name": "internal",
"imports": [
"_index2.js",
"_equality.js",
"_environment.js"
]
},
"_shared.js": {
"file": "chunks/shared.js",
"name": "shared",
"imports": [
"_utils.js"
]
},
"_stores.js": {
"file": "chunks/stores.js",
"name": "stores",
"imports": [
"_index2.js",
"_exports.js",
"_utils.js",
"_equality.js"
]
},
"_toasts.js": {
"file": "chunks/toasts.js",
"name": "toasts",
"imports": [
"_index.js"
]
},
"_utils.js": {
"file": "chunks/utils.js",
"name": "utils"
},
"node_modules/@sveltejs/kit/src/runtime/app/server/remote/index.js": {
"file": "remote-entry.js",
"name": "remote-entry",
"src": "node_modules/@sveltejs/kit/src/runtime/app/server/remote/index.js",
"isEntry": true,
"imports": [
"_shared.js",
"_false.js",
"_environment.js"
]
},
"node_modules/@sveltejs/kit/src/runtime/server/index.js": {
"file": "index.js",
"name": "index",
"src": "node_modules/@sveltejs/kit/src/runtime/server/index.js",
"isEntry": true,
"imports": [
"_false.js",
"_environment.js",
"_shared.js",
"_exports.js",
"_utils.js",
"_index.js",
"_internal.js"
]
},
"src/routes/+error.svelte": {
"file": "entries/pages/_error.svelte.js",
"name": "entries/pages/_error.svelte",
"src": "src/routes/+error.svelte",
"isEntry": true,
"imports": [
"_index2.js",
"_stores.js"
]
},
"src/routes/+layout.svelte": {
"file": "entries/pages/_layout.svelte.js",
"name": "entries/pages/_layout.svelte",
"src": "src/routes/+layout.svelte",
"isEntry": true,
"imports": [
"_index2.js",
"_stores.js",
"_toasts.js"
],
"css": [
"_app/immutable/assets/_layout.RZHRvmcL.css"
]
},
"src/routes/+layout.ts": {
"file": "entries/pages/_layout.ts.js",
"name": "entries/pages/_layout.ts",
"src": "src/routes/+layout.ts",
"isEntry": true
},
"src/routes/+page.svelte": {
"file": "entries/pages/_page.svelte.js",
"name": "entries/pages/_page.svelte",
"src": "src/routes/+page.svelte",
"isEntry": true,
"imports": [
"_index2.js",
"_index.js"
]
},
"src/routes/+page.ts": {
"file": "entries/pages/_page.ts.js",
"name": "entries/pages/_page.ts",
"src": "src/routes/+page.ts",
"isEntry": true,
"imports": [
"_api.js"
]
},
"src/routes/settings/+page.svelte": {
"file": "entries/pages/settings/_page.svelte.js",
"name": "entries/pages/settings/_page.svelte",
"src": "src/routes/settings/+page.svelte",
"isEntry": true,
"imports": [
"_index2.js"
]
},
"src/routes/settings/+page.ts": {
"file": "entries/pages/settings/_page.ts.js",
"name": "entries/pages/settings/_page.ts",
"src": "src/routes/settings/+page.ts",
"isEntry": true,
"imports": [
"_api.js"
]
}
}


@@ -1,77 +0,0 @@
import { a as addToast } from "./toasts.js";
const API_BASE_URL = "/api";
async function fetchApi(endpoint) {
try {
console.log(`[api.fetchApi][Action] Fetching from context={{'endpoint': '${endpoint}'}}`);
const response = await fetch(`${API_BASE_URL}${endpoint}`);
if (!response.ok) {
throw new Error(`API request failed with status ${response.status}`);
}
return await response.json();
} catch (error) {
console.error(`[api.fetchApi][Coherence:Failed] Error fetching from ${endpoint}:`, error);
addToast(error.message, "error");
throw error;
}
}
async function postApi(endpoint, body) {
try {
console.log(`[api.postApi][Action] Posting to context={{'endpoint': '${endpoint}'}}`);
const response = await fetch(`${API_BASE_URL}${endpoint}`, {
method: "POST",
headers: {
"Content-Type": "application/json"
},
body: JSON.stringify(body)
});
if (!response.ok) {
throw new Error(`API request failed with status ${response.status}`);
}
return await response.json();
} catch (error) {
console.error(`[api.postApi][Coherence:Failed] Error posting to ${endpoint}:`, error);
addToast(error.message, "error");
throw error;
}
}
async function requestApi(endpoint, method = "GET", body = null) {
try {
console.log(`[api.requestApi][Action] ${method} to context={{'endpoint': '${endpoint}'}}`);
const options = {
method,
headers: {
"Content-Type": "application/json"
}
};
if (body) {
options.body = JSON.stringify(body);
}
const response = await fetch(`${API_BASE_URL}${endpoint}`, options);
if (!response.ok) {
const errorData = await response.json().catch(() => ({}));
throw new Error(errorData.detail || `API request failed with status ${response.status}`);
}
return await response.json();
} catch (error) {
console.error(`[api.requestApi][Coherence:Failed] Error ${method} to ${endpoint}:`, error);
addToast(error.message, "error");
throw error;
}
}
const api = {
getPlugins: () => fetchApi("/plugins/"),
getTasks: () => fetchApi("/tasks/"),
getTask: (taskId) => fetchApi(`/tasks/${taskId}`),
createTask: (pluginId, params) => postApi("/tasks/", { plugin_id: pluginId, params }),
// Settings
getSettings: () => fetchApi("/settings/"),
updateGlobalSettings: (settings) => requestApi("/settings/global", "PATCH", settings),
getEnvironments: () => fetchApi("/settings/environments"),
addEnvironment: (env) => postApi("/settings/environments", env),
updateEnvironment: (id, env) => requestApi(`/settings/environments/${id}`, "PUT", env),
deleteEnvironment: (id) => requestApi(`/settings/environments/${id}`, "DELETE"),
testEnvironmentConnection: (id) => postApi(`/settings/environments/${id}/test`, {})
};
export {
api as a
};
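A sketch of how the route chunks in this bundle appear to consume the helper; the `api as a` import mirrors the export directly above, while the `load` body itself is illustrative.

```ts
// Illustrative only: mirrors the bundled +page.ts chunks that import this file.
import { a as api } from "./api.js";

export async function load() {
	// Each helper throws (and raises an error toast) on a non-2xx response,
	// so callers can simply await and let failures propagate.
	const tasks = await api.getTasks();
	const plugins = await api.getPlugins();
	return { tasks, plugins };
}
```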


@@ -1,34 +0,0 @@
let base = "";
let assets = base;
const app_dir = "_app";
const relative = true;
const initial = { base, assets };
function override(paths) {
base = paths.base;
assets = paths.assets;
}
function reset() {
base = initial.base;
assets = initial.assets;
}
function set_assets(path) {
assets = initial.assets = path;
}
let prerendering = false;
function set_building() {
}
function set_prerendering() {
prerendering = true;
}
export {
assets as a,
base as b,
app_dir as c,
reset as d,
set_building as e,
set_prerendering as f,
override as o,
prerendering as p,
relative as r,
set_assets as s
};


@@ -1,51 +0,0 @@
var is_array = Array.isArray;
var index_of = Array.prototype.indexOf;
var array_from = Array.from;
var define_property = Object.defineProperty;
var get_descriptor = Object.getOwnPropertyDescriptor;
var object_prototype = Object.prototype;
var array_prototype = Array.prototype;
var get_prototype_of = Object.getPrototypeOf;
var is_extensible = Object.isExtensible;
const noop = () => {
};
function run_all(arr) {
for (var i = 0; i < arr.length; i++) {
arr[i]();
}
}
function deferred() {
var resolve;
var reject;
var promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
return { promise, resolve, reject };
}
function equals(value) {
return value === this.v;
}
function safe_not_equal(a, b) {
return a != a ? b == b : a !== b || a !== null && typeof a === "object" || typeof a === "function";
}
function safe_equals(value) {
return !safe_not_equal(value, this.v);
}
export {
array_from as a,
deferred as b,
array_prototype as c,
define_property as d,
equals as e,
get_prototype_of as f,
get_descriptor as g,
is_extensible as h,
is_array as i,
index_of as j,
safe_not_equal as k,
noop as n,
object_prototype as o,
run_all as r,
safe_equals as s
};


@@ -1,174 +0,0 @@
const SCHEME = /^[a-z][a-z\d+\-.]+:/i;
const internal = new URL("sveltekit-internal://");
function resolve(base, path) {
if (path[0] === "/" && path[1] === "/") return path;
let url = new URL(base, internal);
url = new URL(path, url);
return url.protocol === internal.protocol ? url.pathname + url.search + url.hash : url.href;
}
function normalize_path(path, trailing_slash) {
if (path === "/" || trailing_slash === "ignore") return path;
if (trailing_slash === "never") {
return path.endsWith("/") ? path.slice(0, -1) : path;
} else if (trailing_slash === "always" && !path.endsWith("/")) {
return path + "/";
}
return path;
}
function decode_pathname(pathname) {
return pathname.split("%25").map(decodeURI).join("%25");
}
function decode_params(params) {
for (const key in params) {
params[key] = decodeURIComponent(params[key]);
}
return params;
}
function make_trackable(url, callback, search_params_callback, allow_hash = false) {
const tracked = new URL(url);
Object.defineProperty(tracked, "searchParams", {
value: new Proxy(tracked.searchParams, {
get(obj, key) {
if (key === "get" || key === "getAll" || key === "has") {
return (param) => {
search_params_callback(param);
return obj[key](param);
};
}
callback();
const value = Reflect.get(obj, key);
return typeof value === "function" ? value.bind(obj) : value;
}
}),
enumerable: true,
configurable: true
});
const tracked_url_properties = ["href", "pathname", "search", "toString", "toJSON"];
if (allow_hash) tracked_url_properties.push("hash");
for (const property of tracked_url_properties) {
Object.defineProperty(tracked, property, {
get() {
callback();
return url[property];
},
enumerable: true,
configurable: true
});
}
{
tracked[/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")] = (depth, opts, inspect) => {
return inspect(url, opts);
};
tracked.searchParams[/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")] = (depth, opts, inspect) => {
return inspect(url.searchParams, opts);
};
}
if (!allow_hash) {
disable_hash(tracked);
}
return tracked;
}
function disable_hash(url) {
allow_nodejs_console_log(url);
Object.defineProperty(url, "hash", {
get() {
throw new Error(
"Cannot access event.url.hash. Consider using `page.url.hash` inside a component instead"
);
}
});
}
function disable_search(url) {
allow_nodejs_console_log(url);
for (const property of ["search", "searchParams"]) {
Object.defineProperty(url, property, {
get() {
throw new Error(`Cannot access url.${property} on a page with prerendering enabled`);
}
});
}
}
function allow_nodejs_console_log(url) {
{
url[/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")] = (depth, opts, inspect) => {
return inspect(new URL(url), opts);
};
}
}
function validator(expected) {
function validate(module, file) {
if (!module) return;
for (const key in module) {
if (key[0] === "_" || expected.has(key)) continue;
const values = [...expected.values()];
const hint = hint_for_supported_files(key, file?.slice(file.lastIndexOf("."))) ?? `valid exports are ${values.join(", ")}, or anything with a '_' prefix`;
throw new Error(`Invalid export '${key}'${file ? ` in ${file}` : ""} (${hint})`);
}
}
return validate;
}
function hint_for_supported_files(key, ext = ".js") {
const supported_files = [];
if (valid_layout_exports.has(key)) {
supported_files.push(`+layout${ext}`);
}
if (valid_page_exports.has(key)) {
supported_files.push(`+page${ext}`);
}
if (valid_layout_server_exports.has(key)) {
supported_files.push(`+layout.server${ext}`);
}
if (valid_page_server_exports.has(key)) {
supported_files.push(`+page.server${ext}`);
}
if (valid_server_exports.has(key)) {
supported_files.push(`+server${ext}`);
}
if (supported_files.length > 0) {
return `'${key}' is a valid export in ${supported_files.slice(0, -1).join(", ")}${supported_files.length > 1 ? " or " : ""}${supported_files.at(-1)}`;
}
}
const valid_layout_exports = /* @__PURE__ */ new Set([
"load",
"prerender",
"csr",
"ssr",
"trailingSlash",
"config"
]);
const valid_page_exports = /* @__PURE__ */ new Set([...valid_layout_exports, "entries"]);
const valid_layout_server_exports = /* @__PURE__ */ new Set([...valid_layout_exports]);
const valid_page_server_exports = /* @__PURE__ */ new Set([...valid_layout_server_exports, "actions", "entries"]);
const valid_server_exports = /* @__PURE__ */ new Set([
"GET",
"POST",
"PATCH",
"PUT",
"DELETE",
"OPTIONS",
"HEAD",
"fallback",
"prerender",
"trailingSlash",
"config",
"entries"
]);
const validate_layout_exports = validator(valid_layout_exports);
const validate_page_exports = validator(valid_page_exports);
const validate_layout_server_exports = validator(valid_layout_server_exports);
const validate_page_server_exports = validator(valid_page_server_exports);
const validate_server_exports = validator(valid_server_exports);
export {
SCHEME as S,
decode_params as a,
validate_layout_exports as b,
validate_page_server_exports as c,
disable_search as d,
validate_page_exports as e,
decode_pathname as f,
validate_server_exports as g,
make_trackable as m,
normalize_path as n,
resolve as r,
validate_layout_server_exports as v
};

View File

@@ -1,4 +0,0 @@
const BROWSER = false;
export {
BROWSER as B
};


@@ -1,59 +0,0 @@
import { n as noop, k as safe_not_equal } from "./equality.js";
import "clsx";
const subscriber_queue = [];
function readable(value, start) {
return {
subscribe: writable(value, start).subscribe
};
}
function writable(value, start = noop) {
let stop = null;
const subscribers = /* @__PURE__ */ new Set();
function set(new_value) {
if (safe_not_equal(value, new_value)) {
value = new_value;
if (stop) {
const run_queue = !subscriber_queue.length;
for (const subscriber of subscribers) {
subscriber[1]();
subscriber_queue.push(subscriber, value);
}
if (run_queue) {
for (let i = 0; i < subscriber_queue.length; i += 2) {
subscriber_queue[i][0](subscriber_queue[i + 1]);
}
subscriber_queue.length = 0;
}
}
}
}
function update(fn) {
set(fn(
/** @type {T} */
value
));
}
function subscribe(run, invalidate = noop) {
const subscriber = [run, invalidate];
subscribers.add(subscriber);
if (subscribers.size === 1) {
stop = start(set, update) || noop;
}
run(
/** @type {T} */
value
);
return () => {
subscribers.delete(subscriber);
if (subscribers.size === 0 && stop) {
stop();
stop = null;
}
};
}
return { set, update, subscribe };
}
export {
readable as r,
writable as w
};
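This chunk is the bundled copy of `svelte/store`'s `writable`/`readable`. A minimal sketch of the contract it implements, importing from the public package rather than the chunk, purely for illustration:

```ts
import { writable } from "svelte/store";

const count = writable(0);

// subscribe() runs immediately with the current value, then again after every set/update.
const unsubscribe = count.subscribe((value) => console.log("count is", value));

count.set(1);
count.update((n) => n + 1); // logs "count is 2"

// Removing the last subscriber triggers the optional start() cleanup.
unsubscribe();
```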

File diff suppressed because it is too large


@@ -1,982 +0,0 @@
import { H as HYDRATION_ERROR, C as COMMENT_NODE, a as HYDRATION_END, g as get_next_sibling, b as HYDRATION_START, c as HYDRATION_START_ELSE, e as effect_tracking, d as get, s as source, r as render_effect, u as untrack, i as increment, q as queue_micro_task, f as active_effect, h as block, j as branch, B as Batch, p as pause_effect, k as create_text, l as set_active_effect, m as set_active_reaction, n as set_component_context, o as handle_error, t as active_reaction, v as component_context, w as move_effect, x as internal_set, y as destroy_effect, z as invoke_error_boundary, A as svelte_boundary_reset_onerror, E as EFFECT_TRANSPARENT, D as EFFECT_PRESERVED, F as BOUNDARY_EFFECT, G as init_operations, I as get_first_child, J as hydration_failed, K as clear_text_content, L as component_root, M as is_passive_event, N as push, O as pop, P as set, Q as LEGACY_PROPS, R as flushSync, S as mutable_source, T as render, U as setContext } from "./index2.js";
import { d as define_property, a as array_from } from "./equality.js";
import "clsx";
import "./environment.js";
let public_env = {};
function set_private_env(environment) {
}
function set_public_env(environment) {
public_env = environment;
}
function hydration_mismatch(location) {
{
console.warn(`https://svelte.dev/e/hydration_mismatch`);
}
}
function svelte_boundary_reset_noop() {
{
console.warn(`https://svelte.dev/e/svelte_boundary_reset_noop`);
}
}
let hydrating = false;
function set_hydrating(value) {
hydrating = value;
}
let hydrate_node;
function set_hydrate_node(node) {
if (node === null) {
hydration_mismatch();
throw HYDRATION_ERROR;
}
return hydrate_node = node;
}
function hydrate_next() {
return set_hydrate_node(get_next_sibling(hydrate_node));
}
function next(count = 1) {
if (hydrating) {
var i = count;
var node = hydrate_node;
while (i--) {
node = /** @type {TemplateNode} */
get_next_sibling(node);
}
hydrate_node = node;
}
}
function skip_nodes(remove = true) {
var depth = 0;
var node = hydrate_node;
while (true) {
if (node.nodeType === COMMENT_NODE) {
var data = (
/** @type {Comment} */
node.data
);
if (data === HYDRATION_END) {
if (depth === 0) return node;
depth -= 1;
} else if (data === HYDRATION_START || data === HYDRATION_START_ELSE) {
depth += 1;
}
}
var next2 = (
/** @type {TemplateNode} */
get_next_sibling(node)
);
if (remove) node.remove();
node = next2;
}
}
function createSubscriber(start) {
let subscribers = 0;
let version = source(0);
let stop;
return () => {
if (effect_tracking()) {
get(version);
render_effect(() => {
if (subscribers === 0) {
stop = untrack(() => start(() => increment(version)));
}
subscribers += 1;
return () => {
queue_micro_task(() => {
subscribers -= 1;
if (subscribers === 0) {
stop?.();
stop = void 0;
increment(version);
}
});
};
});
}
};
}
var flags = EFFECT_TRANSPARENT | EFFECT_PRESERVED | BOUNDARY_EFFECT;
function boundary(node, props, children) {
new Boundary(node, props, children);
}
class Boundary {
/** @type {Boundary | null} */
parent;
#pending = false;
/** @type {TemplateNode} */
#anchor;
/** @type {TemplateNode | null} */
#hydrate_open = hydrating ? hydrate_node : null;
/** @type {BoundaryProps} */
#props;
/** @type {((anchor: Node) => void)} */
#children;
/** @type {Effect} */
#effect;
/** @type {Effect | null} */
#main_effect = null;
/** @type {Effect | null} */
#pending_effect = null;
/** @type {Effect | null} */
#failed_effect = null;
/** @type {DocumentFragment | null} */
#offscreen_fragment = null;
/** @type {TemplateNode | null} */
#pending_anchor = null;
#local_pending_count = 0;
#pending_count = 0;
#is_creating_fallback = false;
/**
* A source containing the number of pending async deriveds/expressions.
* Only created if `$effect.pending()` is used inside the boundary,
* otherwise updating the source results in needless `Batch.ensure()`
* calls followed by no-op flushes
* @type {Source<number> | null}
*/
#effect_pending = null;
#effect_pending_subscriber = createSubscriber(() => {
this.#effect_pending = source(this.#local_pending_count);
return () => {
this.#effect_pending = null;
};
});
/**
* @param {TemplateNode} node
* @param {BoundaryProps} props
* @param {((anchor: Node) => void)} children
*/
constructor(node, props, children) {
this.#anchor = node;
this.#props = props;
this.#children = children;
this.parent = /** @type {Effect} */
active_effect.b;
this.#pending = !!this.#props.pending;
this.#effect = block(() => {
active_effect.b = this;
if (hydrating) {
const comment = this.#hydrate_open;
hydrate_next();
const server_rendered_pending = (
/** @type {Comment} */
comment.nodeType === COMMENT_NODE && /** @type {Comment} */
comment.data === HYDRATION_START_ELSE
);
if (server_rendered_pending) {
this.#hydrate_pending_content();
} else {
this.#hydrate_resolved_content();
}
} else {
var anchor = this.#get_anchor();
try {
this.#main_effect = branch(() => children(anchor));
} catch (error) {
this.error(error);
}
if (this.#pending_count > 0) {
this.#show_pending_snippet();
} else {
this.#pending = false;
}
}
return () => {
this.#pending_anchor?.remove();
};
}, flags);
if (hydrating) {
this.#anchor = hydrate_node;
}
}
#hydrate_resolved_content() {
try {
this.#main_effect = branch(() => this.#children(this.#anchor));
} catch (error) {
this.error(error);
}
this.#pending = false;
}
#hydrate_pending_content() {
const pending = this.#props.pending;
if (!pending) {
return;
}
this.#pending_effect = branch(() => pending(this.#anchor));
Batch.enqueue(() => {
var anchor = this.#get_anchor();
this.#main_effect = this.#run(() => {
Batch.ensure();
return branch(() => this.#children(anchor));
});
if (this.#pending_count > 0) {
this.#show_pending_snippet();
} else {
pause_effect(
/** @type {Effect} */
this.#pending_effect,
() => {
this.#pending_effect = null;
}
);
this.#pending = false;
}
});
}
#get_anchor() {
var anchor = this.#anchor;
if (this.#pending) {
this.#pending_anchor = create_text();
this.#anchor.before(this.#pending_anchor);
anchor = this.#pending_anchor;
}
return anchor;
}
/**
* Returns `true` if the effect exists inside a boundary whose pending snippet is shown
* @returns {boolean}
*/
is_pending() {
return this.#pending || !!this.parent && this.parent.is_pending();
}
has_pending_snippet() {
return !!this.#props.pending;
}
/**
* @param {() => Effect | null} fn
*/
#run(fn) {
var previous_effect = active_effect;
var previous_reaction = active_reaction;
var previous_ctx = component_context;
set_active_effect(this.#effect);
set_active_reaction(this.#effect);
set_component_context(this.#effect.ctx);
try {
return fn();
} catch (e) {
handle_error(e);
return null;
} finally {
set_active_effect(previous_effect);
set_active_reaction(previous_reaction);
set_component_context(previous_ctx);
}
}
#show_pending_snippet() {
const pending = (
/** @type {(anchor: Node) => void} */
this.#props.pending
);
if (this.#main_effect !== null) {
this.#offscreen_fragment = document.createDocumentFragment();
this.#offscreen_fragment.append(
/** @type {TemplateNode} */
this.#pending_anchor
);
move_effect(this.#main_effect, this.#offscreen_fragment);
}
if (this.#pending_effect === null) {
this.#pending_effect = branch(() => pending(this.#anchor));
}
}
/**
* Updates the pending count associated with the currently visible pending snippet,
* if any, such that we can replace the snippet with content once work is done
* @param {1 | -1} d
*/
#update_pending_count(d) {
if (!this.has_pending_snippet()) {
if (this.parent) {
this.parent.#update_pending_count(d);
}
return;
}
this.#pending_count += d;
if (this.#pending_count === 0) {
this.#pending = false;
if (this.#pending_effect) {
pause_effect(this.#pending_effect, () => {
this.#pending_effect = null;
});
}
if (this.#offscreen_fragment) {
this.#anchor.before(this.#offscreen_fragment);
this.#offscreen_fragment = null;
}
}
}
/**
* Update the source that powers `$effect.pending()` inside this boundary,
* and controls when the current `pending` snippet (if any) is removed.
* Do not call from inside the class
* @param {1 | -1} d
*/
update_pending_count(d) {
this.#update_pending_count(d);
this.#local_pending_count += d;
if (this.#effect_pending) {
internal_set(this.#effect_pending, this.#local_pending_count);
}
}
get_effect_pending() {
this.#effect_pending_subscriber();
return get(
/** @type {Source<number>} */
this.#effect_pending
);
}
/** @param {unknown} error */
error(error) {
var onerror = this.#props.onerror;
let failed = this.#props.failed;
if (this.#is_creating_fallback || !onerror && !failed) {
throw error;
}
if (this.#main_effect) {
destroy_effect(this.#main_effect);
this.#main_effect = null;
}
if (this.#pending_effect) {
destroy_effect(this.#pending_effect);
this.#pending_effect = null;
}
if (this.#failed_effect) {
destroy_effect(this.#failed_effect);
this.#failed_effect = null;
}
if (hydrating) {
set_hydrate_node(
/** @type {TemplateNode} */
this.#hydrate_open
);
next();
set_hydrate_node(skip_nodes());
}
var did_reset = false;
var calling_on_error = false;
const reset = () => {
if (did_reset) {
svelte_boundary_reset_noop();
return;
}
did_reset = true;
if (calling_on_error) {
svelte_boundary_reset_onerror();
}
Batch.ensure();
this.#local_pending_count = 0;
if (this.#failed_effect !== null) {
pause_effect(this.#failed_effect, () => {
this.#failed_effect = null;
});
}
this.#pending = this.has_pending_snippet();
this.#main_effect = this.#run(() => {
this.#is_creating_fallback = false;
return branch(() => this.#children(this.#anchor));
});
if (this.#pending_count > 0) {
this.#show_pending_snippet();
} else {
this.#pending = false;
}
};
var previous_reaction = active_reaction;
try {
set_active_reaction(null);
calling_on_error = true;
onerror?.(error, reset);
calling_on_error = false;
} catch (error2) {
invoke_error_boundary(error2, this.#effect && this.#effect.parent);
} finally {
set_active_reaction(previous_reaction);
}
if (failed) {
queue_micro_task(() => {
this.#failed_effect = this.#run(() => {
Batch.ensure();
this.#is_creating_fallback = true;
try {
return branch(() => {
failed(
this.#anchor,
() => error,
() => reset
);
});
} catch (error2) {
invoke_error_boundary(
error2,
/** @type {Effect} */
this.#effect.parent
);
return null;
} finally {
this.#is_creating_fallback = false;
}
});
});
}
}
}
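// Orientation note (not part of the original bundle): the pending/failed/onerror props that
// Boundary receives correspond to <svelte:boundary> usage in application code, roughly:
//
//   <svelte:boundary onerror={(error, reset) => console.error(error)}>
//     <Dashboard />
//     {#snippet pending()}<p>loading...</p>{/snippet}
//     {#snippet failed(error, reset)}<button onclick={reset}>retry</button>{/snippet}
//   </svelte:boundary>
//
// The component name and markup are illustrative. $effect.pending(), mentioned in the comments
// above, reads the #effect_pending source while async work tracked by this boundary is outstanding.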
const all_registered_events = /* @__PURE__ */ new Set();
const root_event_handles = /* @__PURE__ */ new Set();
let last_propagated_event = null;
function handle_event_propagation(event) {
var handler_element = this;
var owner_document = (
/** @type {Node} */
handler_element.ownerDocument
);
var event_name = event.type;
var path = event.composedPath?.() || [];
var current_target = (
/** @type {null | Element} */
path[0] || event.target
);
last_propagated_event = event;
var path_idx = 0;
var handled_at = last_propagated_event === event && event.__root;
if (handled_at) {
var at_idx = path.indexOf(handled_at);
if (at_idx !== -1 && (handler_element === document || handler_element === /** @type {any} */
window)) {
event.__root = handler_element;
return;
}
var handler_idx = path.indexOf(handler_element);
if (handler_idx === -1) {
return;
}
if (at_idx <= handler_idx) {
path_idx = at_idx;
}
}
current_target = /** @type {Element} */
path[path_idx] || event.target;
if (current_target === handler_element) return;
define_property(event, "currentTarget", {
configurable: true,
get() {
return current_target || owner_document;
}
});
var previous_reaction = active_reaction;
var previous_effect = active_effect;
set_active_reaction(null);
set_active_effect(null);
try {
var throw_error;
var other_errors = [];
while (current_target !== null) {
var parent_element = current_target.assignedSlot || current_target.parentNode || /** @type {any} */
current_target.host || null;
try {
var delegated = current_target["__" + event_name];
if (delegated != null && (!/** @type {any} */
current_target.disabled || // DOM could've been updated already by the time this is reached, so we check this as well
// -> the target could not have been disabled because it emits the event in the first place
event.target === current_target)) {
delegated.call(current_target, event);
}
} catch (error) {
if (throw_error) {
other_errors.push(error);
} else {
throw_error = error;
}
}
if (event.cancelBubble || parent_element === handler_element || parent_element === null) {
break;
}
current_target = parent_element;
}
if (throw_error) {
for (let error of other_errors) {
queueMicrotask(() => {
throw error;
});
}
throw throw_error;
}
} finally {
event.__root = handler_element;
delete event.currentTarget;
set_active_reaction(previous_reaction);
set_active_effect(previous_effect);
}
}
function assign_nodes(start, end) {
var effect = (
/** @type {Effect} */
active_effect
);
if (effect.nodes === null) {
effect.nodes = { start, end, a: null, t: null };
}
}
function mount(component, options2) {
return _mount(component, options2);
}
function hydrate(component, options2) {
init_operations();
options2.intro = options2.intro ?? false;
const target = options2.target;
const was_hydrating = hydrating;
const previous_hydrate_node = hydrate_node;
try {
var anchor = get_first_child(target);
while (anchor && (anchor.nodeType !== COMMENT_NODE || /** @type {Comment} */
anchor.data !== HYDRATION_START)) {
anchor = get_next_sibling(anchor);
}
if (!anchor) {
throw HYDRATION_ERROR;
}
set_hydrating(true);
set_hydrate_node(
/** @type {Comment} */
anchor
);
const instance = _mount(component, { ...options2, anchor });
set_hydrating(false);
return (
/** @type {Exports} */
instance
);
} catch (error) {
if (error instanceof Error && error.message.split("\n").some((line) => line.startsWith("https://svelte.dev/e/"))) {
throw error;
}
if (error !== HYDRATION_ERROR) {
console.warn("Failed to hydrate: ", error);
}
if (options2.recover === false) {
hydration_failed();
}
init_operations();
clear_text_content(target);
set_hydrating(false);
return mount(component, options2);
} finally {
set_hydrating(was_hydrating);
set_hydrate_node(previous_hydrate_node);
}
}
const document_listeners = /* @__PURE__ */ new Map();
function _mount(Component, { target, anchor, props = {}, events, context, intro = true }) {
init_operations();
var registered_events = /* @__PURE__ */ new Set();
var event_handle = (events2) => {
for (var i = 0; i < events2.length; i++) {
var event_name = events2[i];
if (registered_events.has(event_name)) continue;
registered_events.add(event_name);
var passive = is_passive_event(event_name);
target.addEventListener(event_name, handle_event_propagation, { passive });
var n = document_listeners.get(event_name);
if (n === void 0) {
document.addEventListener(event_name, handle_event_propagation, { passive });
document_listeners.set(event_name, 1);
} else {
document_listeners.set(event_name, n + 1);
}
}
};
event_handle(array_from(all_registered_events));
root_event_handles.add(event_handle);
var component = void 0;
var unmount2 = component_root(() => {
var anchor_node = anchor ?? target.appendChild(create_text());
boundary(
/** @type {TemplateNode} */
anchor_node,
{
pending: () => {
}
},
(anchor_node2) => {
if (context) {
push({});
var ctx = (
/** @type {ComponentContext} */
component_context
);
ctx.c = context;
}
if (events) {
props.$$events = events;
}
if (hydrating) {
assign_nodes(
/** @type {TemplateNode} */
anchor_node2,
null
);
}
component = Component(anchor_node2, props) || {};
if (hydrating) {
active_effect.nodes.end = hydrate_node;
if (hydrate_node === null || hydrate_node.nodeType !== COMMENT_NODE || /** @type {Comment} */
hydrate_node.data !== HYDRATION_END) {
hydration_mismatch();
throw HYDRATION_ERROR;
}
}
if (context) {
pop();
}
}
);
return () => {
for (var event_name of registered_events) {
target.removeEventListener(event_name, handle_event_propagation);
var n = (
/** @type {number} */
document_listeners.get(event_name)
);
if (--n === 0) {
document.removeEventListener(event_name, handle_event_propagation);
document_listeners.delete(event_name);
} else {
document_listeners.set(event_name, n);
}
}
root_event_handles.delete(event_handle);
if (anchor_node !== anchor) {
anchor_node.parentNode?.removeChild(anchor_node);
}
};
});
mounted_components.set(component, unmount2);
return component;
}
let mounted_components = /* @__PURE__ */ new WeakMap();
function unmount(component, options2) {
const fn = mounted_components.get(component);
if (fn) {
mounted_components.delete(component);
return fn(options2);
}
return Promise.resolve();
}
function asClassComponent$1(component) {
return class extends Svelte4Component {
/** @param {any} options */
constructor(options2) {
super({
component,
...options2
});
}
};
}
class Svelte4Component {
/** @type {any} */
#events;
/** @type {Record<string, any>} */
#instance;
/**
* @param {ComponentConstructorOptions & {
* component: any;
* }} options
*/
constructor(options2) {
var sources = /* @__PURE__ */ new Map();
var add_source = (key, value) => {
var s = mutable_source(value, false, false);
sources.set(key, s);
return s;
};
const props = new Proxy(
{ ...options2.props || {}, $$events: {} },
{
get(target, prop) {
return get(sources.get(prop) ?? add_source(prop, Reflect.get(target, prop)));
},
has(target, prop) {
if (prop === LEGACY_PROPS) return true;
get(sources.get(prop) ?? add_source(prop, Reflect.get(target, prop)));
return Reflect.has(target, prop);
},
set(target, prop, value) {
set(sources.get(prop) ?? add_source(prop, value), value);
return Reflect.set(target, prop, value);
}
}
);
this.#instance = (options2.hydrate ? hydrate : mount)(options2.component, {
target: options2.target,
anchor: options2.anchor,
props,
context: options2.context,
intro: options2.intro ?? false,
recover: options2.recover
});
if (!options2?.props?.$$host || options2.sync === false) {
flushSync();
}
this.#events = props.$$events;
for (const key of Object.keys(this.#instance)) {
if (key === "$set" || key === "$destroy" || key === "$on") continue;
define_property(this, key, {
get() {
return this.#instance[key];
},
/** @param {any} value */
set(value) {
this.#instance[key] = value;
},
enumerable: true
});
}
this.#instance.$set = /** @param {Record<string, any>} next */
(next2) => {
Object.assign(props, next2);
};
this.#instance.$destroy = () => {
unmount(this.#instance);
};
}
/** @param {Record<string, any>} props */
$set(props) {
this.#instance.$set(props);
}
/**
* @param {string} event
* @param {(...args: any[]) => any} callback
* @returns {any}
*/
$on(event, callback) {
this.#events[event] = this.#events[event] || [];
const cb = (...args) => callback.call(this, ...args);
this.#events[event].push(cb);
return () => {
this.#events[event] = this.#events[event].filter(
/** @param {any} fn */
(fn) => fn !== cb
);
};
}
$destroy() {
this.#instance.$destroy();
}
}
let read_implementation = null;
function set_read_implementation(fn) {
read_implementation = fn;
}
function set_manifest(_) {
}
function asClassComponent(component) {
const component_constructor = asClassComponent$1(component);
const _render = (props, { context, csp } = {}) => {
const result = render(component, { props, context, csp });
const munged = Object.defineProperties(
/** @type {LegacyRenderResult & PromiseLike<LegacyRenderResult>} */
{},
{
css: {
value: { code: "", map: null }
},
head: {
get: () => result.head
},
html: {
get: () => result.body
},
then: {
/**
* this is not type-safe, but honestly it's the best I can do right now, and it's a straightforward function.
*
* @template TResult1
* @template [TResult2=never]
* @param { (value: LegacyRenderResult) => TResult1 } onfulfilled
* @param { (reason: unknown) => TResult2 } onrejected
*/
value: (onfulfilled, onrejected) => {
{
const user_result = onfulfilled({
css: munged.css,
head: munged.head,
html: munged.html
});
return Promise.resolve(user_result);
}
}
}
}
);
return munged;
};
component_constructor.render = _render;
return component_constructor;
}
function Root($$renderer, $$props) {
$$renderer.component(($$renderer2) => {
let {
stores,
page,
constructors,
components = [],
form,
data_0 = null,
data_1 = null
} = $$props;
{
setContext("__svelte__", stores);
}
{
stores.page.set(page);
}
const Pyramid_1 = constructors[1];
if (constructors[1]) {
$$renderer2.push("<!--[-->");
const Pyramid_0 = constructors[0];
$$renderer2.push(`<!---->`);
Pyramid_0($$renderer2, {
data: data_0,
form,
params: page.params,
children: ($$renderer3) => {
$$renderer3.push(`<!---->`);
Pyramid_1($$renderer3, { data: data_1, form, params: page.params });
$$renderer3.push(`<!---->`);
},
$$slots: { default: true }
});
$$renderer2.push(`<!---->`);
} else {
$$renderer2.push("<!--[!-->");
const Pyramid_0 = constructors[0];
$$renderer2.push(`<!---->`);
Pyramid_0($$renderer2, { data: data_0, form, params: page.params });
$$renderer2.push(`<!---->`);
}
$$renderer2.push(`<!--]--> `);
{
$$renderer2.push("<!--[!-->");
}
$$renderer2.push(`<!--]-->`);
});
}
const root = asClassComponent(Root);
const options = {
app_template_contains_nonce: false,
async: false,
csp: { "mode": "auto", "directives": { "upgrade-insecure-requests": false, "block-all-mixed-content": false }, "reportOnly": { "upgrade-insecure-requests": false, "block-all-mixed-content": false } },
csrf_check_origin: true,
csrf_trusted_origins: [],
embedded: false,
env_public_prefix: "PUBLIC_",
env_private_prefix: "",
hash_routing: false,
hooks: null,
// added lazily, via `get_hooks`
preload_strategy: "modulepreload",
root,
service_worker: false,
service_worker_options: void 0,
templates: {
app: ({ head, body, assets, nonce, env }) => '<!DOCTYPE html>\n<html lang="en">\n <head>\n <meta charset="utf-8" />\n <link rel="icon" href="' + assets + '/favicon.png" />\n <meta name="viewport" content="width=device-width, initial-scale=1" />\n ' + head + '\n </head>\n <body data-sveltekit-preload-data="hover">\n <div style="display: contents">' + body + "</div>\n </body>\n</html>\n",
error: ({ status, message }) => '<!doctype html>\n<html lang="en">\n <head>\n <meta charset="utf-8" />\n <title>' + message + `</title>
<style>
body {
--bg: white;
--fg: #222;
--divider: #ccc;
background: var(--bg);
color: var(--fg);
font-family:
system-ui,
-apple-system,
BlinkMacSystemFont,
'Segoe UI',
Roboto,
Oxygen,
Ubuntu,
Cantarell,
'Open Sans',
'Helvetica Neue',
sans-serif;
display: flex;
align-items: center;
justify-content: center;
height: 100vh;
margin: 0;
}
.error {
display: flex;
align-items: center;
max-width: 32rem;
margin: 0 1rem;
}
.status {
font-weight: 200;
font-size: 3rem;
line-height: 1;
position: relative;
top: -0.05rem;
}
.message {
border-left: 1px solid var(--divider);
padding: 0 0 0 1rem;
margin: 0 0 0 1rem;
min-height: 2.5rem;
display: flex;
align-items: center;
}
.message h1 {
font-weight: 400;
font-size: 1em;
margin: 0;
}
@media (prefers-color-scheme: dark) {
body {
--bg: #222;
--fg: #ddd;
--divider: #666;
}
}
</style>
</head>
<body>
<div class="error">
<span class="status">` + status + '</span>\n <div class="message">\n <h1>' + message + "</h1>\n </div>\n </div>\n </body>\n</html>\n"
},
version_hash: "1ootf77"
};
async function get_hooks() {
let handle;
let handleFetch;
let handleError;
let handleValidationError;
let init;
let reroute;
let transport;
return {
handle,
handleFetch,
handleError,
handleValidationError,
init,
reroute,
transport
};
}
export {
set_public_env as a,
set_read_implementation as b,
set_manifest as c,
get_hooks as g,
options as o,
public_env as p,
read_implementation as r,
set_private_env as s
};


@@ -1,522 +0,0 @@
import * as devalue from "devalue";
import { t as text_decoder, b as base64_encode, c as base64_decode } from "./utils.js";
function set_nested_value(object, path_string, value) {
if (path_string.startsWith("n:")) {
path_string = path_string.slice(2);
value = value === "" ? void 0 : parseFloat(value);
} else if (path_string.startsWith("b:")) {
path_string = path_string.slice(2);
value = value === "on";
}
deep_set(object, split_path(path_string), value);
}
function convert_formdata(data) {
const result = {};
for (let key of data.keys()) {
const is_array = key.endsWith("[]");
let values = data.getAll(key);
if (is_array) key = key.slice(0, -2);
if (values.length > 1 && !is_array) {
throw new Error(`Form cannot contain duplicated keys — "${key}" has ${values.length} values`);
}
values = values.filter(
(entry) => typeof entry === "string" || entry.name !== "" || entry.size > 0
);
if (key.startsWith("n:")) {
key = key.slice(2);
values = values.map((v) => v === "" ? void 0 : parseFloat(
/** @type {string} */
v
));
} else if (key.startsWith("b:")) {
key = key.slice(2);
values = values.map((v) => v === "on");
}
set_nested_value(result, key, is_array ? values : values[0]);
}
return result;
}
const BINARY_FORM_CONTENT_TYPE = "application/x-sveltekit-formdata";
const BINARY_FORM_VERSION = 0;
async function deserialize_binary_form(request) {
if (request.headers.get("content-type") !== BINARY_FORM_CONTENT_TYPE) {
const form_data = await request.formData();
return { data: convert_formdata(form_data), meta: {}, form_data };
}
if (!request.body) {
throw new Error("Could not deserialize binary form: no body");
}
const reader = request.body.getReader();
const chunks = [];
async function get_chunk(index) {
if (index in chunks) return chunks[index];
let i = chunks.length;
while (i <= index) {
chunks[i] = reader.read().then((chunk) => chunk.value);
i++;
}
return chunks[index];
}
async function get_buffer(offset, length) {
let start_chunk;
let chunk_start = 0;
let chunk_index;
for (chunk_index = 0; ; chunk_index++) {
const chunk = await get_chunk(chunk_index);
if (!chunk) return null;
const chunk_end = chunk_start + chunk.byteLength;
if (offset >= chunk_start && offset < chunk_end) {
start_chunk = chunk;
break;
}
chunk_start = chunk_end;
}
if (offset + length <= chunk_start + start_chunk.byteLength) {
return start_chunk.subarray(offset - chunk_start, offset + length - chunk_start);
}
const buffer = new Uint8Array(length);
buffer.set(start_chunk.subarray(offset - chunk_start));
let cursor = start_chunk.byteLength - offset + chunk_start;
while (cursor < length) {
chunk_index++;
let chunk = await get_chunk(chunk_index);
if (!chunk) return null;
if (chunk.byteLength > length - cursor) {
chunk = chunk.subarray(0, length - cursor);
}
buffer.set(chunk, cursor);
cursor += chunk.byteLength;
}
return buffer;
}
const header = await get_buffer(0, 1 + 4 + 2);
if (!header) throw new Error("Could not deserialize binary form: too short");
if (header[0] !== BINARY_FORM_VERSION) {
throw new Error(
`Could not deserialize binary form: got version ${header[0]}, expected version ${BINARY_FORM_VERSION}`
);
}
const header_view = new DataView(header.buffer, header.byteOffset, header.byteLength);
const data_length = header_view.getUint32(1, true);
const file_offsets_length = header_view.getUint16(5, true);
const data_buffer = await get_buffer(1 + 4 + 2, data_length);
if (!data_buffer) throw new Error("Could not deserialize binary form: data too short");
let file_offsets;
let files_start_offset;
if (file_offsets_length > 0) {
const file_offsets_buffer = await get_buffer(1 + 4 + 2 + data_length, file_offsets_length);
if (!file_offsets_buffer)
throw new Error("Could not deserialize binary form: file offset table too short");
file_offsets = /** @type {Array<number>} */
JSON.parse(text_decoder.decode(file_offsets_buffer));
files_start_offset = 1 + 4 + 2 + data_length + file_offsets_length;
}
const [data, meta] = devalue.parse(text_decoder.decode(data_buffer), {
File: ([name, type, size, last_modified, index]) => {
return new Proxy(
new LazyFile(
name,
type,
size,
last_modified,
get_chunk,
files_start_offset + file_offsets[index]
),
{
getPrototypeOf() {
return File.prototype;
}
}
);
}
});
void (async () => {
let has_more = true;
while (has_more) {
const chunk = await get_chunk(chunks.length);
has_more = !!chunk;
}
})();
return { data, meta, form_data: null };
}
class LazyFile {
/** @type {(index: number) => Promise<Uint8Array<ArrayBuffer> | undefined>} */
#get_chunk;
/** @type {number} */
#offset;
/**
* @param {string} name
* @param {string} type
* @param {number} size
* @param {number} last_modified
* @param {(index: number) => Promise<Uint8Array<ArrayBuffer> | undefined>} get_chunk
* @param {number} offset
*/
constructor(name, type, size, last_modified, get_chunk, offset) {
this.name = name;
this.type = type;
this.size = size;
this.lastModified = last_modified;
this.webkitRelativePath = "";
this.#get_chunk = get_chunk;
this.#offset = offset;
this.arrayBuffer = this.arrayBuffer.bind(this);
this.bytes = this.bytes.bind(this);
this.slice = this.slice.bind(this);
this.stream = this.stream.bind(this);
this.text = this.text.bind(this);
}
/** @type {ArrayBuffer | undefined} */
#buffer;
async arrayBuffer() {
this.#buffer ??= await new Response(this.stream()).arrayBuffer();
return this.#buffer;
}
async bytes() {
return new Uint8Array(await this.arrayBuffer());
}
/**
* @param {number=} start
* @param {number=} end
* @param {string=} contentType
*/
slice(start = 0, end = this.size, contentType = this.type) {
if (start < 0) {
start = Math.max(this.size + start, 0);
} else {
start = Math.min(start, this.size);
}
if (end < 0) {
end = Math.max(this.size + end, 0);
} else {
end = Math.min(end, this.size);
}
const size = Math.max(end - start, 0);
const file = new LazyFile(
this.name,
contentType,
size,
this.lastModified,
this.#get_chunk,
this.#offset + start
);
return file;
}
stream() {
let cursor = 0;
let chunk_index = 0;
return new ReadableStream({
start: async (controller) => {
let chunk_start = 0;
let start_chunk = null;
for (chunk_index = 0; ; chunk_index++) {
const chunk = await this.#get_chunk(chunk_index);
if (!chunk) return null;
const chunk_end = chunk_start + chunk.byteLength;
if (this.#offset >= chunk_start && this.#offset < chunk_end) {
start_chunk = chunk;
break;
}
chunk_start = chunk_end;
}
if (this.#offset + this.size <= chunk_start + start_chunk.byteLength) {
controller.enqueue(
start_chunk.subarray(this.#offset - chunk_start, this.#offset + this.size - chunk_start)
);
controller.close();
} else {
controller.enqueue(start_chunk.subarray(this.#offset - chunk_start));
cursor = start_chunk.byteLength - this.#offset + chunk_start;
}
},
pull: async (controller) => {
chunk_index++;
let chunk = await this.#get_chunk(chunk_index);
if (!chunk) {
controller.error("Could not deserialize binary form: incomplete file data");
controller.close();
return;
}
if (chunk.byteLength > this.size - cursor) {
chunk = chunk.subarray(0, this.size - cursor);
}
controller.enqueue(chunk);
cursor += chunk.byteLength;
if (cursor >= this.size) {
controller.close();
}
}
});
}
async text() {
return text_decoder.decode(await this.arrayBuffer());
}
}
const path_regex = /^[a-zA-Z_$]\w*(\.[a-zA-Z_$]\w*|\[\d+\])*$/;
function split_path(path) {
if (!path_regex.test(path)) {
throw new Error(`Invalid path ${path}`);
}
return path.split(/\.|\[|\]/).filter(Boolean);
}
function check_prototype_pollution(key) {
if (key === "__proto__" || key === "constructor" || key === "prototype") {
throw new Error(
`Invalid key "${key}"`
);
}
}
function deep_set(object, keys, value) {
let current = object;
for (let i = 0; i < keys.length - 1; i += 1) {
const key = keys[i];
check_prototype_pollution(key);
const is_array = /^\d+$/.test(keys[i + 1]);
const exists = key in current;
const inner = current[key];
if (exists && is_array !== Array.isArray(inner)) {
throw new Error(`Invalid array key ${keys[i + 1]}`);
}
if (!exists) {
current[key] = is_array ? [] : {};
}
current = current[key];
}
const final_key = keys[keys.length - 1];
check_prototype_pollution(final_key);
current[final_key] = value;
}
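/**
 * Converts a validation issue into `{ name, path, message, server }`, where
 * `name` is the dot/bracket path string of the offending field.
 */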
function normalize_issue(issue, server = false) {
const normalized = { name: "", path: [], message: issue.message, server };
if (issue.path !== void 0) {
let name = "";
for (const segment of issue.path) {
const key = (
/** @type {string | number} */
typeof segment === "object" ? segment.key : segment
);
normalized.path.push(key);
if (typeof key === "number") {
name += `[${key}]`;
} else if (typeof key === "string") {
name += name === "" ? key : "." + key;
}
}
normalized.name = name;
}
return normalized;
}
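/**
 * Groups issues by field path: every issue is recorded under `$` (the form root)
 * and under each prefix of its path (`a`, `a.b`, `a.b[0]`, ...).
 */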
function flatten_issues(issues) {
const result = {};
for (const issue of issues) {
(result.$ ??= []).push(issue);
let name = "";
if (issue.path !== void 0) {
for (const key of issue.path) {
if (typeof key === "number") {
name += `[${key}]`;
} else if (typeof key === "string") {
name += name === "" ? key : "." + key;
}
(result[name] ??= []).push(issue);
}
}
}
return result;
}
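/** Reads the value at `path` within `object`, returning early if a non-object is reached along the way */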
function deep_get(object, path) {
let current = object;
for (const key of path) {
if (current == null || typeof current !== "object") {
return current;
}
current = current[key];
}
return current;
}
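/**
 * Recursive Proxy used to address individual form fields: property access
 * extends the current path, while `set`, `value`, `issues`/`allIssues` and
 * `as(type, value)` expose a setter, the current value, matching validation
 * issues and spreadable attributes for the corresponding input element.
 */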
function create_field_proxy(target, get_input, set_input, get_issues, path = []) {
const get_value = () => {
return deep_get(get_input(), path);
};
return new Proxy(target, {
get(target2, prop) {
if (typeof prop === "symbol") return target2[prop];
if (/^\d+$/.test(prop)) {
return create_field_proxy({}, get_input, set_input, get_issues, [
...path,
parseInt(prop, 10)
]);
}
const key = build_path_string(path);
if (prop === "set") {
const set_func = function(newValue) {
set_input(path, newValue);
return newValue;
};
return create_field_proxy(set_func, get_input, set_input, get_issues, [...path, prop]);
}
if (prop === "value") {
return create_field_proxy(get_value, get_input, set_input, get_issues, [...path, prop]);
}
if (prop === "issues" || prop === "allIssues") {
const issues_func = () => {
const all_issues = get_issues()[key === "" ? "$" : key];
if (prop === "allIssues") {
return all_issues?.map((issue) => ({
path: issue.path,
message: issue.message
}));
}
return all_issues?.filter((issue) => issue.name === key)?.map((issue) => ({
path: issue.path,
message: issue.message
}));
};
return create_field_proxy(issues_func, get_input, set_input, get_issues, [...path, prop]);
}
if (prop === "as") {
const as_func = (type, input_value) => {
const is_array = type === "file multiple" || type === "select multiple" || type === "checkbox" && typeof input_value === "string";
const prefix = type === "number" || type === "range" ? "n:" : type === "checkbox" && !is_array ? "b:" : "";
const base_props = {
name: prefix + key + (is_array ? "[]" : ""),
get "aria-invalid"() {
const issues = get_issues();
return key in issues ? "true" : void 0;
}
};
if (type !== "text" && type !== "select" && type !== "select multiple") {
base_props.type = type === "file multiple" ? "file" : type;
}
if (type === "submit" || type === "hidden") {
return Object.defineProperties(base_props, {
value: { value: input_value, enumerable: true }
});
}
if (type === "select" || type === "select multiple") {
return Object.defineProperties(base_props, {
multiple: { value: is_array, enumerable: true },
value: {
enumerable: true,
get() {
return get_value();
}
}
});
}
if (type === "checkbox" || type === "radio") {
return Object.defineProperties(base_props, {
value: { value: input_value ?? "on", enumerable: true },
checked: {
enumerable: true,
get() {
const value = get_value();
if (type === "radio") {
return value === input_value;
}
if (is_array) {
return (value ?? []).includes(input_value);
}
return value;
}
}
});
}
if (type === "file" || type === "file multiple") {
return Object.defineProperties(base_props, {
multiple: { value: is_array, enumerable: true },
files: {
enumerable: true,
get() {
const value = get_value();
if (value instanceof File) {
if (typeof DataTransfer !== "undefined") {
const fileList = new DataTransfer();
fileList.items.add(value);
return fileList.files;
}
return { 0: value, length: 1 };
}
if (Array.isArray(value) && value.every((f) => f instanceof File)) {
if (typeof DataTransfer !== "undefined") {
const fileList = new DataTransfer();
value.forEach((file) => fileList.items.add(file));
return fileList.files;
}
const fileListLike = { length: value.length };
value.forEach((file, index) => {
fileListLike[index] = file;
});
return fileListLike;
}
return null;
}
}
});
}
return Object.defineProperties(base_props, {
value: {
enumerable: true,
get() {
const value = get_value();
return value != null ? String(value) : "";
}
}
});
};
return create_field_proxy(as_func, get_input, set_input, get_issues, [...path, "as"]);
}
return create_field_proxy({}, get_input, set_input, get_issues, [...path, prop]);
}
});
}
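/** Joins path segments into a dot/bracket string, e.g. ["a", 0, "b"] -> "a[0].b" */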
function build_path_string(path) {
let result = "";
for (const segment of path) {
if (typeof segment === "number") {
result += `[${segment}]`;
} else {
result += result === "" ? segment : "." + segment;
}
}
return result;
}
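// Query parameter names used internally by SvelteKit for data requests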
const INVALIDATED_PARAM = "x-sveltekit-invalidated";
const TRAILING_SLASH_PARAM = "x-sveltekit-trailing-slash";
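/** Serializes `data` with devalue.stringify, applying the app's transport encoders */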
function stringify(data, transport) {
const encoders = Object.fromEntries(Object.entries(transport).map(([k, v]) => [k, v.encode]));
return devalue.stringify(data, encoders);
}
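/**
 * Encodes a remote function argument as URL-safe base64 of its devalue
 * serialization (padding stripped, `+`/`/` replaced with `-`/`_`).
 */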
function stringify_remote_arg(value, transport) {
if (value === void 0) return "";
const json_string = stringify(value, transport);
const bytes = new TextEncoder().encode(json_string);
return base64_encode(bytes).replaceAll("=", "").replaceAll("+", "-").replaceAll("/", "_");
}
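/** Inverse of `stringify_remote_arg`: restores the URL-safe base64 payload and revives it with the transport decoders */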
function parse_remote_arg(string, transport) {
if (!string) return void 0;
const json_string = text_decoder.decode(
// no need to add back `=` characters, atob can handle it
base64_decode(string.replaceAll("-", "+").replaceAll("_", "/"))
);
const decoders = Object.fromEntries(Object.entries(transport).map(([k, v]) => [k, v.decode]));
return devalue.parse(json_string, decoders);
}
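/** Builds the key identifying a remote function call: the function id joined with its serialized payload */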
function create_remote_key(id, payload) {
return id + "/" + payload;
}
export {
BINARY_FORM_CONTENT_TYPE as B,
INVALIDATED_PARAM as I,
TRAILING_SLASH_PARAM as T,
stringify_remote_arg as a,
create_field_proxy as b,
create_remote_key as c,
deserialize_binary_form as d,
set_nested_value as e,
flatten_issues as f,
deep_set as g,
normalize_issue as n,
parse_remote_arg as p,
stringify as s
};
