Compare commits: ec8d67c956 ... 26ba015b75 (7 commits)

| SHA1 |
|---|
| 26ba015b75 |
| 49129d3e86 |
| d99a13d91f |
| 203ce446f4 |
| c96d50a3f4 |
| 3bbe320949 |
| 2d2435642d |
.gitignore (vendored, 3 lines changed)
@@ -59,9 +59,10 @@ Thumbs.db
*.ps1
keyring passwords.py
*github*
*git*

*tech_spec*
dashboards
backend/mappings.db


backend/tasks.db
@@ -1 +1 @@
{"mcpServers":{"tavily":{"command":"npx","args":["-y","tavily-mcp@0.2.3"],"env":{"TAVILY_API_KEY":"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"},"alwaysAllow":[]}}}
{"mcpServers":{}}
@@ -20,6 +20,8 @@ Auto-generated from all feature plans. Last updated: 2025-12-19
- SQLite (`tasks.db`), JSON (`config.json`) (009-backup-scheduler)
- Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, `superset_tool` (internal lib) (010-refactor-cli-to-web)
- SQLite (for job history/results, connection configs), Filesystem (for temporary file uploads) (010-refactor-cli-to-web)
- Python 3.9+ + FastAPI, Pydantic, requests, pyyaml (migrated from superset_tool) (012-remove-superset-tool)
- SQLite (tasks.db, migrations.db), Filesystem (012-remove-superset-tool)

- Python 3.9+ (Backend), Node.js 18+ (Frontend Build) (001-plugin-arch-svelte-ui)

@@ -40,9 +42,9 @@ cd src; pytest; ruff check .
Python 3.9+ (Backend), Node.js 18+ (Frontend Build): Follow standard conventions

## Recent Changes
- 012-remove-superset-tool: Added Python 3.9+ + FastAPI, Pydantic, requests, pyyaml (migrated from superset_tool)
- 010-refactor-cli-to-web: Added Python 3.9+ (Backend), Node.js 18+ (Frontend) + FastAPI, SvelteKit, Tailwind CSS, Pydantic, SQLAlchemy, `superset_tool` (internal lib)
- 009-backup-scheduler: Added Python 3.9+, Node.js 18+ + FastAPI, APScheduler, SQLAlchemy, SvelteKit, Tailwind CSS
- 009-backup-scheduler: Added Python 3.9+, Node.js 18+ + FastAPI, APScheduler, SQLAlchemy, SvelteKit, Tailwind CSS


<!-- MANUAL ADDITIONS START -->
backend/delete_running_tasks.py (new file, 35 lines)
@@ -0,0 +1,35 @@
#!/usr/bin/env python3
"""Script to delete tasks with RUNNING status from the database."""

from sqlalchemy.orm import Session
from src.core.database import TasksSessionLocal
from src.models.task import TaskRecord


def delete_running_tasks():
    """Delete all tasks with RUNNING status from the database."""
    session: Session = TasksSessionLocal()
    try:
        # Find all task records with RUNNING status
        running_tasks = session.query(TaskRecord).filter(TaskRecord.status == "RUNNING").all()

        if not running_tasks:
            print("No RUNNING tasks found.")
            return

        print(f"Found {len(running_tasks)} RUNNING tasks:")
        for task in running_tasks:
            print(f"- Task ID: {task.id}, Type: {task.type}")

        # Delete the found tasks
        session.query(TaskRecord).filter(TaskRecord.status == "RUNNING").delete(synchronize_session=False)
        session.commit()

        print(f"Successfully deleted {len(running_tasks)} RUNNING tasks.")
    except Exception as e:
        session.rollback()
        print(f"Error deleting tasks: {e}")
    finally:
        session.close()


if __name__ == "__main__":
    delete_running_tasks()
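The new script assumes it is run with `backend/` as the working directory so that the `src.core...` imports resolve. A read-only companion check, reusing only the session factory and model referenced above (a sketch, not part of this commit), could look like:

```python
# Hypothetical dry-run counterpart to delete_running_tasks.py (not in this diff).
from src.core.database import TasksSessionLocal
from src.models.task import TaskRecord


def count_running_tasks() -> int:
    """Count RUNNING task records without deleting anything."""
    session = TasksSessionLocal()
    try:
        # Same filter the deletion script applies, but read-only.
        return session.query(TaskRecord).filter(TaskRecord.status == "RUNNING").count()
    finally:
        session.close()


if __name__ == "__main__":
    print(f"{count_running_tasks()} RUNNING task(s) would be deleted.")
```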
@@ -43,3 +43,4 @@ uvicorn==0.38.0
websockets==15.0.1
pandas
psycopg2-binary
openpyxl
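The three new requirements back the `DatasetMapper` code that moves into `backend/src/core/utils/` later in this diff: `psycopg2-binary` for reading column comments from PostgreSQL, and `pandas` plus `openpyxl` for the XLSX mappings (pandas uses `openpyxl` as its engine for `.xlsx` workbooks). A minimal sketch of that dependency in use, with an illustrative file name:

```python
import pandas as pd

# openpyxl is the engine pandas uses to open .xlsx workbooks; the file name is illustrative.
df = pd.read_excel("verbose_name_mappings.xlsx", engine="openpyxl")
# Same transformation DatasetMapper.load_excel_mappings applies further down in this diff.
mappings = df.set_index("column_name")["verbose_name"].to_dict()
```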
@@ -11,12 +11,11 @@
# [SECTION: IMPORTS]
from fastapi import APIRouter, Depends, HTTPException
from typing import List, Dict, Optional
from backend.src.dependencies import get_config_manager, get_scheduler_service
from backend.src.core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
from ...dependencies import get_config_manager, get_scheduler_service
from ...core.superset_client import SupersetClient
from pydantic import BaseModel, Field
from backend.src.core.config_models import Environment as EnvModel
from backend.src.core.logger import belief_scope
from ...core.config_models import Environment as EnvModel
from ...core.logger import belief_scope
# [/SECTION]

router = APIRouter()
@@ -114,18 +113,7 @@ async def get_environment_databases(id: str, config_manager=Depends(get_config_m

    try:
        # Initialize SupersetClient from environment config
        # Note: We need to map Environment model to SupersetConfig
        superset_config = SupersetConfig(
            env=env.name,
            base_url=env.url,
            auth={
                "provider": "db",  # Defaulting to db provider
                "username": env.username,
                "password": env.password,
                "refresh": "false"
            }
        )
        client = SupersetClient(superset_config)
        client = SupersetClient(env)
        return client.get_databases_summary()
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to fetch databases: {str(e)}")
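The pattern above repeats throughout this changeset: handlers stop hand-assembling a `SupersetConfig` and instead pass the stored `Environment` record straight to the refactored client. A minimal sketch of the new call path (environment id and error handling are illustrative; `get_environment` is the `ConfigManager` helper added later in this diff):

```python
env = config_manager.get_environment("prod")  # hypothetical environment id
if env is None:
    raise HTTPException(status_code=404, detail="Environment not found")

client = SupersetClient(env)                  # was: SupersetClient(SupersetConfig(...))
databases = client.get_databases_summary()    # [{"uuid": ..., "database_name": ..., "engine": ...}, ...]
```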
@@ -13,9 +13,10 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional
from backend.src.dependencies import get_config_manager
from backend.src.core.database import get_db
from backend.src.models.mapping import DatabaseMapping
from ...core.logger import belief_scope
from ...dependencies import get_config_manager
from ...core.database import get_db
from ...models.mapping import DatabaseMapping
from pydantic import BaseModel
# [/SECTION]
@@ -7,10 +7,10 @@

from fastapi import APIRouter, Depends, HTTPException
from typing import List, Dict
from backend.src.dependencies import get_config_manager, get_task_manager
from backend.src.models.dashboard import DashboardMetadata, DashboardSelection
from backend.src.core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
from ...dependencies import get_config_manager, get_task_manager
from ...models.dashboard import DashboardMetadata, DashboardSelection
from ...core.superset_client import SupersetClient
from ...core.logger import belief_scope

router = APIRouter(prefix="/api", tags=["migration"])
@@ -22,19 +22,13 @@ router = APIRouter(prefix="/api", tags=["migration"])
# @RETURN: List[DashboardMetadata]
@router.get("/environments/{env_id}/dashboards", response_model=List[DashboardMetadata])
async def get_dashboards(env_id: str, config_manager=Depends(get_config_manager)):
    with belief_scope("get_dashboards", f"env_id={env_id}"):
        environments = config_manager.get_environments()
        env = next((e for e in environments if e.id == env_id), None)
        if not env:
            raise HTTPException(status_code=404, detail="Environment not found")

        config = SupersetConfig(
            env=env.name,
            base_url=env.url,
            auth={'provider': 'db', 'username': env.username, 'password': env.password, 'refresh': False},
            verify_ssl=True,
            timeout=30
        )
        client = SupersetClient(config)
        client = SupersetClient(env)
        dashboards = client.get_dashboards_summary()
        return dashboards
# [/DEF:get_dashboards:Function]
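A hedged sketch of exercising this endpoint with FastAPI's test client; the `app` import path is assumed from the `backend/src/app.py` changes below, and the environment id is illustrative:

```python
from fastapi.testclient import TestClient

from backend.src.app import app  # assumed module path

client = TestClient(app)

# The router is mounted with prefix="/api", so the full path includes it.
resp = client.get("/api/environments/prod/dashboards")
assert resp.status_code in (200, 404)  # 404 if the "prod" environment is not configured
```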
@@ -47,6 +41,7 @@ async def get_dashboards(env_id: str, config_manager=Depends(get_config_manager)
# @RETURN: Dict - {"task_id": str, "message": str}
@router.post("/migration/execute")
async def execute_migration(selection: DashboardSelection, config_manager=Depends(get_config_manager), task_manager=Depends(get_task_manager)):
    with belief_scope("execute_migration"):
        # Validate environments exist
        environments = config_manager.get_environments()
        env_ids = {e.id for e in environments}
@@ -17,7 +17,6 @@ from ...dependencies import get_config_manager
from ...core.config_manager import ConfigManager
from ...core.logger import logger, belief_scope
from ...core.superset_client import SupersetClient
from superset_tool.models import SupersetConfig
import os
# [/SECTION]
@@ -28,7 +27,7 @@ router = APIRouter()
# @PRE: Config manager is available.
# @POST: Returns masked AppConfig.
# @RETURN: AppConfig - The current configuration.
@router.get("/", response_model=AppConfig)
@router.get("", response_model=AppConfig)
async def get_settings(config_manager: ConfigManager = Depends(get_config_manager)):
    with belief_scope("get_settings"):
        logger.info("[get_settings][Entry] Fetching all settings")
@@ -85,17 +84,7 @@ async def add_environment(

    # Validate connection before adding
    try:
        superset_config = SupersetConfig(
            env=env.name,
            base_url=env.url,
            auth={
                "provider": "db",
                "username": env.username,
                "password": env.password,
                "refresh": "true"
            }
        )
        client = SupersetClient(config=superset_config)
        client = SupersetClient(env)
        client.get_dashboards(query={"page_size": 1})
    except Exception as e:
        logger.error(f"[add_environment][Coherence:Failed] Connection validation failed: {e}")
@@ -130,17 +119,7 @@ async def update_environment(

    # Validate connection before updating
    try:
        superset_config = SupersetConfig(
            env=env_to_validate.name,
            base_url=env_to_validate.url,
            auth={
                "provider": "db",
                "username": env_to_validate.username,
                "password": env_to_validate.password,
                "refresh": "true"
            }
        )
        client = SupersetClient(config=superset_config)
        client = SupersetClient(env_to_validate)
        client.get_dashboards(query={"page_size": 1})
    except Exception as e:
        logger.error(f"[update_environment][Coherence:Failed] Connection validation failed: {e}")
@@ -187,21 +166,8 @@ async def test_environment_connection(
        raise HTTPException(status_code=404, detail=f"Environment {id} not found")

    try:
        # Create SupersetConfig
        # Note: SupersetConfig expects 'auth' dict with specific keys
        superset_config = SupersetConfig(
            env=env.name,
            base_url=env.url,
            auth={
                "provider": "db",  # Defaulting to db for now
                "username": env.username,
                "password": env.password,
                "refresh": "true"
            }
        )

        # Initialize client (this will trigger authentication)
        client = SupersetClient(config=superset_config)
        client = SupersetClient(env)

        # Try a simple request to verify
        client.get_dashboards(query={"page_size": 1})
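The same three-line connectivity probe (`SupersetClient(env)` followed by a one-item `get_dashboards` call) now appears in `add_environment`, `update_environment`, and `test_environment_connection`. A small helper could express it once; this is a sketch, not code from the diff:

```python
def validate_connection(env) -> None:
    """Raise if the Superset environment cannot be reached with the stored credentials (sketch)."""
    client = SupersetClient(env)
    # Cheapest authenticated request: ask for a single dashboard page.
    client.get_dashboards(query={"page_size": 1})
```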
@@ -6,10 +6,8 @@
import sys
from pathlib import Path

# Add project root to sys.path to allow importing superset_tool
# Assuming app.py is in backend/src/
# project_root is used for static files mounting
project_root = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(project_root))

from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, Request, HTTPException
from fastapi.middleware.cors import CORSMiddleware

@@ -167,7 +165,8 @@ if frontend_path.exists():
    with belief_scope("serve_spa", f"path={file_path}"):
        # Don't serve SPA for API routes that fell through
        if file_path.startswith("api/"):
            raise HTTPException(status_code=404, detail="API endpoint not found")
            logger.info(f"[DEBUG] API route fell through to serve_spa: {file_path}")
            raise HTTPException(status_code=404, detail=f"API endpoint not found: {file_path}")

        full_path = frontend_path / file_path
        if full_path.is_file():
@@ -186,6 +186,20 @@ class ConfigManager:
        return len(self.config.environments) > 0
    # [/DEF:has_environments:Function]

    # [DEF:get_environment:Function]
    # @PURPOSE: Returns a single environment by ID.
    # @PRE: self.config is set and isinstance(env_id, str) and len(env_id) > 0.
    # @POST: Returns Environment object if found, None otherwise.
    # @PARAM: env_id (str) - The ID of the environment to retrieve.
    # @RETURN: Optional[Environment] - The environment with the given ID, or None.
    def get_environment(self, env_id: str) -> Optional[Environment]:
        with belief_scope("get_environment"):
            for env in self.config.environments:
                if env.id == env_id:
                    return env
            return None
    # [/DEF:get_environment:Function]

    # [DEF:add_environment:Function]
    # @PURPOSE: Adds a new environment to the configuration.
    # @PRE: isinstance(env, Environment)
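With `get_environment` available on `ConfigManager`, handlers can replace the `next(...)` scan over `get_environments()` seen in the migration router with a direct lookup. A sketch of the before/after:

```python
# Before (as in the migration router earlier in this diff):
env = next((e for e in config_manager.get_environments() if e.id == env_id), None)

# After (using the new helper):
env = config_manager.get_environment(env_id)
if env is None:
    raise HTTPException(status_code=404, detail="Environment not found")
```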
@@ -23,6 +23,8 @@ class Environment(BaseModel):
    url: str
    username: str
    password: str  # Will be masked in UI
    verify_ssl: bool = True
    timeout: int = 30
    is_default: bool = False
    backup_schedule: Schedule = Field(default_factory=Schedule)
# [/DEF:Environment:DataClass]
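The two new fields give each environment its own TLS and timeout behaviour, with defaults matching the values previously hard-coded in the routers (`verify_ssl=True`, `timeout=30`). A sketch of a record using them; field values are illustrative, and the `id`/`name` fields are assumed from their use elsewhere in this diff:

```python
env = Environment(
    id="prod",
    name="prod",
    url="https://superset.example.com",
    username="svc_superset",
    password="***",
    verify_ssl=False,  # e.g. an internal instance with a self-signed certificate
    timeout=60,        # a slower instance gets a longer per-request timeout
)
```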
@@ -29,11 +29,10 @@ class BeliefFormatter(logging.Formatter):
    # @PARAM: record (logging.LogRecord) - The log record to format.
    # @RETURN: str - The formatted log message.
    def format(self, record):
        msg = super().format(record)
        anchor_id = getattr(_belief_state, 'anchor_id', None)
        if anchor_id:
            msg = f"[{anchor_id}][Action] {msg}"
        return msg
            record.msg = f"[{anchor_id}][Action] {record.msg}"
        return super().format(record)
    # [/DEF:format:Function]
# [/DEF:BeliefFormatter:Class]
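The rewritten `format` moves the anchor prefix onto `record.msg` before delegating to `logging.Formatter.format`, so the prefix lands where `%(message)s` appears in the format string instead of in front of the whole formatted line (timestamp, level, and so on). A standalone illustration of the difference:

```python
import logging

fmt = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
rec = logging.LogRecord("demo", logging.INFO, __file__, 1, "payload", None, None)

# Old behaviour: prefix the already-formatted line -> "[anchor][Action] 2025-... INFO payload"
old = f"[anchor][Action] {fmt.format(rec)}"

# New behaviour: prefix the message itself -> "2025-... INFO [anchor][Action] payload"
rec.msg = f"[anchor][Action] {rec.msg}"
new = fmt.format(rec)

print(old)
print(new)
```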
@@ -193,6 +192,18 @@ class WebSocketLogHandler(logging.Handler):
# @SEMANTICS: logger, global, instance
# @PURPOSE: The global logger instance for the application, configured with both a console handler and the custom WebSocket handler.
logger = logging.getLogger("superset_tools_app")

# [DEF:believed:Function]
# @PURPOSE: A decorator that wraps a function in a belief scope.
# @PARAM: anchor_id (str) - The identifier for the semantic block.
def believed(anchor_id: str):
    def decorator(func):
        def wrapper(*args, **kwargs):
            with belief_scope(anchor_id):
                return func(*args, **kwargs)
        return wrapper
    return decorator
# [/DEF:believed:Function]

logger.setLevel(logging.INFO)

# Create a formatter
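A usage sketch for the new `believed` decorator; the decorated function is hypothetical. As written, the wrapper does not apply `functools.wraps`, so the wrapped function's name and docstring are not preserved on the wrapper.

```python
@believed("ConfigManager.reload")  # anchor id is illustrative
def reload_config():
    # Body runs inside belief_scope("ConfigManager.reload"),
    # so log lines emitted here carry the anchor prefix.
    ...
```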
@@ -1,74 +1,102 @@
# [DEF:backend.src.core.superset_client:Module]
#
# @SEMANTICS: superset, api, client, database, metadata
# @PURPOSE: Extends the base SupersetClient with database-specific metadata fetching.
# @SEMANTICS: superset, api, client, rest, http, dashboard, dataset, import, export
# @PURPOSE: Provides a high-level client for interacting with the Superset REST API, encapsulating request logic, error handling, and pagination.
# @LAYER: Core
# @RELATION: INHERITS_FROM -> superset_tool.client.SupersetClient
# @RELATION: USES -> backend.src.core.utils.network.APIClient
# @RELATION: USES -> backend.src.core.config_models.Environment
#
# @INVARIANT: All database metadata requests must include UUID and name.
# @INVARIANT: All network operations must use the internal APIClient instance.
# @PUBLIC_API: SupersetClient

# [SECTION: IMPORTS]
from typing import List, Dict, Optional, Tuple
from backend.src.core.logger import belief_scope
from superset_tool.client import SupersetClient as BaseSupersetClient
from superset_tool.models import SupersetConfig
import json
import zipfile
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union, cast
from requests import Response
from .logger import logger as app_logger, belief_scope
from .utils.network import APIClient, SupersetAPIError, AuthenticationError, DashboardNotFoundError, NetworkError
from .utils.fileio import get_filename_from_headers
from .config_models import Environment
# [/SECTION]

# [DEF:SupersetClient:Class]
# @PURPOSE: Extended SupersetClient for migration-specific operations.
class SupersetClient(BaseSupersetClient):

    # [DEF:get_databases_summary:Function]
    # @PURPOSE: Fetch a summary of databases including uuid, name, and engine.
    # @PRE: self.network must be initialized and authenticated.
    # @POST: Returns a list of database dictionaries with 'engine' field.
    # @RETURN: List[Dict] - Summary of databases.
    def get_databases_summary(self) -> List[Dict]:
        with belief_scope("SupersetClient.get_databases_summary"):
            """
            Fetch a summary of databases including uuid, name, and engine.
            """
            query = {
                "columns": ["uuid", "database_name", "backend"]
# @PURPOSE: Wrapper class over the Superset REST API that provides methods for working with dashboards and datasets.
class SupersetClient:
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the client, validates the configuration, and creates the network client.
    # @PRE: `env` must be a valid Environment object.
    # @POST: The `env` and `network` attributes are created and ready for use.
    # @PARAM: env (Environment) - The environment configuration.
    def __init__(self, env: Environment):
        with belief_scope("__init__"):
            app_logger.info("[SupersetClient.__init__][Enter] Initializing SupersetClient for env %s.", env.name)
            self.env = env
            # Construct auth payload expected by Superset API
            auth_payload = {
                "username": env.username,
                "password": env.password,
                "provider": "db",
                "refresh": "true"
            }
            _, databases = self.get_databases(query=query)
            self.network = APIClient(
                config={
                    "base_url": env.url,
                    "auth": auth_payload
                },
                verify_ssl=env.verify_ssl,
                timeout=env.timeout
            )
            self.delete_before_reimport: bool = False
            app_logger.info("[SupersetClient.__init__][Exit] SupersetClient initialized.")
    # [/DEF:__init__:Function]
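A short sketch of the rewritten client in use, based only on the constructor and methods shown in this file; the environment object is assumed to come from `ConfigManager`:

```python
from pathlib import Path

env = config_manager.get_environment("prod")   # hypothetical environment id
client = SupersetClient(env)                   # APIClient is built from env.url, credentials, verify_ssl, timeout

total, dashboards = client.get_dashboards()    # paginated fetch: (total count, list of dicts)
content, filename = client.export_dashboard(dashboards[0]["id"])
Path(filename).write_bytes(content)            # persist the exported ZIP archive locally
```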
# Map 'backend' to 'engine' for consistency with contracts
|
||||
for db in databases:
|
||||
db['engine'] = db.pop('backend', None)
|
||||
# [DEF:authenticate:Function]
|
||||
# @PURPOSE: Authenticates the client using the configured credentials.
|
||||
# @PRE: self.network must be initialized with valid auth configuration.
|
||||
# @POST: Client is authenticated and tokens are stored.
|
||||
# @RETURN: Dict[str, str] - Authentication tokens.
|
||||
def authenticate(self) -> Dict[str, str]:
|
||||
with belief_scope("SupersetClient.authenticate"):
|
||||
return self.network.authenticate()
|
||||
# [/DEF:authenticate:Function]
|
||||
|
||||
return databases
|
||||
# [/DEF:get_databases_summary:Function]
|
||||
@property
|
||||
# [DEF:headers:Function]
|
||||
# @PURPOSE: Возвращает базовые HTTP-заголовки, используемые сетевым клиентом.
|
||||
def headers(self) -> dict:
|
||||
with belief_scope("headers"):
|
||||
return self.network.headers
|
||||
# [/DEF:headers:Function]
|
||||
|
||||
# [DEF:get_database_by_uuid:Function]
|
||||
# @PURPOSE: Find a database by its UUID.
|
||||
# @PRE: db_uuid must be a string.
|
||||
# @POST: Returns database metadata if found.
|
||||
# @PARAM: db_uuid (str) - The UUID of the database.
|
||||
# @RETURN: Optional[Dict] - Database info if found, else None.
|
||||
def get_database_by_uuid(self, db_uuid: str) -> Optional[Dict]:
|
||||
with belief_scope("SupersetClient.get_database_by_uuid", f"uuid={db_uuid}"):
|
||||
"""
|
||||
Find a database by its UUID.
|
||||
"""
|
||||
query = {
|
||||
"filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
|
||||
}
|
||||
_, databases = self.get_databases(query=query)
|
||||
return databases[0] if databases else None
|
||||
# [/DEF:get_database_by_uuid:Function]
|
||||
# [SECTION: DASHBOARD OPERATIONS]
|
||||
|
||||
# [DEF:get_dashboards:Function]
|
||||
# @PURPOSE: Получает полный список дашбордов, автоматически обрабатывая пагинацию.
|
||||
# @PARAM: query (Optional[Dict]) - Дополнительные параметры запроса для API.
|
||||
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список дашбордов).
|
||||
def get_dashboards(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
|
||||
with belief_scope("get_dashboards"):
|
||||
app_logger.info("[get_dashboards][Enter] Fetching dashboards.")
|
||||
validated_query = self._validate_query_params(query or {})
|
||||
if 'columns' not in validated_query:
|
||||
validated_query['columns'] = ["slug", "id", "changed_on_utc", "dashboard_title", "published"]
|
||||
|
||||
total_count = self._fetch_total_object_count(endpoint="/dashboard/")
|
||||
paginated_data = self._fetch_all_pages(
|
||||
endpoint="/dashboard/",
|
||||
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
|
||||
)
|
||||
app_logger.info("[get_dashboards][Exit] Found %d dashboards.", total_count)
|
||||
return total_count, paginated_data
|
||||
# [/DEF:get_dashboards:Function]
|
||||
|
||||
# [DEF:get_dashboards_summary:Function]
|
||||
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
|
||||
# @PRE: self.network must be authenticated.
|
||||
# @POST: Returns a list of dashboard dictionaries mapped to the grid schema.
|
||||
# @RETURN: List[Dict]
|
||||
def get_dashboards_summary(self) -> List[Dict]:
|
||||
with belief_scope("SupersetClient.get_dashboards_summary"):
|
||||
"""
|
||||
Fetches dashboard metadata optimized for the grid.
|
||||
Returns a list of dictionaries mapped to DashboardMetadata fields.
|
||||
"""
|
||||
query = {
|
||||
"columns": ["id", "dashboard_title", "changed_on_utc", "published"]
|
||||
}
|
||||
@@ -86,34 +114,287 @@ class SupersetClient(BaseSupersetClient):
|
||||
return result
|
||||
# [/DEF:get_dashboards_summary:Function]
|
||||
|
||||
# [DEF:export_dashboard:Function]
|
||||
# @PURPOSE: Экспортирует дашборд в виде ZIP-архива.
|
||||
# @PARAM: dashboard_id (int) - ID дашборда для экспорта.
|
||||
# @RETURN: Tuple[bytes, str] - Бинарное содержимое ZIP-архива и имя файла.
|
||||
def export_dashboard(self, dashboard_id: int) -> Tuple[bytes, str]:
|
||||
with belief_scope("export_dashboard"):
|
||||
app_logger.info("[export_dashboard][Enter] Exporting dashboard %s.", dashboard_id)
|
||||
response = self.network.request(
|
||||
method="GET",
|
||||
endpoint="/dashboard/export/",
|
||||
params={"q": json.dumps([dashboard_id])},
|
||||
stream=True,
|
||||
raw_response=True,
|
||||
)
|
||||
response = cast(Response, response)
|
||||
self._validate_export_response(response, dashboard_id)
|
||||
filename = self._resolve_export_filename(response, dashboard_id)
|
||||
app_logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename)
|
||||
return response.content, filename
|
||||
# [/DEF:export_dashboard:Function]
|
||||
|
||||
# [DEF:import_dashboard:Function]
|
||||
# @PURPOSE: Импортирует дашборд из ZIP-файла.
|
||||
# @PARAM: file_name (Union[str, Path]) - Путь к ZIP-архиву.
|
||||
# @PARAM: dash_id (Optional[int]) - ID дашборда для удаления при сбое.
|
||||
# @PARAM: dash_slug (Optional[str]) - Slug дашборда для поиска ID.
|
||||
# @RETURN: Dict - Ответ API в случае успеха.
|
||||
def import_dashboard(self, file_name: Union[str, Path], dash_id: Optional[int] = None, dash_slug: Optional[str] = None) -> Dict:
|
||||
with belief_scope("import_dashboard"):
|
||||
file_path = str(file_name)
|
||||
self._validate_import_file(file_path)
|
||||
try:
|
||||
return self._do_import(file_path)
|
||||
except Exception as exc:
|
||||
app_logger.error("[import_dashboard][Failure] First import attempt failed: %s", exc, exc_info=True)
|
||||
if not self.delete_before_reimport:
|
||||
raise
|
||||
|
||||
target_id = self._resolve_target_id_for_delete(dash_id, dash_slug)
|
||||
if target_id is None:
|
||||
app_logger.error("[import_dashboard][Failure] No ID available for delete-retry.")
|
||||
raise
|
||||
|
||||
self.delete_dashboard(target_id)
|
||||
app_logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id)
|
||||
return self._do_import(file_path)
|
||||
# [/DEF:import_dashboard:Function]
|
||||
|
||||
# [DEF:delete_dashboard:Function]
|
||||
# @PURPOSE: Удаляет дашборд по его ID или slug.
|
||||
# @PARAM: dashboard_id (Union[int, str]) - ID или slug дашборда.
|
||||
def delete_dashboard(self, dashboard_id: Union[int, str]) -> None:
|
||||
with belief_scope("delete_dashboard"):
|
||||
app_logger.info("[delete_dashboard][Enter] Deleting dashboard %s.", dashboard_id)
|
||||
response = self.network.request(method="DELETE", endpoint=f"/dashboard/{dashboard_id}")
|
||||
response = cast(Dict, response)
|
||||
if response.get("result", True) is not False:
|
||||
app_logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id)
|
||||
else:
|
||||
app_logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response)
|
||||
# [/DEF:delete_dashboard:Function]
|
||||
|
||||
# [/SECTION]
|
||||
|
||||
# [SECTION: DATASET OPERATIONS]
|
||||
|
||||
# [DEF:get_datasets:Function]
|
||||
# @PURPOSE: Получает полный список датасетов, автоматически обрабатывая пагинацию.
|
||||
# @PARAM: query (Optional[Dict]) - Дополнительные параметры запроса.
|
||||
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список датасетов).
|
||||
def get_datasets(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
|
||||
with belief_scope("get_datasets"):
|
||||
app_logger.info("[get_datasets][Enter] Fetching datasets.")
|
||||
validated_query = self._validate_query_params(query)
|
||||
|
||||
total_count = self._fetch_total_object_count(endpoint="/dataset/")
|
||||
paginated_data = self._fetch_all_pages(
|
||||
endpoint="/dataset/",
|
||||
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
|
||||
)
|
||||
app_logger.info("[get_datasets][Exit] Found %d datasets.", total_count)
|
||||
return total_count, paginated_data
|
||||
# [/DEF:get_datasets:Function]
|
||||
|
||||
# [DEF:get_dataset:Function]
|
||||
# @PURPOSE: Fetch full dataset structure including columns and metrics.
|
||||
# @PRE: dataset_id must be a valid integer.
|
||||
# @POST: Returns full dataset metadata from Superset API.
|
||||
# @PARAM: dataset_id (int) - The ID of the dataset.
|
||||
# @RETURN: Dict - The dataset metadata.
|
||||
# @PURPOSE: Получает информацию о конкретном датасете по его ID.
|
||||
# @PARAM: dataset_id (int) - ID датасета.
|
||||
# @RETURN: Dict - Информация о датасете.
|
||||
def get_dataset(self, dataset_id: int) -> Dict:
|
||||
with belief_scope("SupersetClient.get_dataset", f"id={dataset_id}"):
|
||||
"""
|
||||
Fetch full dataset structure.
|
||||
"""
|
||||
return self.network.get(f"/api/v1/dataset/{dataset_id}").json()
|
||||
app_logger.info("[get_dataset][Enter] Fetching dataset %s.", dataset_id)
|
||||
response = self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}")
|
||||
response = cast(Dict, response)
|
||||
app_logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id)
|
||||
return response
|
||||
# [/DEF:get_dataset:Function]
|
||||
|
||||
# [DEF:update_dataset:Function]
|
||||
# @PURPOSE: Update dataset metadata.
|
||||
# @PRE: dataset_id must be valid, data must be a valid Superset dataset payload.
|
||||
# @POST: Dataset is updated in Superset.
|
||||
# @PARAM: dataset_id (int) - The ID of the dataset.
|
||||
# @PARAM: data (Dict) - The payload for update.
|
||||
def update_dataset(self, dataset_id: int, data: Dict):
|
||||
# @PURPOSE: Обновляет данные датасета по его ID.
|
||||
# @PARAM: dataset_id (int) - ID датасета.
|
||||
# @PARAM: data (Dict) - Данные для обновления.
|
||||
# @RETURN: Dict - Ответ API.
|
||||
def update_dataset(self, dataset_id: int, data: Dict) -> Dict:
|
||||
with belief_scope("SupersetClient.update_dataset", f"id={dataset_id}"):
|
||||
"""
|
||||
Update dataset metadata.
|
||||
"""
|
||||
self.network.put(f"/api/v1/dataset/{dataset_id}", json=data)
|
||||
app_logger.info("[update_dataset][Enter] Updating dataset %s.", dataset_id)
|
||||
response = self.network.request(
|
||||
method="PUT",
|
||||
endpoint=f"/dataset/{dataset_id}",
|
||||
data=json.dumps(data),
|
||||
headers={'Content-Type': 'application/json'}
|
||||
)
|
||||
response = cast(Dict, response)
|
||||
app_logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id)
|
||||
return response
|
||||
# [/DEF:update_dataset:Function]
|
||||
|
||||
# [/SECTION]
|
||||
|
||||
# [SECTION: DATABASE OPERATIONS]
|
||||
|
||||
# [DEF:get_databases:Function]
|
||||
# @PURPOSE: Получает полный список баз данных.
|
||||
# @PARAM: query (Optional[Dict]) - Дополнительные параметры запроса.
|
||||
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список баз данных).
|
||||
def get_databases(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
|
||||
with belief_scope("get_databases"):
|
||||
app_logger.info("[get_databases][Enter] Fetching databases.")
|
||||
validated_query = self._validate_query_params(query or {})
|
||||
if 'columns' not in validated_query:
|
||||
validated_query['columns'] = []
|
||||
total_count = self._fetch_total_object_count(endpoint="/database/")
|
||||
paginated_data = self._fetch_all_pages(
|
||||
endpoint="/database/",
|
||||
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
|
||||
)
|
||||
app_logger.info("[get_databases][Exit] Found %d databases.", total_count)
|
||||
return total_count, paginated_data
|
||||
# [/DEF:get_databases:Function]
|
||||
|
||||
# [DEF:get_database:Function]
|
||||
# @PURPOSE: Получает информацию о конкретной базе данных по её ID.
|
||||
# @PARAM: database_id (int) - ID базы данных.
|
||||
# @RETURN: Dict - Информация о базе данных.
|
||||
def get_database(self, database_id: int) -> Dict:
|
||||
with belief_scope("get_database"):
|
||||
app_logger.info("[get_database][Enter] Fetching database %s.", database_id)
|
||||
response = self.network.request(method="GET", endpoint=f"/database/{database_id}")
|
||||
response = cast(Dict, response)
|
||||
app_logger.info("[get_database][Exit] Got database %s.", database_id)
|
||||
return response
|
||||
# [/DEF:get_database:Function]
|
||||
|
||||
# [DEF:get_databases_summary:Function]
|
||||
# @PURPOSE: Fetch a summary of databases including uuid, name, and engine.
|
||||
# @RETURN: List[Dict] - Summary of databases.
|
||||
def get_databases_summary(self) -> List[Dict]:
|
||||
with belief_scope("SupersetClient.get_databases_summary"):
|
||||
query = {
|
||||
"columns": ["uuid", "database_name", "backend"]
|
||||
}
|
||||
_, databases = self.get_databases(query=query)
|
||||
|
||||
# Map 'backend' to 'engine' for consistency with contracts
|
||||
for db in databases:
|
||||
db['engine'] = db.pop('backend', None)
|
||||
|
||||
return databases
|
||||
# [/DEF:get_databases_summary:Function]
|
||||
|
||||
# [DEF:get_database_by_uuid:Function]
|
||||
# @PURPOSE: Find a database by its UUID.
|
||||
# @PARAM: db_uuid (str) - The UUID of the database.
|
||||
# @RETURN: Optional[Dict] - Database info if found, else None.
|
||||
def get_database_by_uuid(self, db_uuid: str) -> Optional[Dict]:
|
||||
with belief_scope("SupersetClient.get_database_by_uuid", f"uuid={db_uuid}"):
|
||||
query = {
|
||||
"filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
|
||||
}
|
||||
_, databases = self.get_databases(query=query)
|
||||
return databases[0] if databases else None
|
||||
# [/DEF:get_database_by_uuid:Function]
|
||||
|
||||
# [/SECTION]
|
||||
|
||||
# [SECTION: HELPERS]
|
||||
|
||||
# [DEF:_resolve_target_id_for_delete:Function]
|
||||
def _resolve_target_id_for_delete(self, dash_id: Optional[int], dash_slug: Optional[str]) -> Optional[int]:
|
||||
with belief_scope("_resolve_target_id_for_delete"):
|
||||
if dash_id is not None:
|
||||
return dash_id
|
||||
if dash_slug is not None:
|
||||
app_logger.debug("[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", dash_slug)
|
||||
try:
|
||||
_, candidates = self.get_dashboards(query={"filters": [{"col": "slug", "op": "eq", "value": dash_slug}]})
|
||||
if candidates:
|
||||
target_id = candidates[0]["id"]
|
||||
app_logger.debug("[_resolve_target_id_for_delete][Success] Resolved slug to ID %s.", target_id)
|
||||
return target_id
|
||||
except Exception as e:
|
||||
app_logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e)
|
||||
return None
|
||||
# [/DEF:_resolve_target_id_for_delete:Function]
|
||||
|
||||
# [DEF:_do_import:Function]
|
||||
def _do_import(self, file_name: Union[str, Path]) -> Dict:
|
||||
with belief_scope("_do_import"):
|
||||
app_logger.debug(f"[_do_import][State] Uploading file: {file_name}")
|
||||
file_path = Path(file_name)
|
||||
if not file_path.exists():
|
||||
app_logger.error(f"[_do_import][Failure] File does not exist: {file_name}")
|
||||
raise FileNotFoundError(f"File does not exist: {file_name}")
|
||||
|
||||
return self.network.upload_file(
|
||||
endpoint="/dashboard/import/",
|
||||
file_info={"file_obj": file_path, "file_name": file_path.name, "form_field": "formData"},
|
||||
extra_data={"overwrite": "true"},
|
||||
timeout=self.env.timeout * 2,
|
||||
)
|
||||
# [/DEF:_do_import:Function]
|
||||
|
||||
# [DEF:_validate_export_response:Function]
|
||||
def _validate_export_response(self, response: Response, dashboard_id: int) -> None:
|
||||
with belief_scope("_validate_export_response"):
|
||||
content_type = response.headers.get("Content-Type", "")
|
||||
if "application/zip" not in content_type:
|
||||
raise SupersetAPIError(f"Получен не ZIP-архив (Content-Type: {content_type})")
|
||||
if not response.content:
|
||||
raise SupersetAPIError("Получены пустые данные при экспорте")
|
||||
# [/DEF:_validate_export_response:Function]
|
||||
|
||||
# [DEF:_resolve_export_filename:Function]
|
||||
def _resolve_export_filename(self, response: Response, dashboard_id: int) -> str:
|
||||
with belief_scope("_resolve_export_filename"):
|
||||
filename = get_filename_from_headers(dict(response.headers))
|
||||
if not filename:
|
||||
from datetime import datetime
|
||||
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
|
||||
filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip"
|
||||
app_logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename)
|
||||
return filename
|
||||
# [/DEF:_resolve_export_filename:Function]
|
||||
|
||||
# [DEF:_validate_query_params:Function]
|
||||
def _validate_query_params(self, query: Optional[Dict]) -> Dict:
|
||||
with belief_scope("_validate_query_params"):
|
||||
base_query = {"page": 0, "page_size": 1000}
|
||||
return {**base_query, **(query or {})}
|
||||
# [/DEF:_validate_query_params:Function]
|
||||
|
||||
# [DEF:_fetch_total_object_count:Function]
|
||||
def _fetch_total_object_count(self, endpoint: str) -> int:
|
||||
with belief_scope("_fetch_total_object_count"):
|
||||
return self.network.fetch_paginated_count(
|
||||
endpoint=endpoint,
|
||||
query_params={"page": 0, "page_size": 1},
|
||||
count_field="count",
|
||||
)
|
||||
# [/DEF:_fetch_total_object_count:Function]
|
||||
|
||||
# [DEF:_fetch_all_pages:Function]
|
||||
def _fetch_all_pages(self, endpoint: str, pagination_options: Dict) -> List[Dict]:
|
||||
with belief_scope("_fetch_all_pages"):
|
||||
return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options)
|
||||
# [/DEF:_fetch_all_pages:Function]
|
||||
|
||||
# [DEF:_validate_import_file:Function]
|
||||
def _validate_import_file(self, zip_path: Union[str, Path]) -> None:
|
||||
with belief_scope("_validate_import_file"):
|
||||
path = Path(zip_path)
|
||||
if not path.exists():
|
||||
raise FileNotFoundError(f"Файл {zip_path} не существует")
|
||||
if not zipfile.is_zipfile(path):
|
||||
raise SupersetAPIError(f"Файл {zip_path} не является ZIP-архивом")
|
||||
with zipfile.ZipFile(path, "r") as zf:
|
||||
if not any(n.endswith("metadata.yaml") for n in zf.namelist()):
|
||||
raise SupersetAPIError(f"Архив {zip_path} не содержит 'metadata.yaml'")
|
||||
# [/DEF:_validate_import_file:Function]
|
||||
|
||||
# [/SECTION]
|
||||
|
||||
# [/DEF:SupersetClient:Class]
|
||||
|
||||
# [/DEF:backend.src.core.superset_client:Module]
|
||||
|
||||
superset_tool/utils/dataset_mapper.py → backend/src/core/utils/dataset_mapper.py (executable file → normal file, 47 lines changed)
@@ -1,9 +1,9 @@
# [DEF:superset_tool.utils.dataset_mapper:Module]
# [DEF:backend.core.utils.dataset_mapper:Module]
#
# @SEMANTICS: dataset, mapping, postgresql, xlsx, superset
# @PURPOSE: This module is responsible for updating metadata (verbose_map) in Superset datasets, extracting it from PostgreSQL or XLSX files.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> superset_tool.client
# @RELATION: DEPENDS_ON -> backend.core.superset_client
# @RELATION: DEPENDS_ON -> pandas
# @RELATION: DEPENDS_ON -> psycopg2
# @PUBLIC_API: DatasetMapper
@@ -11,10 +11,8 @@
|
||||
# [SECTION: IMPORTS]
|
||||
import pandas as pd # type: ignore
|
||||
import psycopg2 # type: ignore
|
||||
from superset_tool.client import SupersetClient
|
||||
from superset_tool.utils.init_clients import setup_clients
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
from typing import Dict, List, Optional, Any
|
||||
from ..logger import logger as app_logger, belief_scope
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:DatasetMapper:Class]
|
||||
@@ -22,10 +20,9 @@ from typing import Dict, List, Optional, Any
|
||||
class DatasetMapper:
|
||||
# [DEF:__init__:Function]
|
||||
# @PURPOSE: Initializes the mapper.
|
||||
# @PRE: logger должен быть экземпляром SupersetLogger.
|
||||
# @POST: Объект DatasetMapper инициализирован.
|
||||
def __init__(self, logger: SupersetLogger):
|
||||
self.logger = logger
|
||||
def __init__(self):
|
||||
pass
|
||||
# [/DEF:__init__:Function]
|
||||
|
||||
# [DEF:get_postgres_comments:Function]
|
||||
@@ -39,8 +36,8 @@ class DatasetMapper:
|
||||
# @PARAM: table_schema (str) - Схема таблицы.
|
||||
# @RETURN: Dict[str, str] - Словарь с комментариями к колонкам.
|
||||
def get_postgres_comments(self, db_config: Dict, table_name: str, table_schema: str) -> Dict[str, str]:
|
||||
with self.logger.belief_scope("Fetch comments from PostgreSQL"):
|
||||
self.logger.info("[get_postgres_comments][Enter] Fetching comments from PostgreSQL for %s.%s.", table_schema, table_name)
|
||||
with belief_scope("Fetch comments from PostgreSQL"):
|
||||
app_logger.info("[get_postgres_comments][Enter] Fetching comments from PostgreSQL for %s.%s.", table_schema, table_name)
|
||||
query = f"""
|
||||
SELECT
|
||||
cols.column_name,
|
||||
@@ -86,9 +83,9 @@ class DatasetMapper:
|
||||
for row in cursor.fetchall():
|
||||
if row[1]:
|
||||
comments[row[0]] = row[1]
|
||||
self.logger.info("[get_postgres_comments][Success] Fetched %d comments.", len(comments))
|
||||
app_logger.info("[get_postgres_comments][Success] Fetched %d comments.", len(comments))
|
||||
except Exception as e:
|
||||
self.logger.error("[get_postgres_comments][Failure] %s", e, exc_info=True)
|
||||
app_logger.error("[get_postgres_comments][Failure] %s", e, exc_info=True)
|
||||
raise
|
||||
return comments
|
||||
# [/DEF:get_postgres_comments:Function]
|
||||
@@ -101,15 +98,15 @@ class DatasetMapper:
|
||||
# @PARAM: file_path (str) - Путь к XLSX файлу.
|
||||
# @RETURN: Dict[str, str] - Словарь с меппингами.
|
||||
def load_excel_mappings(self, file_path: str) -> Dict[str, str]:
|
||||
with self.logger.belief_scope("Load mappings from Excel"):
|
||||
self.logger.info("[load_excel_mappings][Enter] Loading mappings from %s.", file_path)
|
||||
with belief_scope("Load mappings from Excel"):
|
||||
app_logger.info("[load_excel_mappings][Enter] Loading mappings from %s.", file_path)
|
||||
try:
|
||||
df = pd.read_excel(file_path)
|
||||
mappings = df.set_index('column_name')['verbose_name'].to_dict()
|
||||
self.logger.info("[load_excel_mappings][Success] Loaded %d mappings.", len(mappings))
|
||||
app_logger.info("[load_excel_mappings][Success] Loaded %d mappings.", len(mappings))
|
||||
return mappings
|
||||
except Exception as e:
|
||||
self.logger.error("[load_excel_mappings][Failure] %s", e, exc_info=True)
|
||||
app_logger.error("[load_excel_mappings][Failure] %s", e, exc_info=True)
|
||||
raise
|
||||
# [/DEF:load_excel_mappings:Function]
|
||||
|
||||
@@ -122,16 +119,16 @@ class DatasetMapper:
|
||||
# @RELATION: CALLS -> self.load_excel_mappings
|
||||
# @RELATION: CALLS -> superset_client.get_dataset
|
||||
# @RELATION: CALLS -> superset_client.update_dataset
|
||||
# @PARAM: superset_client (SupersetClient) - Клиент Superset.
|
||||
# @PARAM: superset_client (Any) - Клиент Superset.
|
||||
# @PARAM: dataset_id (int) - ID датасета для обновления.
|
||||
# @PARAM: source (str) - Источник данных ('postgres', 'excel', 'both').
|
||||
# @PARAM: postgres_config (Optional[Dict]) - Конфигурация для подключения к PostgreSQL.
|
||||
# @PARAM: excel_path (Optional[str]) - Путь к XLSX файлу.
|
||||
# @PARAM: table_name (Optional[str]) - Имя таблицы в PostgreSQL.
|
||||
# @PARAM: table_schema (Optional[str]) - Схема таблицы в PostgreSQL.
|
||||
def run_mapping(self, superset_client: SupersetClient, dataset_id: int, source: str, postgres_config: Optional[Dict] = None, excel_path: Optional[str] = None, table_name: Optional[str] = None, table_schema: Optional[str] = None):
|
||||
with self.logger.belief_scope(f"Run dataset mapping for ID {dataset_id}"):
|
||||
self.logger.info("[run_mapping][Enter] Starting dataset mapping for ID %d from source '%s'.", dataset_id, source)
|
||||
def run_mapping(self, superset_client: Any, dataset_id: int, source: str, postgres_config: Optional[Dict] = None, excel_path: Optional[str] = None, table_name: Optional[str] = None, table_schema: Optional[str] = None):
|
||||
with belief_scope(f"Run dataset mapping for ID {dataset_id}"):
|
||||
app_logger.info("[run_mapping][Enter] Starting dataset mapping for ID %d from source '%s'.", dataset_id, source)
|
||||
mappings: Dict[str, str] = {}
|
||||
|
||||
try:
|
||||
@@ -142,7 +139,7 @@ class DatasetMapper:
|
||||
assert excel_path, "Excel path is required."
|
||||
mappings.update(self.load_excel_mappings(excel_path))
|
||||
if source not in ['postgres', 'excel', 'both']:
|
||||
self.logger.error("[run_mapping][Failure] Invalid source: %s.", source)
|
||||
app_logger.error("[run_mapping][Failure] Invalid source: %s.", source)
|
||||
return
|
||||
|
||||
dataset_response = superset_client.get_dataset(dataset_id)
|
||||
@@ -227,14 +224,14 @@ class DatasetMapper:
|
||||
payload_for_update = {k: v for k, v in payload_for_update.items() if v is not None}
|
||||
|
||||
superset_client.update_dataset(dataset_id, payload_for_update)
|
||||
self.logger.info("[run_mapping][Success] Dataset %d columns' verbose_name updated.", dataset_id)
|
||||
app_logger.info("[run_mapping][Success] Dataset %d columns' verbose_name updated.", dataset_id)
|
||||
else:
|
||||
self.logger.info("[run_mapping][State] No changes in columns' verbose_name, skipping update.")
|
||||
app_logger.info("[run_mapping][State] No changes in columns' verbose_name, skipping update.")
|
||||
|
||||
except (AssertionError, FileNotFoundError, Exception) as e:
|
||||
self.logger.error("[run_mapping][Failure] %s", e, exc_info=True)
|
||||
app_logger.error("[run_mapping][Failure] %s", e, exc_info=True)
|
||||
return
|
||||
# [/DEF:run_mapping:Function]
|
||||
# [/DEF:DatasetMapper:Class]
|
||||
|
||||
# [/DEF:superset_tool.utils.dataset_mapper:Module]
|
||||
# [/DEF:backend.core.utils.dataset_mapper:Module]
|
||||
superset_tool/utils/fileio.py → backend/src/core/utils/fileio.py (executable file → normal file, 169 lines changed)
@@ -1,10 +1,9 @@
# [DEF:superset_tool.utils.fileio:Module]
# [DEF:backend.core.utils.fileio:Module]
#
# @SEMANTICS: file, io, zip, yaml, temp, archive, utility
# @PURPOSE: Provides a set of utilities for managing file operations, including working with temporary files, ZIP archives, YAML files, and directory cleanup.
# @LAYER: Infra
# @RELATION: DEPENDS_ON -> superset_tool.exceptions
# @RELATION: DEPENDS_ON -> superset_tool.utils.logger
# @RELATION: DEPENDS_ON -> backend.src.core.logger
# @RELATION: DEPENDS_ON -> pyyaml
# @PUBLIC_API: create_temp_file, remove_empty_directories, read_dashboard_from_disk, calculate_crc32, RetentionPolicy, archive_exports, save_and_unpack_dashboard, update_yamls, create_dashboard_export, sanitize_filename, get_filename_from_headers, consolidate_archive_folders
@@ -17,15 +16,17 @@ from typing import Any, Optional, Tuple, Dict, List, Union, LiteralString, Gener
|
||||
from contextlib import contextmanager
|
||||
import tempfile
|
||||
from datetime import date, datetime
|
||||
import glob
|
||||
import shutil
|
||||
import zlib
|
||||
from dataclasses import dataclass
|
||||
import yaml
|
||||
from superset_tool.exceptions import InvalidZipFormatError
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
from ..logger import logger as app_logger, belief_scope
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:InvalidZipFormatError:Class]
|
||||
class InvalidZipFormatError(Exception):
|
||||
pass
|
||||
|
||||
# [DEF:create_temp_file:Function]
|
||||
# @PURPOSE: Контекстный менеджер для создания временного файла или директории с гарантированным удалением.
|
||||
# @PRE: suffix должен быть строкой, определяющей тип ресурса.
|
||||
@@ -33,20 +34,18 @@ from superset_tool.utils.logger import SupersetLogger
|
||||
# @PARAM: content (Optional[bytes]) - Бинарное содержимое для записи во временный файл.
|
||||
# @PARAM: suffix (str) - Суффикс ресурса. Если `.dir`, создается директория.
|
||||
# @PARAM: mode (str) - Режим записи в файл (e.g., 'wb').
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
# @YIELDS: Path - Путь к временному ресурсу.
|
||||
# @THROW: IOError - При ошибках создания ресурса.
|
||||
@contextmanager
|
||||
def create_temp_file(content: Optional[bytes] = None, suffix: str = ".zip", mode: str = 'wb', dry_run = False, logger: Optional[SupersetLogger] = None) -> Generator[Path, None, None]:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope("Create temporary resource"):
|
||||
def create_temp_file(content: Optional[bytes] = None, suffix: str = ".zip", mode: str = 'wb', dry_run = False) -> Generator[Path, None, None]:
|
||||
with belief_scope("Create temporary resource"):
|
||||
resource_path = None
|
||||
is_dir = suffix.startswith('.dir')
|
||||
try:
|
||||
if is_dir:
|
||||
with tempfile.TemporaryDirectory(suffix=suffix) as temp_dir:
|
||||
resource_path = Path(temp_dir)
|
||||
logger.debug("[create_temp_file][State] Created temporary directory: %s", resource_path)
|
||||
app_logger.debug("[create_temp_file][State] Created temporary directory: %s", resource_path)
|
||||
yield resource_path
|
||||
else:
|
||||
fd, temp_path_str = tempfile.mkstemp(suffix=suffix)
|
||||
@@ -54,19 +53,19 @@ def create_temp_file(content: Optional[bytes] = None, suffix: str = ".zip", mode
|
||||
os.close(fd)
|
||||
if content:
|
||||
resource_path.write_bytes(content)
|
||||
logger.debug("[create_temp_file][State] Created temporary file: %s", resource_path)
|
||||
app_logger.debug("[create_temp_file][State] Created temporary file: %s", resource_path)
|
||||
yield resource_path
|
||||
finally:
|
||||
if resource_path and resource_path.exists() and not dry_run:
|
||||
try:
|
||||
if resource_path.is_dir():
|
||||
shutil.rmtree(resource_path)
|
||||
logger.debug("[create_temp_file][Cleanup] Removed temporary directory: %s", resource_path)
|
||||
app_logger.debug("[create_temp_file][Cleanup] Removed temporary directory: %s", resource_path)
|
||||
else:
|
||||
resource_path.unlink()
|
||||
logger.debug("[create_temp_file][Cleanup] Removed temporary file: %s", resource_path)
|
||||
app_logger.debug("[create_temp_file][Cleanup] Removed temporary file: %s", resource_path)
|
||||
except OSError as e:
|
||||
logger.error("[create_temp_file][Failure] Error during cleanup of %s: %s", resource_path, e)
|
||||
app_logger.error("[create_temp_file][Failure] Error during cleanup of %s: %s", resource_path, e)
|
||||
# [/DEF:create_temp_file:Function]
|
||||
|
||||
# [DEF:remove_empty_directories:Function]
|
||||
@@ -74,25 +73,23 @@ def create_temp_file(content: Optional[bytes] = None, suffix: str = ".zip", mode
|
||||
# @PRE: root_dir должен быть путем к существующей директории.
|
||||
# @POST: Все пустые поддиректории удалены, возвращено их количество.
|
||||
# @PARAM: root_dir (str) - Путь к корневой директории для очистки.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
# @RETURN: int - Количество удаленных директорий.
|
||||
def remove_empty_directories(root_dir: str, logger: Optional[SupersetLogger] = None) -> int:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope(f"Remove empty directories in {root_dir}"):
|
||||
logger.info("[remove_empty_directories][Enter] Starting cleanup of empty directories in %s", root_dir)
|
||||
def remove_empty_directories(root_dir: str) -> int:
|
||||
with belief_scope(f"Remove empty directories in {root_dir}"):
|
||||
app_logger.info("[remove_empty_directories][Enter] Starting cleanup of empty directories in %s", root_dir)
|
||||
removed_count = 0
|
||||
if not os.path.isdir(root_dir):
|
||||
logger.error("[remove_empty_directories][Failure] Directory not found: %s", root_dir)
|
||||
app_logger.error("[remove_empty_directories][Failure] Directory not found: %s", root_dir)
|
||||
return 0
|
||||
for current_dir, _, _ in os.walk(root_dir, topdown=False):
|
||||
if not os.listdir(current_dir):
|
||||
try:
|
||||
os.rmdir(current_dir)
|
||||
removed_count += 1
|
||||
logger.info("[remove_empty_directories][State] Removed empty directory: %s", current_dir)
|
||||
app_logger.info("[remove_empty_directories][State] Removed empty directory: %s", current_dir)
|
||||
except OSError as e:
|
||||
logger.error("[remove_empty_directories][Failure] Failed to remove %s: %s", current_dir, e)
|
||||
logger.info("[remove_empty_directories][Exit] Removed %d empty directories.", removed_count)
|
||||
app_logger.error("[remove_empty_directories][Failure] Failed to remove %s: %s", current_dir, e)
|
||||
app_logger.info("[remove_empty_directories][Exit] Removed %d empty directories.", removed_count)
|
||||
return removed_count
|
||||
# [/DEF:remove_empty_directories:Function]
|
||||
|
||||
@@ -101,18 +98,16 @@ def remove_empty_directories(root_dir: str, logger: Optional[SupersetLogger] = N
|
||||
# @PRE: file_path должен указывать на существующий файл.
|
||||
# @POST: Возвращает байты содержимого и имя файла.
|
||||
# @PARAM: file_path (str) - Путь к файлу.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
# @RETURN: Tuple[bytes, str] - Кортеж (содержимое, имя файла).
|
||||
# @THROW: FileNotFoundError - Если файл не найден.
|
||||
def read_dashboard_from_disk(file_path: str, logger: Optional[SupersetLogger] = None) -> Tuple[bytes, str]:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope(f"Read dashboard from {file_path}"):
|
||||
def read_dashboard_from_disk(file_path: str) -> Tuple[bytes, str]:
|
||||
with belief_scope(f"Read dashboard from {file_path}"):
|
||||
path = Path(file_path)
|
||||
assert path.is_file(), f"Файл дашборда не найден: {file_path}"
|
||||
logger.info("[read_dashboard_from_disk][Enter] Reading file: %s", file_path)
|
||||
app_logger.info("[read_dashboard_from_disk][Enter] Reading file: %s", file_path)
|
||||
content = path.read_bytes()
|
||||
if not content:
|
||||
logger.warning("[read_dashboard_from_disk][Warning] File is empty: %s", file_path)
|
||||
app_logger.warning("[read_dashboard_from_disk][Warning] File is empty: %s", file_path)
|
||||
return content, path.name
|
||||
# [/DEF:read_dashboard_from_disk:Function]
|
||||
|
||||
@@ -124,8 +119,7 @@ def read_dashboard_from_disk(file_path: str, logger: Optional[SupersetLogger] =
|
||||
# @RETURN: str - 8-значное шестнадцатеричное представление CRC32.
|
||||
# @THROW: IOError - При ошибках чтения файла.
|
||||
def calculate_crc32(file_path: Path) -> str:
|
||||
logger = SupersetLogger(name="fileio")
|
||||
with logger.belief_scope(f"Calculate CRC32 for {file_path}"):
|
||||
with belief_scope(f"Calculate CRC32 for {file_path}"):
|
||||
with open(file_path, 'rb') as f:
|
||||
crc32_value = zlib.crc32(f.read())
|
||||
return f"{crc32_value:08x}"
|
||||
@@ -151,26 +145,24 @@ class RetentionPolicy:
|
||||
# @PARAM: output_dir (str) - Директория с архивами.
|
||||
# @PARAM: policy (RetentionPolicy) - Политика хранения.
|
||||
# @PARAM: deduplicate (bool) - Флаг для включения удаления дубликатов по CRC32.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool = False, logger: Optional[SupersetLogger] = None) -> None:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope(f"Archive exports in {output_dir}"):
|
||||
def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool = False) -> None:
|
||||
with belief_scope(f"Archive exports in {output_dir}"):
|
||||
output_path = Path(output_dir)
|
||||
if not output_path.is_dir():
|
||||
logger.warning("[archive_exports][Skip] Archive directory not found: %s", output_dir)
|
||||
app_logger.warning("[archive_exports][Skip] Archive directory not found: %s", output_dir)
|
||||
return
|
||||
|
||||
logger.info("[archive_exports][Enter] Managing archive in %s", output_dir)
|
||||
app_logger.info("[archive_exports][Enter] Managing archive in %s", output_dir)
|
||||
|
||||
# 1. Collect all zip files
|
||||
zip_files = list(output_path.glob("*.zip"))
|
||||
if not zip_files:
|
||||
logger.info("[archive_exports][State] No zip files found in %s", output_dir)
|
||||
app_logger.info("[archive_exports][State] No zip files found in %s", output_dir)
|
||||
return
|
||||
|
||||
# 2. Deduplication
|
||||
if deduplicate:
|
||||
logger.info("[archive_exports][State] Starting deduplication...")
|
||||
app_logger.info("[archive_exports][State] Starting deduplication...")
|
||||
checksums = {}
|
||||
files_to_remove = []
|
||||
|
||||
@@ -182,19 +174,19 @@ def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool
|
||||
crc = calculate_crc32(file_path)
|
||||
if crc in checksums:
|
||||
files_to_remove.append(file_path)
|
||||
logger.debug("[archive_exports][State] Duplicate found: %s (same as %s)", file_path.name, checksums[crc].name)
|
||||
app_logger.debug("[archive_exports][State] Duplicate found: %s (same as %s)", file_path.name, checksums[crc].name)
|
||||
else:
|
||||
checksums[crc] = file_path
|
||||
except Exception as e:
|
||||
logger.error("[archive_exports][Failure] Failed to calculate CRC32 for %s: %s", file_path, e)
|
||||
app_logger.error("[archive_exports][Failure] Failed to calculate CRC32 for %s: %s", file_path, e)
|
||||
|
||||
for f in files_to_remove:
|
||||
try:
|
||||
f.unlink()
|
||||
zip_files.remove(f)
|
||||
logger.info("[archive_exports][State] Removed duplicate: %s", f.name)
|
||||
app_logger.info("[archive_exports][State] Removed duplicate: %s", f.name)
|
||||
except OSError as e:
|
||||
logger.error("[archive_exports][Failure] Failed to remove duplicate %s: %s", f, e)
|
||||
app_logger.error("[archive_exports][Failure] Failed to remove duplicate %s: %s", f, e)
|
||||
|
||||
# 3. Retention Policy
|
||||
files_with_dates = []
|
||||
@@ -216,15 +208,15 @@ def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool
|
||||
|
||||
files_with_dates.append((file_path, file_date))
|
||||
|
||||
files_to_keep = apply_retention_policy(files_with_dates, policy, logger)
|
||||
files_to_keep = apply_retention_policy(files_with_dates, policy)
|
||||
|
||||
for file_path, _ in files_with_dates:
|
||||
if file_path not in files_to_keep:
|
||||
try:
|
||||
file_path.unlink()
|
||||
logger.info("[archive_exports][State] Removed by retention policy: %s", file_path.name)
|
||||
app_logger.info("[archive_exports][State] Removed by retention policy: %s", file_path.name)
|
||||
except OSError as e:
|
||||
logger.error("[archive_exports][Failure] Failed to remove %s: %s", file_path, e)
|
||||
app_logger.error("[archive_exports][Failure] Failed to remove %s: %s", file_path, e)
|
||||
# [/DEF:archive_exports:Function]
|
||||
|
||||
# [DEF:apply_retention_policy:Function]
|
||||
@@ -233,10 +225,9 @@ def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool
|
||||
# @POST: Returns a set of files to keep.
|
||||
# @PARAM: files_with_dates (List[Tuple[Path, date]]) - Список файлов с датами.
|
||||
# @PARAM: policy (RetentionPolicy) - Политика хранения.
|
||||
# @PARAM: logger (SupersetLogger) - Логгер.
|
||||
# @RETURN: set - Множество путей к файлам, которые должны быть сохранены.
|
||||
def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: RetentionPolicy, logger: SupersetLogger) -> set:
|
||||
with logger.belief_scope("Apply retention policy"):
|
||||
def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: RetentionPolicy) -> set:
|
||||
with belief_scope("Apply retention policy"):
|
||||
# Сортируем по дате (от новой к старой)
|
||||
sorted_files = sorted(files_with_dates, key=lambda x: x[1], reverse=True)
|
||||
# Словарь для хранения файлов по категориям
|
||||
@@ -259,7 +250,7 @@ def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: Re
|
||||
files_to_keep.update(daily_files)
|
||||
files_to_keep.update(weekly_files[:policy.weekly])
|
||||
files_to_keep.update(monthly_files[:policy.monthly])
|
||||
logger.debug("[apply_retention_policy][State] Keeping %d files according to retention policy", len(files_to_keep))
|
||||
app_logger.debug("[apply_retention_policy][State] Keeping %d files according to retention policy", len(files_to_keep))
|
||||
return files_to_keep
|
||||
# [/DEF:apply_retention_policy:Function]
|
||||
|
||||
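A minimal sketch of the daily/weekly/monthly bucketing that `apply_retention_policy` performs. The `RetentionPolicy` field names (`daily`, `weekly`, `monthly`) are inferred from the slicing visible above; the age threshold and the week/month grouping are illustrative assumptions, not the exact implementation.

```python
from dataclasses import dataclass
from datetime import date
from pathlib import Path
from typing import List, Set, Tuple

@dataclass
class RetentionPolicySketch:
    daily: int = 7      # keep everything newer than N days
    weekly: int = 4     # then at most one file per ISO week
    monthly: int = 6    # then at most one file per calendar month

def keep_files(files_with_dates: List[Tuple[Path, date]],
               policy: RetentionPolicySketch) -> Set[Path]:
    today = date.today()
    newest_first = sorted(files_with_dates, key=lambda x: x[1], reverse=True)
    daily, weekly, monthly = [], [], []
    seen_weeks, seen_months = set(), set()
    for path, d in newest_first:
        if (today - d).days < policy.daily:
            daily.append(path)
        elif (week := (d.isocalendar()[0], d.isocalendar()[1])) not in seen_weeks:
            seen_weeks.add(week)
            weekly.append(path)
        elif (month := (d.year, d.month)) not in seen_months:
            seen_months.add(month)
            monthly.append(path)
    # Same final union as the hunk above: all daily files plus the first
    # `weekly`/`monthly` representatives, newest first.
    return set(daily) | set(weekly[:policy.weekly]) | set(monthly[:policy.monthly])
```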
@@ -271,28 +262,26 @@ def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: Re
|
||||
# @PARAM: output_dir (Union[str, Path]) - Директория для сохранения.
|
||||
# @PARAM: unpack (bool) - Флаг, нужно ли распаковывать архив.
|
||||
# @PARAM: original_filename (Optional[str]) - Исходное имя файла для сохранения.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
# @RETURN: Tuple[Path, Optional[Path]] - Путь к ZIP-файлу и, если применимо, путь к директории с распаковкой.
|
||||
# @THROW: InvalidZipFormatError - При ошибке формата ZIP.
|
||||
def save_and_unpack_dashboard(zip_content: bytes, output_dir: Union[str, Path], unpack: bool = False, original_filename: Optional[str] = None, logger: Optional[SupersetLogger] = None) -> Tuple[Path, Optional[Path]]:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope("Save and unpack dashboard"):
|
||||
logger.info("[save_and_unpack_dashboard][Enter] Processing dashboard. Unpack: %s", unpack)
|
||||
def save_and_unpack_dashboard(zip_content: bytes, output_dir: Union[str, Path], unpack: bool = False, original_filename: Optional[str] = None) -> Tuple[Path, Optional[Path]]:
|
||||
with belief_scope("Save and unpack dashboard"):
|
||||
app_logger.info("[save_and_unpack_dashboard][Enter] Processing dashboard. Unpack: %s", unpack)
|
||||
try:
|
||||
output_path = Path(output_dir)
|
||||
output_path.mkdir(parents=True, exist_ok=True)
|
||||
zip_name = sanitize_filename(original_filename) if original_filename else f"dashboard_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip"
|
||||
zip_path = output_path / zip_name
|
||||
zip_path.write_bytes(zip_content)
|
||||
logger.info("[save_and_unpack_dashboard][State] Dashboard saved to: %s", zip_path)
|
||||
app_logger.info("[save_and_unpack_dashboard][State] Dashboard saved to: %s", zip_path)
|
||||
if unpack:
|
||||
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
|
||||
zip_ref.extractall(output_path)
|
||||
logger.info("[save_and_unpack_dashboard][State] Dashboard unpacked to: %s", output_path)
|
||||
app_logger.info("[save_and_unpack_dashboard][State] Dashboard unpacked to: %s", output_path)
|
||||
return zip_path, output_path
|
||||
return zip_path, None
|
||||
except zipfile.BadZipFile as e:
|
||||
logger.error("[save_and_unpack_dashboard][Failure] Invalid ZIP archive: %s", e)
|
||||
app_logger.error("[save_and_unpack_dashboard][Failure] Invalid ZIP archive: %s", e)
|
||||
raise InvalidZipFormatError(f"Invalid ZIP file: {e}") from e
|
||||
# [/DEF:save_and_unpack_dashboard:Function]
|
||||
|
||||
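A hedged usage sketch for the new `save_and_unpack_dashboard` signature (no logger argument). The import path and the in-memory payload are illustrative; the actual call is left commented so the snippet stays side-effect free.

```python
import io
import zipfile
from pathlib import Path

# Build a tiny in-memory ZIP so the example payload is self-contained.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("dashboard_export/metadata.yaml", "version: 1.0.0\n")

# Assumed import path after the migration; adjust if the module layout differs.
# from backend.src.core.utils.fileio import save_and_unpack_dashboard
# zip_path, unpacked_dir = save_and_unpack_dashboard(
#     zip_content=buf.getvalue(),
#     output_dir=Path("dashboards/example"),
#     unpack=True,                      # also extract next to the archive
#     original_filename="example.zip",
# )
```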
@@ -306,18 +295,16 @@ def save_and_unpack_dashboard(zip_content: bytes, output_dir: Union[str, Path],
|
||||
# @PARAM: path (str) - Путь к директории с YAML файлами.
|
||||
# @PARAM: regexp_pattern (Optional[LiteralString]) - Паттерн для поиска.
|
||||
# @PARAM: replace_string (Optional[LiteralString]) - Строка для замены.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
def update_yamls(db_configs: Optional[List[Dict[str, Any]]] = None, path: str = "dashboards", regexp_pattern: Optional[LiteralString] = None, replace_string: Optional[LiteralString] = None, logger: Optional[SupersetLogger] = None) -> None:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope("Update YAML configurations"):
|
||||
logger.info("[update_yamls][Enter] Starting YAML configuration update.")
|
||||
def update_yamls(db_configs: Optional[List[Dict[str, Any]]] = None, path: str = "dashboards", regexp_pattern: Optional[LiteralString] = None, replace_string: Optional[LiteralString] = None) -> None:
|
||||
with belief_scope("Update YAML configurations"):
|
||||
app_logger.info("[update_yamls][Enter] Starting YAML configuration update.")
|
||||
dir_path = Path(path)
|
||||
assert dir_path.is_dir(), f"Путь {path} не существует или не является директорией"
|
||||
|
||||
configs: List[Dict[str, Any]] = db_configs or []
|
||||
|
||||
for file_path in dir_path.rglob("*.yaml"):
|
||||
_update_yaml_file(file_path, configs, regexp_pattern, replace_string, logger)
|
||||
_update_yaml_file(file_path, configs, regexp_pattern, replace_string)
|
||||
# [/DEF:update_yamls:Function]
|
||||
|
||||
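The regex branch of `_update_yaml_file` (shown in the hunks that follow) reduces to a read–substitute–write cycle; a standalone sketch, with a made-up file path and pattern:

```python
import re
from pathlib import Path

def apply_regex_to_yaml(file_path: Path, pattern: str, replacement: str) -> bool:
    """Return True if the file was rewritten (same idea as the regex branch below)."""
    content = file_path.read_text(encoding="utf-8")
    new_content = re.sub(pattern, replacement, content)
    if new_content != content:
        file_path.write_text(new_content, encoding="utf-8")
        return True
    return False

# Hypothetical call: point all datasource hosts at a new server.
# apply_regex_to_yaml(Path("dashboards/x/databases/db.yaml"),
#                     r"old-host\.example", "new-host.example")
```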
# [DEF:_update_yaml_file:Function]
|
||||
@@ -328,15 +315,14 @@ def update_yamls(db_configs: Optional[List[Dict[str, Any]]] = None, path: str =
|
||||
# @PARAM: db_configs (List[Dict]) - Конфигурации.
|
||||
# @PARAM: regexp_pattern (Optional[str]) - Паттерн.
|
||||
# @PARAM: replace_string (Optional[str]) - Замена.
|
||||
# @PARAM: logger (SupersetLogger) - Логгер.
|
||||
def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_pattern: Optional[str], replace_string: Optional[str], logger: SupersetLogger) -> None:
|
||||
with logger.belief_scope(f"Update YAML file: {file_path}"):
|
||||
def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_pattern: Optional[str], replace_string: Optional[str]) -> None:
|
||||
with belief_scope(f"Update YAML file: {file_path}"):
|
||||
# Читаем содержимое файла
|
||||
try:
|
||||
with open(file_path, 'r', encoding='utf-8') as f:
|
||||
content = f.read()
|
||||
except Exception as e:
|
||||
logger.error("[_update_yaml_file][Failure] Failed to read %s: %s", file_path, e)
|
||||
app_logger.error("[_update_yaml_file][Failure] Failed to read %s: %s", file_path, e)
|
||||
return
|
||||
# Если задан pattern и replace_string, применяем замену по регулярному выражению
|
||||
if regexp_pattern and replace_string:
|
||||
@@ -345,9 +331,9 @@ def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_
|
||||
if new_content != content:
|
||||
with open(file_path, 'w', encoding='utf-8') as f:
|
||||
f.write(new_content)
|
||||
logger.info("[_update_yaml_file][State] Updated %s using regex pattern", file_path)
|
||||
app_logger.info("[_update_yaml_file][State] Updated %s using regex pattern", file_path)
|
||||
except Exception as e:
|
||||
logger.error("[_update_yaml_file][Failure] Error applying regex to %s: %s", file_path, e)
|
||||
app_logger.error("[_update_yaml_file][Failure] Error applying regex to %s: %s", file_path, e)
|
||||
# Если заданы конфигурации, заменяем значения (поддержка old/new)
|
||||
if db_configs:
|
||||
try:
|
||||
@@ -373,7 +359,6 @@ def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_
|
||||
# @PRE: match должен быть объектом совпадения регулярного выражения.
|
||||
# @POST: Возвращает строку с новым значением, сохраняя префикс и кавычки.
|
||||
def replacer(match):
|
||||
with logger.belief_scope("replacer"):
|
||||
prefix = match.group(1)
|
||||
quote_open = match.group(2)
|
||||
quote_close = match.group(4)
|
||||
@@ -381,12 +366,12 @@ def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_
|
||||
# [/DEF:replacer:Function]
|
||||
|
||||
modified_content = re.sub(pattern, replacer, modified_content)
|
||||
logger.info("[_update_yaml_file][State] Replaced '%s' with '%s' for key %s in %s", old_val, new_val, key, file_path)
|
||||
app_logger.info("[_update_yaml_file][State] Replaced '%s' with '%s' for key %s in %s", old_val, new_val, key, file_path)
|
||||
# Записываем обратно изменённый контент без парсинга YAML, сохраняем оригинальное форматирование
|
||||
with open(file_path, 'w', encoding='utf-8') as f:
|
||||
f.write(modified_content)
|
||||
except Exception as e:
|
||||
logger.error("[_update_yaml_file][Failure] Error performing raw replacement in %s: %s", file_path, e)
|
||||
app_logger.error("[_update_yaml_file][Failure] Error performing raw replacement in %s: %s", file_path, e)
|
||||
# [/DEF:_update_yaml_file:Function]
|
||||
|
||||
# [DEF:create_dashboard_export:Function]
|
||||
@@ -396,12 +381,10 @@ def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_
|
||||
# @PARAM: zip_path (Union[str, Path]) - Путь для сохранения ZIP архива.
|
||||
# @PARAM: source_paths (List[Union[str, Path]]) - Список исходных путей для архивации.
|
||||
# @PARAM: exclude_extensions (Optional[List[str]]) - Список расширений для исключения.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
# @RETURN: bool - `True` при успехе, `False` при ошибке.
|
||||
def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union[str, Path]], exclude_extensions: Optional[List[str]] = None, logger: Optional[SupersetLogger] = None) -> bool:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope(f"Create dashboard export: {zip_path}"):
|
||||
logger.info("[create_dashboard_export][Enter] Packing dashboard: %s -> %s", source_paths, zip_path)
|
||||
def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union[str, Path]], exclude_extensions: Optional[List[str]] = None) -> bool:
|
||||
with belief_scope(f"Create dashboard export: {zip_path}"):
|
||||
app_logger.info("[create_dashboard_export][Enter] Packing dashboard: %s -> %s", source_paths, zip_path)
|
||||
try:
|
||||
exclude_ext = [ext.lower() for ext in exclude_extensions or []]
|
||||
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
|
||||
@@ -412,10 +395,10 @@ def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union
|
||||
if item.is_file() and item.suffix.lower() not in exclude_ext:
|
||||
arcname = item.relative_to(src_path.parent)
|
||||
zipf.write(item, arcname)
|
||||
logger.info("[create_dashboard_export][Exit] Archive created: %s", zip_path)
|
||||
app_logger.info("[create_dashboard_export][Exit] Archive created: %s", zip_path)
|
||||
return True
|
||||
except (IOError, zipfile.BadZipFile, AssertionError) as e:
|
||||
logger.error("[create_dashboard_export][Failure] Error: %s", e, exc_info=True)
|
||||
app_logger.error("[create_dashboard_export][Failure] Error: %s", e, exc_info=True)
|
||||
return False
|
||||
# [/DEF:create_dashboard_export:Function]
|
||||
|
||||
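A compact sketch of the packing logic in `create_dashboard_export`: walk each source, skip excluded suffixes, and store entries relative to the source's parent. The recursive `rglob` walk is an assumption; the hunk only shows the per-item filter.

```python
import zipfile
from pathlib import Path
from typing import Iterable, List, Optional

def pack_sources(zip_path: Path, source_paths: Iterable[Path],
                 exclude_extensions: Optional[List[str]] = None) -> None:
    # Normalize the exclusion list once, exactly as the function above does.
    exclude = {ext.lower() for ext in (exclude_extensions or [])}
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
        for src in source_paths:
            src = Path(src)
            for item in src.rglob("*"):
                if item.is_file() and item.suffix.lower() not in exclude:
                    zipf.write(item, item.relative_to(src.parent))
```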
@@ -426,8 +409,7 @@ def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union
|
||||
# @PARAM: filename (str) - Исходное имя файла.
|
||||
# @RETURN: str - Очищенная строка.
|
||||
def sanitize_filename(filename: str) -> str:
|
||||
logger = SupersetLogger(name="fileio")
|
||||
with logger.belief_scope(f"Sanitize filename: {filename}"):
|
||||
with belief_scope(f"Sanitize filename: {filename}"):
|
||||
return re.sub(r'[\\/*?:"<>|]', "_", filename).strip()
|
||||
# [/DEF:sanitize_filename:Function]
|
||||
|
||||
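The sanitizer above is a single regex substitution; a quick standalone example of its effect:

```python
import re

def sanitize(filename: str) -> str:
    # Same regex as sanitize_filename above: replace path- and shell-hostile
    # characters with underscores and trim surrounding whitespace.
    return re.sub(r'[\\/*?:"<>|]', "_", filename).strip()

print(sanitize('weekly/sales: "Q4"?'))  # -> weekly_sales_ _Q4__
```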
@@ -438,8 +420,7 @@ def sanitize_filename(filename: str) -> str:
|
||||
# @PARAM: headers (dict) - Словарь HTTP заголовков.
|
||||
# @RETURN: Optional[str] - Имя файла или `None`.
|
||||
def get_filename_from_headers(headers: dict) -> Optional[str]:
|
||||
logger = SupersetLogger(name="fileio")
|
||||
with logger.belief_scope("Get filename from headers"):
|
||||
with belief_scope("Get filename from headers"):
|
||||
content_disposition = headers.get("Content-Disposition", "")
|
||||
if match := re.search(r'filename="?([^"]+)"?', content_disposition):
|
||||
return match.group(1).strip()
|
||||
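And the header-based counterpart, reproduced as a runnable snippet with a sample `Content-Disposition` value:

```python
import re
from typing import Optional

def filename_from_headers(headers: dict) -> Optional[str]:
    # Same extraction as above: pull the filename out of Content-Disposition.
    content_disposition = headers.get("Content-Disposition", "")
    if match := re.search(r'filename="?([^"]+)"?', content_disposition):
        return match.group(1).strip()
    return None

print(filename_from_headers(
    {"Content-Disposition": 'attachment; filename="dashboard_export.zip"'}
))  # -> dashboard_export.zip
```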
@@ -452,14 +433,12 @@ def get_filename_from_headers(headers: dict) -> Optional[str]:
|
||||
# @POST: Директории с одинаковым префиксом объединены в одну.
|
||||
# @THROW: TypeError, ValueError - Если `root_directory` невалиден.
|
||||
# @PARAM: root_directory (Path) - Корневая директория для консолидации.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
def consolidate_archive_folders(root_directory: Path, logger: Optional[SupersetLogger] = None) -> None:
|
||||
logger = logger or SupersetLogger(name="fileio")
|
||||
with logger.belief_scope(f"Consolidate archives in {root_directory}"):
|
||||
def consolidate_archive_folders(root_directory: Path) -> None:
|
||||
with belief_scope(f"Consolidate archives in {root_directory}"):
|
||||
assert isinstance(root_directory, Path), "root_directory must be a Path object."
|
||||
assert root_directory.is_dir(), "root_directory must be an existing directory."
|
||||
|
||||
logger.info("[consolidate_archive_folders][Enter] Consolidating archives in %s", root_directory)
|
||||
app_logger.info("[consolidate_archive_folders][Enter] Consolidating archives in %s", root_directory)
|
||||
# Собираем все директории с архивами
|
||||
archive_dirs = []
|
||||
for item in root_directory.iterdir():
|
||||
@@ -482,7 +461,7 @@ def consolidate_archive_folders(root_directory: Path, logger: Optional[SupersetL
|
||||
# Создаем целевую директорию
|
||||
target_dir = root_directory / slug
|
||||
target_dir.mkdir(exist_ok=True)
|
||||
logger.info("[consolidate_archive_folders][State] Consolidating %d directories under %s", len(dirs), target_dir)
|
||||
app_logger.info("[consolidate_archive_folders][State] Consolidating %d directories under %s", len(dirs), target_dir)
|
||||
# Перемещаем содержимое
|
||||
for source_dir in dirs:
|
||||
if source_dir == target_dir:
|
||||
@@ -495,13 +474,13 @@ def consolidate_archive_folders(root_directory: Path, logger: Optional[SupersetL
|
||||
else:
|
||||
shutil.move(str(item), str(dest_item))
|
||||
except Exception as e:
|
||||
logger.error("[consolidate_archive_folders][Failure] Failed to move %s to %s: %s", item, dest_item, e)
|
||||
app_logger.error("[consolidate_archive_folders][Failure] Failed to move %s to %s: %s", item, dest_item, e)
|
||||
# Удаляем исходную директорию
|
||||
try:
|
||||
source_dir.rmdir()
|
||||
logger.info("[consolidate_archive_folders][State] Removed source directory: %s", source_dir)
|
||||
app_logger.info("[consolidate_archive_folders][State] Removed source directory: %s", source_dir)
|
||||
except Exception as e:
|
||||
logger.error("[consolidate_archive_folders][Failure] Failed to remove source directory %s: %s", source_dir, e)
|
||||
app_logger.error("[consolidate_archive_folders][Failure] Failed to remove source directory %s: %s", source_dir, e)
|
||||
# [/DEF:consolidate_archive_folders:Function]
|
||||
|
||||
# [/DEF:superset_tool.utils.fileio:Module]
|
||||
# [/DEF:backend.core.utils.fileio:Module]
|
||||
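The recurring change across this file is the switch from per-call `SupersetLogger` instances to a shared module-level `app_logger` plus a standalone `belief_scope` context manager. A rough sketch of that pattern, assuming `belief_scope` brackets an operation with entry/exit/failure log lines (the real implementation lives in `backend/src/core/logger.py` and is not shown here):

```python
import logging
from contextlib import contextmanager

logger = logging.getLogger("backend")  # stand-in for the shared app_logger

@contextmanager
def belief_scope(name: str):
    # Assumed behaviour: wrap a named operation with entry/exit logging.
    logger.info("[%s][Enter]", name)
    try:
        yield
        logger.info("[%s][Exit]", name)
    except Exception:
        logger.exception("[%s][Failure]", name)
        raise

def sample_operation(path: str) -> None:
    with belief_scope(f"Archive exports in {path}"):
        logger.info("[sample_operation][State] doing work in %s", path)
```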
55
superset_tool/utils/network.py → backend/src/core/utils/network.py
Executable file → Normal file
@@ -1,10 +1,9 @@
|
||||
# [DEF:superset_tool.utils.network:Module]
|
||||
# [DEF:backend.core.utils.network:Module]
|
||||
#
|
||||
# @SEMANTICS: network, http, client, api, requests, session, authentication
|
||||
# @PURPOSE: Инкапсулирует низкоуровневую HTTP-логику для взаимодействия с Superset API, включая аутентификацию, управление сессией, retry-логику и обработку ошибок.
|
||||
# @LAYER: Infra
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.exceptions
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.utils.logger
|
||||
# @RELATION: DEPENDS_ON -> backend.src.core.logger
|
||||
# @RELATION: DEPENDS_ON -> requests
|
||||
# @PUBLIC_API: APIClient
|
||||
|
||||
@@ -16,12 +15,37 @@ from pathlib import Path
|
||||
import requests
|
||||
from requests.adapters import HTTPAdapter
|
||||
import urllib3
|
||||
from superset_tool.utils.logger import belief_scope
|
||||
from urllib3.util.retry import Retry
|
||||
from superset_tool.exceptions import AuthenticationError, NetworkError, DashboardNotFoundError, SupersetAPIError, PermissionDeniedError
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
from ..logger import logger as app_logger, belief_scope
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:SupersetAPIError:Class]
|
||||
class SupersetAPIError(Exception):
|
||||
def __init__(self, message: str = "Superset API error", **context: Any):
|
||||
self.context = context
|
||||
super().__init__(f"[API_FAILURE] {message} | Context: {self.context}")
|
||||
|
||||
# [DEF:AuthenticationError:Class]
|
||||
class AuthenticationError(SupersetAPIError):
|
||||
def __init__(self, message: str = "Authentication failed", **context: Any):
|
||||
super().__init__(message, type="authentication", **context)
|
||||
|
||||
# [DEF:PermissionDeniedError:Class]
|
||||
class PermissionDeniedError(AuthenticationError):
|
||||
def __init__(self, message: str = "Permission denied", **context: Any):
|
||||
super().__init__(message, **context)
|
||||
|
||||
# [DEF:DashboardNotFoundError:Class]
|
||||
class DashboardNotFoundError(SupersetAPIError):
|
||||
def __init__(self, resource_id: Union[int, str], message: str = "Dashboard not found", **context: Any):
|
||||
super().__init__(f"Dashboard '{resource_id}' {message}", subtype="not_found", resource_id=resource_id, **context)
|
||||
|
||||
# [DEF:NetworkError:Class]
|
||||
class NetworkError(Exception):
|
||||
def __init__(self, message: str = "Network connection failed", **context: Any):
|
||||
self.context = context
|
||||
super().__init__(f"[NETWORK_FAILURE] {message} | Context: {self.context}")
|
||||
|
||||
# [DEF:APIClient:Class]
|
||||
# @PURPOSE: Инкапсулирует HTTP-логику для работы с API, включая сессии, аутентификацию, и обработку запросов.
|
||||
class APIClient:
|
||||
@@ -32,20 +56,18 @@ class APIClient:
|
||||
# @PARAM: config (Dict[str, Any]) - Конфигурация.
|
||||
# @PARAM: verify_ssl (bool) - Проверять ли SSL.
|
||||
# @PARAM: timeout (int) - Таймаут запросов.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Логгер.
|
||||
# @PRE: config must contain 'base_url' and 'auth'.
|
||||
# @POST: APIClient instance is initialized with a session.
|
||||
def __init__(self, config: Dict[str, Any], verify_ssl: bool = True, timeout: int = DEFAULT_TIMEOUT, logger: Optional[SupersetLogger] = None):
|
||||
def __init__(self, config: Dict[str, Any], verify_ssl: bool = True, timeout: int = DEFAULT_TIMEOUT):
|
||||
with belief_scope("__init__"):
|
||||
self.logger = logger or SupersetLogger(name="APIClient")
|
||||
self.logger.info("[APIClient.__init__][Entry] Initializing APIClient.")
|
||||
app_logger.info("[APIClient.__init__][Entry] Initializing APIClient.")
|
||||
self.base_url: str = config.get("base_url", "")
|
||||
self.auth = config.get("auth")
|
||||
self.request_settings = {"verify_ssl": verify_ssl, "timeout": timeout}
|
||||
self.session = self._init_session()
|
||||
self._tokens: Dict[str, str] = {}
|
||||
self._authenticated = False
|
||||
self.logger.info("[APIClient.__init__][Exit] APIClient initialized.")
|
||||
app_logger.info("[APIClient.__init__][Exit] APIClient initialized.")
|
||||
# [/DEF:__init__:Function]
|
||||
|
||||
# [DEF:_init_session:Function]
|
||||
@@ -62,7 +84,7 @@ class APIClient:
|
||||
session.mount('https://', adapter)
|
||||
if not self.request_settings["verify_ssl"]:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
self.logger.warning("[_init_session][State] SSL verification disabled.")
|
||||
app_logger.warning("[_init_session][State] SSL verification disabled.")
|
||||
session.verify = self.request_settings["verify_ssl"]
|
||||
return session
|
||||
# [/DEF:_init_session:Function]
|
||||
@@ -75,7 +97,7 @@ class APIClient:
|
||||
# @THROW: AuthenticationError, NetworkError - при ошибках.
|
||||
def authenticate(self) -> Dict[str, str]:
|
||||
with belief_scope("authenticate"):
|
||||
self.logger.info("[authenticate][Enter] Authenticating to %s", self.base_url)
|
||||
app_logger.info("[authenticate][Enter] Authenticating to %s", self.base_url)
|
||||
try:
|
||||
login_url = f"{self.base_url}/security/login"
|
||||
response = self.session.post(login_url, json=self.auth, timeout=self.request_settings["timeout"])
|
||||
@@ -88,7 +110,7 @@ class APIClient:
|
||||
|
||||
self._tokens = {"access_token": access_token, "csrf_token": csrf_response.json()["result"]}
|
||||
self._authenticated = True
|
||||
self.logger.info("[authenticate][Exit] Authenticated successfully.")
|
||||
app_logger.info("[authenticate][Exit] Authenticated successfully.")
|
||||
return self._tokens
|
||||
except requests.exceptions.HTTPError as e:
|
||||
raise AuthenticationError(f"Authentication failed: {e}") from e
|
||||
@@ -211,12 +233,11 @@ class APIClient:
|
||||
try:
|
||||
response = self.session.post(url, files=files, data=data or {}, headers=headers, timeout=timeout or self.request_settings["timeout"])
|
||||
response.raise_for_status()
|
||||
# Добавляем логирование для отладки
|
||||
if response.status_code == 200:
|
||||
try:
|
||||
return response.json()
|
||||
except Exception as json_e:
|
||||
self.logger.debug(f"[_perform_upload][Debug] Response is not valid JSON: {response.text[:200]}...")
|
||||
app_logger.debug(f"[_perform_upload][Debug] Response is not valid JSON: {response.text[:200]}...")
|
||||
raise SupersetAPIError(f"API error during upload: Response is not valid JSON: {json_e}") from json_e
|
||||
return response.json()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
@@ -262,4 +283,4 @@ class APIClient:
|
||||
|
||||
# [/DEF:APIClient:Class]
|
||||
|
||||
# [/DEF:superset_tool.utils.network:Module]
|
||||
# [/DEF:backend.core.utils.network:Module]
|
||||
@@ -12,10 +12,9 @@ from requests.exceptions import RequestException
|
||||
|
||||
from ..core.plugin_base import PluginBase
|
||||
from ..core.logger import belief_scope
|
||||
from superset_tool.client import SupersetClient
|
||||
from superset_tool.exceptions import SupersetAPIError
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
from superset_tool.utils.fileio import (
|
||||
from ..core.superset_client import SupersetClient
|
||||
from ..core.utils.network import SupersetAPIError
|
||||
from ..core.utils.fileio import (
|
||||
save_and_unpack_dashboard,
|
||||
archive_exports,
|
||||
sanitize_filename,
|
||||
@@ -23,7 +22,6 @@ from superset_tool.utils.fileio import (
|
||||
remove_empty_directories,
|
||||
RetentionPolicy
|
||||
)
|
||||
from superset_tool.utils.init_clients import setup_clients
|
||||
from ..dependencies import get_config_manager
|
||||
|
||||
# [DEF:BackupPlugin:Class]
|
||||
@@ -131,25 +129,25 @@ class BackupPlugin(PluginBase):
|
||||
backup_path_str = params.get("backup_path") or config_manager.get_config().settings.backup_path
|
||||
backup_path = Path(backup_path_str)
|
||||
|
||||
logger = SupersetLogger(log_dir=backup_path / "Logs", console=True)
|
||||
logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")
|
||||
from ..core.logger import logger as app_logger
|
||||
app_logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")
|
||||
|
||||
try:
|
||||
config_manager = get_config_manager()
|
||||
if not config_manager.has_environments():
|
||||
raise ValueError("No Superset environments configured. Please add an environment in Settings.")
|
||||
|
||||
clients = setup_clients(logger, custom_envs=config_manager.get_environments())
|
||||
client = clients.get(env)
|
||||
|
||||
if not client:
|
||||
env_config = config_manager.get_environment(env)
|
||||
if not env_config:
|
||||
raise ValueError(f"Environment '{env}' not found in configuration.")
|
||||
|
||||
client = SupersetClient(env_config)
|
||||
|
||||
dashboard_count, dashboard_meta = client.get_dashboards()
|
||||
logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")
|
||||
app_logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")
|
||||
|
||||
if dashboard_count == 0:
|
||||
logger.info("[BackupPlugin][Exit] No dashboards to back up.")
|
||||
app_logger.info("[BackupPlugin][Exit] No dashboards to back up.")
|
||||
return
|
||||
|
||||
for db in dashboard_meta:
|
||||
@@ -169,23 +167,22 @@ class BackupPlugin(PluginBase):
|
||||
zip_content=zip_content,
|
||||
original_filename=filename,
|
||||
output_dir=dashboard_dir,
|
||||
unpack=False,
|
||||
logger=logger
|
||||
unpack=False
|
||||
)
|
||||
|
||||
archive_exports(str(dashboard_dir), policy=RetentionPolicy(), logger=logger)
|
||||
archive_exports(str(dashboard_dir), policy=RetentionPolicy())
|
||||
|
||||
except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
|
||||
logger.error(f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
|
||||
app_logger.error(f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
|
||||
continue
|
||||
|
||||
consolidate_archive_folders(backup_path / env.upper(), logger=logger)
|
||||
remove_empty_directories(str(backup_path / env.upper()), logger=logger)
|
||||
consolidate_archive_folders(backup_path / env.upper())
|
||||
remove_empty_directories(str(backup_path / env.upper()))
|
||||
|
||||
logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")
|
||||
app_logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")
|
||||
|
||||
except (RequestException, IOError, KeyError) as e:
|
||||
logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
|
||||
app_logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
|
||||
raise e
|
||||
# [/DEF:execute:Function]
|
||||
# [/DEF:BackupPlugin:Class]
|
||||
|
||||
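The `execute` hunks above keep the per-dashboard try/except-continue structure while dropping the injected logger. A compressed sketch of that control flow; the metadata field names and the `export_dashboard` call are assumptions, not guaranteed client APIs:

```python
from pathlib import Path

def backup_environment(client, backup_path: Path, env: str) -> None:
    """Sketch of BackupPlugin.execute's loop: one bad dashboard must not abort the run."""
    dashboard_count, dashboard_meta = client.get_dashboards()
    if dashboard_count == 0:
        return
    for db in dashboard_meta:
        dashboard_id = db.get("id")                       # field names assumed
        title = db.get("dashboard_title", str(dashboard_id))
        try:
            zip_content, filename = client.export_dashboard(dashboard_id)  # illustrative
            dashboard_dir = backup_path / env.upper() / title
            dashboard_dir.mkdir(parents=True, exist_ok=True)
            # save_and_unpack_dashboard(...) and archive_exports(...) go here,
            # exactly as in the diff above, just without the logger argument.
        except (IOError, OSError) as exc:                 # narrowed to API/IO errors in the real code
            print(f"skipping dashboard {title} ({dashboard_id}): {exc}")
            continue
```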
@@ -12,8 +12,7 @@ from ..core.superset_client import SupersetClient
|
||||
from ..core.logger import logger, belief_scope
|
||||
from ..core.database import SessionLocal
|
||||
from ..models.connection import ConnectionConfig
|
||||
from superset_tool.utils.dataset_mapper import DatasetMapper
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
from ..core.utils.dataset_mapper import DatasetMapper
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:MapperPlugin:Class]
|
||||
@@ -173,9 +172,7 @@ class MapperPlugin(PluginBase):
|
||||
|
||||
logger.info(f"[MapperPlugin.execute][Action] Starting mapping for dataset {dataset_id} in {env_name}")
|
||||
|
||||
# Use internal SupersetLogger for DatasetMapper
|
||||
s_logger = SupersetLogger(name="dataset_mapper_plugin")
|
||||
mapper = DatasetMapper(s_logger)
|
||||
mapper = DatasetMapper()
|
||||
|
||||
try:
|
||||
mapper.run_mapping(
|
||||
|
||||
@@ -13,11 +13,9 @@ import re
|
||||
|
||||
from ..core.plugin_base import PluginBase
|
||||
from ..core.logger import belief_scope
|
||||
from superset_tool.client import SupersetClient
|
||||
from superset_tool.utils.init_clients import setup_clients
|
||||
from superset_tool.utils.fileio import create_temp_file, update_yamls, create_dashboard_export
|
||||
from ..core.superset_client import SupersetClient
|
||||
from ..core.utils.fileio import create_temp_file, update_yamls, create_dashboard_export
|
||||
from ..dependencies import get_config_manager
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
from ..core.migration_engine import MigrationEngine
|
||||
from ..core.database import SessionLocal
|
||||
from ..models.mapping import DatabaseMapping, Environment
|
||||
@@ -150,7 +148,7 @@ class MigrationPlugin(PluginBase):
|
||||
from ..dependencies import get_task_manager
|
||||
tm = get_task_manager()
|
||||
|
||||
class TaskLoggerProxy(SupersetLogger):
|
||||
class TaskLoggerProxy:
|
||||
# [DEF:__init__:Function]
|
||||
# @PURPOSE: Initializes the proxy logger.
|
||||
# @PRE: None.
|
||||
@@ -158,7 +156,7 @@ class MigrationPlugin(PluginBase):
|
||||
def __init__(self):
|
||||
with belief_scope("__init__"):
|
||||
# Initialize parent with dummy values since we override methods
|
||||
super().__init__(console=False)
|
||||
pass
|
||||
# [/DEF:__init__:Function]
|
||||
|
||||
# [DEF:debug:Function]
|
||||
@@ -246,9 +244,8 @@ class MigrationPlugin(PluginBase):
|
||||
|
||||
logger.info(f"[MigrationPlugin][State] Resolved environments: {from_env_name} -> {to_env_name}")
|
||||
|
||||
all_clients = setup_clients(logger, custom_envs=environments)
|
||||
from_c = all_clients.get(from_env_name)
|
||||
to_c = all_clients.get(to_env_name)
|
||||
from_c = SupersetClient(src_env)
|
||||
to_c = SupersetClient(tgt_env)
|
||||
|
||||
if not from_c or not to_c:
|
||||
raise ValueError(f"Clients not initialized for environments: {from_env_name}, {to_env_name}")
|
||||
|
||||
@@ -13,7 +13,6 @@ from typing import List, Dict
|
||||
from backend.src.core.logger import belief_scope
|
||||
from backend.src.core.superset_client import SupersetClient
|
||||
from backend.src.core.utils.matching import suggest_mappings
|
||||
from superset_tool.models import SupersetConfig
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:MappingService:Class]
|
||||
@@ -43,17 +42,7 @@ class MappingService:
|
||||
if not env:
|
||||
raise ValueError(f"Environment {env_id} not found")
|
||||
|
||||
superset_config = SupersetConfig(
|
||||
env=env.name,
|
||||
base_url=env.url,
|
||||
auth={
|
||||
"provider": "db",
|
||||
"username": env.username,
|
||||
"password": env.password,
|
||||
"refresh": "false"
|
||||
}
|
||||
)
|
||||
return SupersetClient(superset_config)
|
||||
return SupersetClient(env)
|
||||
# [/DEF:_get_client:Function]
|
||||
|
||||
# [DEF:get_suggestions:Function]
|
||||
|
||||
BIN
backend/tasks.db
Binary file not shown.
@@ -1,5 +1,5 @@
|
||||
import pytest
|
||||
from backend.src.core.logger import belief_scope, logger
|
||||
from src.core.logger import belief_scope, logger
|
||||
|
||||
|
||||
# [DEF:test_belief_scope_logs_entry_action_exit:Function]
|
||||
|
||||
@@ -1,62 +1,23 @@
|
||||
import pytest
|
||||
from superset_tool.models import SupersetConfig
|
||||
from superset_tool.utils.logger import belief_scope
|
||||
from src.core.config_models import Environment
|
||||
from src.core.logger import belief_scope
|
||||
|
||||
# [DEF:test_superset_config_url_normalization:Function]
|
||||
# @PURPOSE: Tests that SupersetConfig correctly normalizes the base URL.
|
||||
# @PRE: SupersetConfig class is available.
|
||||
# @POST: URL normalization is verified.
|
||||
def test_superset_config_url_normalization():
|
||||
with belief_scope("test_superset_config_url_normalization"):
|
||||
auth = {
|
||||
"provider": "db",
|
||||
"username": "admin",
|
||||
"password": "password",
|
||||
"refresh": "token"
|
||||
}
|
||||
|
||||
# Test with /api/v1 already present
|
||||
config = SupersetConfig(
|
||||
env="dev",
|
||||
base_url="http://localhost:8088/api/v1",
|
||||
auth=auth
|
||||
# [DEF:test_environment_model:Function]
|
||||
# @PURPOSE: Tests that Environment model correctly stores values.
|
||||
# @PRE: Environment class is available.
|
||||
# @POST: Values are verified.
|
||||
def test_environment_model():
|
||||
with belief_scope("test_environment_model"):
|
||||
env = Environment(
|
||||
id="test-id",
|
||||
name="test-env",
|
||||
url="http://localhost:8088/api/v1",
|
||||
username="admin",
|
||||
password="password"
|
||||
)
|
||||
assert config.base_url == "http://localhost:8088/api/v1"
|
||||
|
||||
# Test without /api/v1
|
||||
config = SupersetConfig(
|
||||
env="dev",
|
||||
base_url="http://localhost:8088",
|
||||
auth=auth
|
||||
)
|
||||
assert config.base_url == "http://localhost:8088/api/v1"
|
||||
|
||||
# Test with trailing slash
|
||||
config = SupersetConfig(
|
||||
env="dev",
|
||||
base_url="http://localhost:8088/",
|
||||
auth=auth
|
||||
)
|
||||
assert config.base_url == "http://localhost:8088/api/v1"
|
||||
assert env.id == "test-id"
|
||||
assert env.name == "test-env"
|
||||
assert env.url == "http://localhost:8088/api/v1"
|
||||
# [/DEF:test_superset_config_url_normalization:Function]
|
||||
|
||||
# [DEF:test_superset_config_invalid_url:Function]
|
||||
# @PURPOSE: Tests that SupersetConfig raises ValueError for invalid URLs.
|
||||
# @PRE: SupersetConfig class is available.
|
||||
# @POST: ValueError is raised for invalid URLs.
|
||||
def test_superset_config_invalid_url():
|
||||
with belief_scope("test_superset_config_invalid_url"):
|
||||
auth = {
|
||||
"provider": "db",
|
||||
"username": "admin",
|
||||
"password": "password",
|
||||
"refresh": "token"
|
||||
}
|
||||
|
||||
with pytest.raises(ValueError, match="Must start with http:// or https://"):
|
||||
SupersetConfig(
|
||||
env="dev",
|
||||
base_url="localhost:8088",
|
||||
auth=auth
|
||||
)
|
||||
# [/DEF:test_superset_config_invalid_url:Function]
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>frontend</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
<script type="module" src="/src/main.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,4 +1,5 @@
|
||||
{
|
||||
"extends": "./.svelte-kit/tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"moduleResolution": "bundler",
|
||||
"target": "ESNext",
|
||||
|
||||
@@ -1,117 +0,0 @@
|
||||
<!-- [DEF:App:Component] -->
|
||||
<!--
|
||||
@SEMANTICS: main, entrypoint, layout, navigation
|
||||
@PURPOSE: The root component of the frontend application. Manages navigation and layout.
|
||||
@LAYER: UI
|
||||
@RELATION: DEPENDS_ON -> frontend/src/pages/Dashboard.svelte
|
||||
@RELATION: DEPENDS_ON -> frontend/src/pages/Settings.svelte
|
||||
@RELATION: DEPENDS_ON -> frontend/src/lib/stores.js
|
||||
|
||||
@INVARIANT: Navigation state must be persisted in the currentPage store.
|
||||
-->
|
||||
<script>
|
||||
// [SECTION: IMPORTS]
|
||||
import { get } from 'svelte/store';
|
||||
import Dashboard from './pages/Dashboard.svelte';
|
||||
import Settings from './pages/Settings.svelte';
|
||||
import { selectedPlugin, selectedTask, currentPage } from './lib/stores.js';
|
||||
import TaskRunner from './components/TaskRunner.svelte';
|
||||
import DynamicForm from './components/DynamicForm.svelte';
|
||||
import { api } from './lib/api.js';
|
||||
import Toast from './components/Toast.svelte';
|
||||
// [/SECTION]
|
||||
|
||||
// [DEF:handleFormSubmit:Function]
|
||||
/**
|
||||
* @purpose Handles form submission for task creation.
|
||||
* @pre event.detail contains form parameters.
|
||||
* @post Task is created and selectedTask is updated.
|
||||
* @param {CustomEvent} event - The submit event from DynamicForm.
|
||||
*/
|
||||
async function handleFormSubmit(event) {
|
||||
console.log("[App.handleFormSubmit][Action] Handling form submission for task creation.");
|
||||
const params = event.detail;
|
||||
try {
|
||||
const plugin = get(selectedPlugin);
|
||||
const task = await api.createTask(plugin.id, params);
|
||||
selectedTask.set(task);
|
||||
selectedPlugin.set(null);
|
||||
console.log(`[App.handleFormSubmit][Coherence:OK] Task created id=${task.id}`);
|
||||
} catch (error) {
|
||||
console.error(`[App.handleFormSubmit][Coherence:Failed] Task creation failed error=${error}`);
|
||||
}
|
||||
}
|
||||
// [/DEF:handleFormSubmit:Function]
|
||||
|
||||
// [DEF:navigate:Function]
|
||||
/**
|
||||
* @purpose Changes the current page and resets state.
|
||||
* @pre Target page name is provided.
|
||||
* @post currentPage store is updated and selection state is reset.
|
||||
* @param {string} page - Target page name.
|
||||
*/
|
||||
function navigate(page) {
|
||||
console.log(`[App.navigate][Action] Navigating to ${page}.`);
|
||||
// Reset selection first
|
||||
if (page !== get(currentPage)) {
|
||||
selectedPlugin.set(null);
|
||||
selectedTask.set(null);
|
||||
}
|
||||
// Then set page
|
||||
currentPage.set(page);
|
||||
}
|
||||
// [/DEF:navigate:Function]
|
||||
</script>
|
||||
|
||||
<!-- [SECTION: TEMPLATE] -->
|
||||
<Toast />
|
||||
|
||||
<main class="bg-gray-50 min-h-screen">
|
||||
<header class="bg-white shadow-md p-4 flex justify-between items-center">
|
||||
<button
|
||||
type="button"
|
||||
class="text-3xl font-bold text-gray-800 focus:outline-none"
|
||||
on:click={() => navigate('dashboard')}
|
||||
>
|
||||
Superset Tools
|
||||
</button>
|
||||
<nav class="space-x-4">
|
||||
<button
|
||||
type="button"
|
||||
on:click={() => navigate('dashboard')}
|
||||
class="text-gray-600 hover:text-blue-600 font-medium {$currentPage === 'dashboard' ? 'text-blue-600 border-b-2 border-blue-600' : ''}"
|
||||
>
|
||||
Dashboard
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
on:click={() => navigate('settings')}
|
||||
class="text-gray-600 hover:text-blue-600 font-medium {$currentPage === 'settings' ? 'text-blue-600 border-b-2 border-blue-600' : ''}"
|
||||
>
|
||||
Settings
|
||||
</button>
|
||||
</nav>
|
||||
</header>
|
||||
|
||||
<div class="p-4">
|
||||
{#if $currentPage === 'settings'}
|
||||
<Settings />
|
||||
{:else if $selectedTask}
|
||||
<TaskRunner />
|
||||
<button on:click={() => selectedTask.set(null)} class="mt-4 bg-blue-500 text-white p-2 rounded">
|
||||
Back to Task List
|
||||
</button>
|
||||
{:else if $selectedPlugin}
|
||||
<h2 class="text-2xl font-bold mb-4">{$selectedPlugin.name}</h2>
|
||||
<DynamicForm schema={$selectedPlugin.schema} on:submit={handleFormSubmit} />
|
||||
<button on:click={() => selectedPlugin.set(null)} class="mt-4 bg-gray-500 text-white p-2 rounded">
|
||||
Back to Dashboard
|
||||
</button>
|
||||
{:else}
|
||||
<Dashboard />
|
||||
{/if}
|
||||
</div>
|
||||
</main>
|
||||
<!-- [/SECTION] -->
|
||||
|
||||
<!-- [/DEF:App:Component] -->
|
||||
@@ -39,7 +39,7 @@
|
||||
<button class="text-gray-600 hover:text-blue-600 font-medium pb-1 {$page.url.pathname.startsWith('/tools') ? 'text-blue-600 border-b-2 border-blue-600' : ''}">
|
||||
Tools
|
||||
</button>
|
||||
<div class="absolute hidden group-hover:block bg-white shadow-lg rounded-md mt-1 py-2 w-48 z-10 border border-gray-100">
|
||||
<div class="absolute hidden group-hover:block bg-white shadow-lg rounded-md mt-1 py-2 w-48 z-10 border border-gray-100 before:absolute before:-top-2 before:left-0 before:right-0 before:h-2 before:content-[''] right-0">
|
||||
<a href="/tools/search" class="block px-4 py-2 text-sm text-gray-700 hover:bg-blue-50 hover:text-blue-600">Dataset Search</a>
|
||||
<a href="/tools/mapper" class="block px-4 py-2 text-sm text-gray-700 hover:bg-blue-50 hover:text-blue-600">Dataset Mapper</a>
|
||||
<a href="/tools/debug" class="block px-4 py-2 text-sm text-gray-700 hover:bg-blue-50 hover:text-blue-600">System Debug</a>
|
||||
@@ -49,7 +49,7 @@
|
||||
<button class="text-gray-600 hover:text-blue-600 font-medium pb-1 {$page.url.pathname.startsWith('/settings') ? 'text-blue-600 border-b-2 border-blue-600' : ''}">
|
||||
Settings
|
||||
</button>
|
||||
<div class="absolute hidden group-hover:block bg-white shadow-lg rounded-md mt-1 py-2 w-48 z-10 border border-gray-100">
|
||||
<div class="absolute hidden group-hover:block bg-white shadow-lg rounded-md mt-1 py-2 w-48 z-10 border border-gray-100 before:absolute before:-top-2 before:left-0 before:right-0 before:h-2 before:content-[''] right-0">
|
||||
<a href="/settings" class="block px-4 py-2 text-sm text-gray-700 hover:bg-blue-50 hover:text-blue-600">General Settings</a>
|
||||
<a href="/settings/connections" class="block px-4 py-2 text-sm text-gray-700 hover:bg-blue-50 hover:text-blue-600">Connections</a>
|
||||
</div>
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
<!-- [DEF:TaskLogViewer:Component] -->
|
||||
<!--
|
||||
@SEMANTICS: task, log, viewer, modal
|
||||
@PURPOSE: Displays detailed logs for a specific task in a modal.
|
||||
@SEMANTICS: task, log, viewer, modal, inline
|
||||
@PURPOSE: Displays detailed logs for a specific task in a modal or inline.
|
||||
@LAYER: UI
|
||||
@RELATION: USES -> frontend/src/lib/api.js (inferred)
|
||||
@RELATION: USES -> frontend/src/services/taskService.js
|
||||
-->
|
||||
<script>
|
||||
import { createEventDispatcher, onMount, onDestroy } from 'svelte';
|
||||
import { getTaskLogs } from '../services/taskService.js';
|
||||
|
||||
export let show = false;
|
||||
export let inline = false;
|
||||
export let taskId = null;
|
||||
export let taskStatus = null; // To know if we should poll
|
||||
|
||||
@@ -22,19 +23,27 @@
|
||||
let autoScroll = true;
|
||||
let logContainer;
|
||||
|
||||
$: shouldShow = inline || show;
|
||||
|
||||
// [DEF:fetchLogs:Function]
|
||||
// @PURPOSE: Fetches logs for the current task.
|
||||
// @PRE: taskId must be set.
|
||||
// @POST: logs array is updated with data from taskService.
|
||||
/**
|
||||
* @purpose Fetches logs for the current task.
|
||||
* @pre taskId must be set.
|
||||
* @post logs array is updated with data from taskService.
|
||||
* @side_effect Updates logs, loading, and error state.
|
||||
*/
|
||||
async function fetchLogs() {
|
||||
if (!taskId) return;
|
||||
console.log(`[fetchLogs][Action] Fetching logs for task context={{'taskId': '${taskId}'}}`);
|
||||
try {
|
||||
logs = await getTaskLogs(taskId);
|
||||
if (autoScroll) {
|
||||
scrollToBottom();
|
||||
}
|
||||
console.log(`[fetchLogs][Coherence:OK] Logs fetched context={{'count': ${logs.length}}}`);
|
||||
} catch (e) {
|
||||
error = e.message;
|
||||
console.error(`[fetchLogs][Coherence:Failed] Error fetching logs context={{'error': '${e.message}'}}`);
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
@@ -42,9 +51,11 @@
|
||||
// [/DEF:fetchLogs:Function]
|
||||
|
||||
// [DEF:scrollToBottom:Function]
|
||||
// @PURPOSE: Scrolls the log container to the bottom.
|
||||
// @PRE: logContainer element must be bound.
|
||||
// @POST: logContainer scrollTop is set to scrollHeight.
|
||||
/**
|
||||
* @purpose Scrolls the log container to the bottom.
|
||||
* @pre logContainer element must be bound.
|
||||
* @post logContainer scrollTop is set to scrollHeight.
|
||||
*/
|
||||
function scrollToBottom() {
|
||||
if (logContainer) {
|
||||
setTimeout(() => {
|
||||
@@ -55,9 +66,11 @@
|
||||
// [/DEF:scrollToBottom:Function]
|
||||
|
||||
// [DEF:handleScroll:Function]
|
||||
// @PURPOSE: Updates auto-scroll preference based on scroll position.
|
||||
// @PRE: logContainer scroll event fired.
|
||||
// @POST: autoScroll boolean is updated.
|
||||
/**
|
||||
* @purpose Updates auto-scroll preference based on scroll position.
|
||||
* @pre logContainer scroll event fired.
|
||||
* @post autoScroll boolean is updated.
|
||||
*/
|
||||
function handleScroll() {
|
||||
if (!logContainer) return;
|
||||
// If user scrolls up, disable auto-scroll
|
||||
@@ -68,9 +81,11 @@
|
||||
// [/DEF:handleScroll:Function]
|
||||
|
||||
// [DEF:close:Function]
|
||||
// @PURPOSE: Closes the log viewer modal.
|
||||
// @PRE: Modal is open.
|
||||
// @POST: Modal is closed and close event is dispatched.
|
||||
/**
|
||||
* @purpose Closes the log viewer modal.
|
||||
* @pre Modal is open.
|
||||
* @post Modal is closed and close event is dispatched.
|
||||
*/
|
||||
function close() {
|
||||
dispatch('close');
|
||||
show = false;
|
||||
@@ -78,9 +93,11 @@
|
||||
// [/DEF:close:Function]
|
||||
|
||||
// [DEF:getLogLevelColor:Function]
|
||||
// @PURPOSE: Returns the CSS color class for a given log level.
|
||||
// @PRE: level string is provided.
|
||||
// @POST: Returns tailwind color class string.
|
||||
/**
|
||||
* @purpose Returns the CSS color class for a given log level.
|
||||
* @pre level string is provided.
|
||||
* @post Returns tailwind color class string.
|
||||
*/
|
||||
function getLogLevelColor(level) {
|
||||
switch (level) {
|
||||
case 'INFO': return 'text-blue-600';
|
||||
@@ -92,8 +109,10 @@
|
||||
}
|
||||
// [/DEF:getLogLevelColor:Function]
|
||||
|
||||
// React to changes in show/taskId
|
||||
$: if (show && taskId) {
|
||||
// React to changes in show/taskId/taskStatus
|
||||
$: if (shouldShow && taskId) {
|
||||
if (interval) clearInterval(interval);
|
||||
|
||||
logs = [];
|
||||
loading = true;
|
||||
error = "";
|
||||
@@ -108,16 +127,59 @@
|
||||
}
|
||||
|
||||
// [DEF:onDestroy:Function]
|
||||
// @PURPOSE: Cleans up the polling interval.
|
||||
// @PRE: Component is being destroyed.
|
||||
// @POST: Polling interval is cleared.
|
||||
/**
|
||||
* @purpose Cleans up the polling interval.
|
||||
* @pre Component is being destroyed.
|
||||
* @post Polling interval is cleared.
|
||||
*/
|
||||
onDestroy(() => {
|
||||
if (interval) clearInterval(interval);
|
||||
});
|
||||
// [/DEF:onDestroy:Function]
|
||||
</script>
|
||||
|
||||
{#if show}
|
||||
{#if shouldShow}
|
||||
{#if inline}
|
||||
<div class="flex flex-col h-full w-full p-4">
|
||||
<div class="flex justify-between items-center mb-4">
|
||||
<h3 class="text-lg font-medium text-gray-900">
|
||||
Task Logs <span class="text-sm text-gray-500 font-normal">({taskId})</span>
|
||||
</h3>
|
||||
<button on:click={fetchLogs} class="text-sm text-indigo-600 hover:text-indigo-900">Refresh</button>
|
||||
</div>
|
||||
|
||||
<div class="flex-1 border rounded-md bg-gray-50 p-4 overflow-y-auto font-mono text-sm"
|
||||
bind:this={logContainer}
|
||||
on:scroll={handleScroll}>
|
||||
{#if loading && logs.length === 0}
|
||||
<p class="text-gray-500 text-center">Loading logs...</p>
|
||||
{:else if error}
|
||||
<p class="text-red-500 text-center">{error}</p>
|
||||
{:else if logs.length === 0}
|
||||
<p class="text-gray-500 text-center">No logs available.</p>
|
||||
{:else}
|
||||
{#each logs as log}
|
||||
<div class="mb-1 hover:bg-gray-100 p-1 rounded">
|
||||
<span class="text-gray-400 text-xs mr-2">
|
||||
{new Date(log.timestamp).toLocaleTimeString()}
|
||||
</span>
|
||||
<span class="font-bold text-xs mr-2 w-16 inline-block {getLogLevelColor(log.level)}">
|
||||
[{log.level}]
|
||||
</span>
|
||||
<span class="text-gray-800 break-words">
|
||||
{log.message}
|
||||
</span>
|
||||
{#if log.context}
|
||||
<div class="ml-24 text-xs text-gray-500 mt-1 bg-gray-100 p-1 rounded overflow-x-auto">
|
||||
<pre>{JSON.stringify(log.context, null, 2)}</pre>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/each}
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="fixed inset-0 z-50 overflow-y-auto" aria-labelledby="modal-title" role="dialog" aria-modal="true">
|
||||
<div class="flex items-end justify-center min-h-screen pt-4 px-4 pb-20 text-center sm:block sm:p-0">
|
||||
<!-- Background overlay -->
|
||||
@@ -180,4 +242,5 @@
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/if}
|
||||
<!-- [/DEF:TaskLogViewer:Component] -->
|
||||
@@ -1,18 +0,0 @@
|
||||
// [DEF:main:Module]
|
||||
// @SEMANTICS: entrypoint, svelte, init
|
||||
// @PURPOSE: Entry point for the Svelte application.
|
||||
// @LAYER: UI-Entry
|
||||
|
||||
import './app.css'
|
||||
import App from './App.svelte'
|
||||
|
||||
// [DEF:app_instance:Data]
|
||||
// @PURPOSE: Initialized Svelte app instance.
|
||||
const app = new App({
|
||||
target: document.getElementById('app'),
|
||||
props: {}
|
||||
})
|
||||
// [/DEF:app_instance:Data]
|
||||
|
||||
export default app
|
||||
// [/DEF:main:Module]
|
||||
@@ -1,3 +1,11 @@
|
||||
<!-- [DEF:TaskManagementPage:Component] -->
|
||||
<!--
|
||||
@SEMANTICS: tasks, management, history, logs
|
||||
@PURPOSE: Page for managing and monitoring tasks.
|
||||
@LAYER: Page
|
||||
@RELATION: USES -> TaskList
|
||||
@RELATION: USES -> TaskLogViewer
|
||||
-->
|
||||
<script>
|
||||
import { onMount, onDestroy } from 'svelte';
|
||||
import { getTasks, createTask, getEnvironmentsList } from '../../lib/api';
|
||||
@@ -14,11 +22,13 @@
|
||||
let selectedEnvId = '';
|
||||
|
||||
// [DEF:loadInitialData:Function]
|
||||
/* @PURPOSE: Loads tasks and environments on page initialization.
|
||||
@PRE: API must be reachable.
|
||||
@POST: tasks and environments variables are populated.
|
||||
/**
|
||||
* @purpose Loads tasks and environments on page initialization.
|
||||
* @pre API must be reachable.
|
||||
* @post tasks and environments variables are populated.
|
||||
*/
|
||||
async function loadInitialData() {
|
||||
console.log("[loadInitialData][Action] Loading initial tasks and environments");
|
||||
try {
|
||||
loading = true;
|
||||
const [tasksData, envsData] = await Promise.all([
|
||||
@@ -27,8 +37,9 @@
|
||||
]);
|
||||
tasks = tasksData;
|
||||
environments = envsData;
|
||||
console.log(`[loadInitialData][Coherence:OK] Data loaded context={{'tasks': ${tasks.length}, 'envs': ${environments.length}}}`);
|
||||
} catch (error) {
|
||||
console.error('Failed to load tasks data:', error);
|
||||
console.error(`[loadInitialData][Coherence:Failed] Failed to load tasks data context={{'error': '${error.message}'}}`);
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
@@ -36,9 +47,10 @@
|
||||
// [/DEF:loadInitialData:Function]
|
||||
|
||||
// [DEF:refreshTasks:Function]
|
||||
/* @PURPOSE: Periodically refreshes the task list.
|
||||
@PRE: API must be reachable.
|
||||
@POST: tasks variable is updated if data is valid.
|
||||
/**
|
||||
* @purpose Periodically refreshes the task list.
|
||||
* @pre API must be reachable.
|
||||
* @post tasks variable is updated if data is valid.
|
||||
*/
|
||||
async function refreshTasks() {
|
||||
try {
|
||||
@@ -48,25 +60,28 @@
|
||||
tasks = data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to refresh tasks:', error);
|
||||
console.error(`[refreshTasks][Coherence:Failed] Failed to refresh tasks context={{'error': '${error.message}'}}`);
|
||||
}
|
||||
}
|
||||
// [/DEF:refreshTasks:Function]
|
||||
|
||||
// [DEF:handleSelectTask:Function]
|
||||
/* @PURPOSE: Updates the selected task ID when a task is clicked.
|
||||
@PRE: event.detail.id must be provided.
|
||||
@POST: selectedTaskId is updated.
|
||||
/**
|
||||
* @purpose Updates the selected task ID when a task is clicked.
|
||||
* @pre event.detail.id must be provided.
|
||||
* @post selectedTaskId is updated.
|
||||
*/
|
||||
function handleSelectTask(event) {
|
||||
selectedTaskId = event.detail.id;
|
||||
console.log(`[handleSelectTask][Action] Task selected context={{'taskId': '${selectedTaskId}'}}`);
|
||||
}
|
||||
// [/DEF:handleSelectTask:Function]
|
||||
|
||||
// [DEF:handleRunBackup:Function]
|
||||
/* @PURPOSE: Triggers a manual backup task for the selected environment.
|
||||
@PRE: selectedEnvId must not be empty.
|
||||
@POST: Backup task is created and task list is refreshed.
|
||||
/**
|
||||
* @purpose Triggers a manual backup task for the selected environment.
|
||||
* @pre selectedEnvId must not be empty.
|
||||
* @post Backup task is created and task list is refreshed.
|
||||
*/
|
||||
async function handleRunBackup() {
|
||||
if (!selectedEnvId) {
|
||||
@@ -74,14 +89,16 @@
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[handleRunBackup][Action] Starting backup for env context={{'envId': '${selectedEnvId}'}}`);
|
||||
try {
|
||||
const task = await createTask('superset-backup', { environment_id: selectedEnvId });
|
||||
addToast('Backup task started', 'success');
|
||||
showBackupModal = false;
|
||||
selectedTaskId = task.id;
|
||||
await refreshTasks();
|
||||
console.log(`[handleRunBackup][Coherence:OK] Backup task created context={{'taskId': '${task.id}'}}`);
|
||||
} catch (error) {
|
||||
console.error('Failed to start backup:', error);
|
||||
console.error(`[handleRunBackup][Coherence:Failed] Failed to start backup context={{'error': '${error.message}'}}`);
|
||||
}
|
||||
}
|
||||
// [/DEF:handleRunBackup:Function]
|
||||
@@ -117,7 +134,11 @@
|
||||
<h2 class="text-lg font-semibold mb-3 text-gray-700">Task Details & Logs</h2>
|
||||
{#if selectedTaskId}
|
||||
<div class="bg-white rounded-lg shadow-lg h-[600px] flex flex-col">
|
||||
<TaskLogViewer taskId={selectedTaskId} />
|
||||
<TaskLogViewer
|
||||
taskId={selectedTaskId}
|
||||
taskStatus={tasks.find(t => t.id === selectedTaskId)?.status}
|
||||
inline={true}
|
||||
/>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="bg-gray-50 border-2 border-dashed border-gray-300 rounded-lg h-[600px] flex items-center justify-center text-gray-500">
|
||||
@@ -162,3 +183,5 @@
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<!-- [/DEF:TaskManagementPage:Component] -->
|
||||
@@ -1,413 +0,0 @@
|
||||
# [DEF:migration_script:Module]
|
||||
#
|
||||
# @SEMANTICS: migration, cli, superset, ui, logging, error-recovery, batch-delete
|
||||
# @PURPOSE: Предоставляет интерактивный CLI для миграции дашбордов Superset между окружениями с возможностью восстановления после ошибок.
|
||||
# @LAYER: App
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.client
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.utils
|
||||
# @PUBLIC_API: Migration
|
||||
|
||||
# [SECTION: IMPORTS]
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import zipfile
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple, Dict
|
||||
from superset_tool.client import SupersetClient
|
||||
from superset_tool.utils.init_clients import setup_clients
|
||||
from superset_tool.utils.fileio import create_temp_file, update_yamls, create_dashboard_export
|
||||
from superset_tool.utils.whiptail_fallback import menu, checklist, yesno, msgbox, inputbox, gauge
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:Migration:Class]
|
||||
# @PURPOSE: Инкапсулирует логику интерактивной миграции дашбордов с возможностью «удалить‑и‑перезаписать» при ошибке импорта.
|
||||
# @RELATION: CREATES_INSTANCE_OF -> SupersetLogger
|
||||
# @RELATION: USES -> SupersetClient
|
||||
class Migration:
|
||||
"""
|
||||
Интерактивный процесс миграции дашбордов.
|
||||
"""
|
||||
# [DEF:__init__:Function]
|
||||
# @PURPOSE: Инициализирует сервис миграции, настраивает логгер и начальные состояния.
|
||||
# @PRE: None.
|
||||
# @POST: `self.logger` готов к использованию; `enable_delete_on_failure` = `False`.
|
||||
def __init__(self) -> None:
|
||||
default_log_dir = Path.cwd() / "logs"
|
||||
self.logger = SupersetLogger(
|
||||
name="migration_script",
|
||||
log_dir=default_log_dir,
|
||||
level=logging.INFO,
|
||||
console=True,
|
||||
)
|
||||
self.enable_delete_on_failure = False
|
||||
self.from_c: Optional[SupersetClient] = None
|
||||
self.to_c: Optional[SupersetClient] = None
|
||||
self.dashboards_to_migrate: List[dict] = []
|
||||
self.db_config_replacement: Optional[dict] = None
|
||||
self._failed_imports: List[dict] = []
|
||||
# [/DEF:__init__:Function]
|
||||
|
||||
# [DEF:run:Function]
|
||||
# @PURPOSE: Точка входа – последовательный запуск всех шагов миграции.
|
||||
# @PRE: Логгер готов.
|
||||
# @POST: Скрипт завершён, пользователю выведено сообщение.
|
||||
# @RELATION: CALLS -> self.ask_delete_on_failure
|
||||
# @RELATION: CALLS -> self.select_environments
|
||||
# @RELATION: CALLS -> self.select_dashboards
|
||||
# @RELATION: CALLS -> self.confirm_db_config_replacement
|
||||
# @RELATION: CALLS -> self.execute_migration
|
||||
def run(self) -> None:
|
||||
with self.logger.belief_scope("Migration.run"):
|
||||
self.logger.info("[run][Entry] Запуск скрипта миграции.")
|
||||
self.ask_delete_on_failure()
|
||||
self.select_environments()
|
||||
self.select_dashboards()
|
||||
self.confirm_db_config_replacement()
|
||||
self.execute_migration()
|
||||
self.logger.info("[run][Exit] Скрипт миграции завершён.")
|
||||
# [/DEF:run:Function]
|
||||
|
||||
# [DEF:ask_delete_on_failure:Function]
|
||||
# @PURPOSE: Запрашивает у пользователя, следует ли удалять дашборд при ошибке импорта.
|
||||
# @PRE: None.
|
||||
# @POST: `self.enable_delete_on_failure` установлен.
|
||||
# @RELATION: CALLS -> yesno
|
||||
def ask_delete_on_failure(self) -> None:
|
||||
with self.logger.belief_scope("Migration.ask_delete_on_failure"):
|
||||
self.enable_delete_on_failure = yesno(
|
||||
"Поведение при ошибке импорта",
|
||||
"Если импорт завершится ошибкой, удалить существующий дашборд и попытаться импортировать заново?",
|
||||
)
|
||||
self.logger.info(
|
||||
"[ask_delete_on_failure][State] Delete-on-failure = %s",
|
||||
self.enable_delete_on_failure,
|
||||
)
|
||||
# [/DEF:ask_delete_on_failure:Function]
|
||||
|
||||
# [DEF:select_environments:Function]
|
||||
# @PURPOSE: Позволяет пользователю выбрать исходное и целевое окружения Superset.
|
||||
# @PRE: `setup_clients` успешно инициализирует все клиенты.
|
||||
# @POST: `self.from_c` и `self.to_c` установлены.
|
||||
# @RELATION: CALLS -> setup_clients
|
||||
# @RELATION: CALLS -> menu
|
||||
def select_environments(self) -> None:
|
||||
with self.logger.belief_scope("Migration.select_environments"):
|
||||
self.logger.info("[select_environments][Entry] Шаг 1/5: Выбор окружений.")
|
||||
try:
|
||||
all_clients = setup_clients(self.logger)
|
||||
available_envs = list(all_clients.keys())
|
||||
except Exception as e:
|
||||
self.logger.error("[select_environments][Failure] %s", e, exc_info=True)
|
||||
msgbox("Ошибка", "Не удалось инициализировать клиенты.")
|
||||
return
|
||||
|
||||
rc, from_env_name = menu(
|
||||
title="Выбор окружения",
|
||||
prompt="Исходное окружение:",
|
||||
choices=available_envs,
|
||||
)
|
||||
if rc != 0 or from_env_name is None:
|
||||
self.logger.info("[select_environments][State] Source environment selection cancelled.")
|
||||
return
|
||||
self.from_c = all_clients[from_env_name]
|
||||
self.logger.info("[select_environments][State] from = %s", from_env_name)
|
||||
|
||||
available_envs.remove(from_env_name)
|
||||
rc, to_env_name = menu(
|
||||
title="Выбор окружения",
|
||||
prompt="Целевое окружение:",
|
||||
choices=available_envs,
|
||||
)
|
||||
if rc != 0 or to_env_name is None:
|
||||
self.logger.info("[select_environments][State] Target environment selection cancelled.")
|
||||
return
|
||||
self.to_c = all_clients[to_env_name]
|
||||
self.logger.info("[select_environments][State] to = %s", to_env_name)
|
||||
self.logger.info("[select_environments][Exit] Шаг 1 завершён.")
|
||||
# [/DEF:select_environments:Function]
|
||||
|
||||
# [DEF:select_dashboards:Function]
|
||||
# @PURPOSE: Позволяет пользователю выбрать набор дашбордов для миграции.
|
||||
# @PRE: `self.from_c` инициализирован.
|
||||
# @POST: `self.dashboards_to_migrate` заполнен.
|
||||
# @RELATION: CALLS -> self.from_c.get_dashboards
|
||||
# @RELATION: CALLS -> checklist
|
||||
def select_dashboards(self) -> None:
|
||||
with self.logger.belief_scope("Migration.select_dashboards"):
|
||||
self.logger.info("[select_dashboards][Entry] Шаг 2/5: Выбор дашбордов.")
|
||||
if self.from_c is None:
|
||||
self.logger.error("[select_dashboards][Failure] Source client not initialized.")
|
||||
msgbox("Ошибка", "Исходное окружение не выбрано.")
|
||||
return
|
||||
try:
|
||||
_, all_dashboards = self.from_c.get_dashboards()
|
||||
if not all_dashboards:
|
||||
self.logger.warning("[select_dashboards][State] No dashboards.")
|
||||
msgbox("Информация", "В исходном окружении нет дашбордов.")
|
||||
return
|
||||
|
||||
rc, regex = inputbox("Поиск", "Введите регулярное выражение для поиска дашбордов:")
|
||||
if rc != 0:
|
||||
return
|
||||
# Ensure regex is a string and perform case‑insensitive search
|
||||
regex_str = str(regex)
|
||||
filtered_dashboards = [
|
||||
d for d in all_dashboards if re.search(regex_str, d["dashboard_title"], re.IGNORECASE)
|
||||
]
|
||||
|
||||
options = [("ALL", "Все дашборды")] + [
|
||||
(str(d["id"]), d["dashboard_title"]) for d in filtered_dashboards
|
||||
]
|
||||
|
||||
rc, selected = checklist(
|
||||
title="Выбор дашбордов",
|
||||
prompt="Отметьте нужные дашборды (введите номера):",
|
||||
options=options,
|
||||
)
|
||||
if rc != 0:
|
||||
return
|
||||
|
||||
if "ALL" in selected:
|
||||
self.dashboards_to_migrate = filtered_dashboards
|
||||
else:
|
||||
self.dashboards_to_migrate = [
|
||||
d for d in filtered_dashboards if str(d["id"]) in selected
|
||||
]
|
||||
|
||||
self.logger.info(
|
||||
"[select_dashboards][State] Выбрано %d дашбордов.",
|
||||
len(self.dashboards_to_migrate),
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error("[select_dashboards][Failure] %s", e, exc_info=True)
|
||||
msgbox("Ошибка", "Не удалось получить список дашбордов.")
|
||||
self.logger.info("[select_dashboards][Exit] Шаг 2 завершён.")
|
||||
# [/DEF:select_dashboards:Function]
|
||||
|
||||
# [DEF:confirm_db_config_replacement:Function]
|
||||
# @PURPOSE: Запрашивает у пользователя, требуется ли заменить имена БД в YAML-файлах.
|
||||
# @PRE: None.
|
||||
# @POST: `self.db_config_replacement` либо `None`, либо заполнен.
|
||||
# @RELATION: CALLS -> yesno
|
||||
# @RELATION: CALLS -> self._select_databases
|
||||
def confirm_db_config_replacement(self) -> None:
|
||||
with self.logger.belief_scope("Migration.confirm_db_config_replacement"):
|
||||
if yesno("Замена БД", "Заменить конфигурацию БД в YAML‑файлах?"):
|
||||
old_db, new_db = self._select_databases()
|
||||
if not old_db or not new_db:
|
||||
self.logger.info("[confirm_db_config_replacement][State] Selection cancelled.")
|
||||
return
|
||||
print(f"old_db: {old_db}")
|
||||
old_result = old_db.get("result", {})
|
||||
new_result = new_db.get("result", {})
|
||||
|
||||
self.db_config_replacement = {
|
||||
"old": {
|
||||
"database_name": old_result.get("database_name"),
|
||||
"uuid": old_result.get("uuid"),
|
||||
"database_uuid": old_result.get("uuid"),
|
||||
"id": str(old_db.get("id"))
|
||||
},
|
||||
"new": {
|
||||
"database_name": new_result.get("database_name"),
|
||||
"uuid": new_result.get("uuid"),
|
||||
"database_uuid": new_result.get("uuid"),
|
||||
"id": str(new_db.get("id"))
|
||||
}
|
||||
}
|
||||
|
||||
self.logger.info("[confirm_db_config_replacement][State] Replacement set: %s", self.db_config_replacement)
|
||||
else:
|
||||
self.logger.info("[confirm_db_config_replacement][State] Skipped.")
|
||||
# [/DEF:confirm_db_config_replacement:Function]
|
||||
|
||||
# [DEF:_select_databases:Function]
|
||||
# @PURPOSE: Позволяет пользователю выбрать исходную и целевую БД через API.
|
||||
# @PRE: Clients are initialized.
|
||||
# @POST: Возвращает кортеж (старая БД, новая БД) или (None, None) при отмене.
|
||||
# @RELATION: CALLS -> self.from_c.get_databases
|
||||
# @RELATION: CALLS -> self.to_c.get_databases
|
||||
# @RELATION: CALLS -> self.from_c.get_database
|
||||
# @RELATION: CALLS -> self.to_c.get_database
|
||||
# @RELATION: CALLS -> menu
|
||||
def _select_databases(self) -> Tuple[Optional[Dict], Optional[Dict]]:
|
||||
with self.logger.belief_scope("Migration._select_databases"):
|
||||
self.logger.info("[_select_databases][Entry] Selecting databases from both environments.")
|
||||
|
||||
if self.from_c is None or self.to_c is None:
|
||||
self.logger.error("[_select_databases][Failure] Source or target client not initialized.")
|
||||
msgbox("Ошибка", "Исходное или целевое окружение не выбрано.")
|
||||
return None, None
|
||||
|
||||
# Получаем список БД из обоих окружений
|
||||
try:
|
||||
_, from_dbs = self.from_c.get_databases()
|
||||
_, to_dbs = self.to_c.get_databases()
|
||||
except Exception as e:
|
||||
self.logger.error("[_select_databases][Failure] Failed to fetch databases: %s", e)
|
||||
msgbox("Ошибка", "Не удалось получить список баз данных.")
|
||||
return None, None
|
||||
|
||||
# Формируем список для выбора
|
||||
# По Swagger документации, в ответе API поле называется "database_name"
|
||||
from_choices = []
|
||||
for db in from_dbs:
|
||||
db_name = db.get("database_name", "Без имени")
|
||||
from_choices.append((str(db["id"]), f"{db_name} (ID: {db['id']})"))
|
||||
|
||||
to_choices = []
|
||||
for db in to_dbs:
|
||||
db_name = db.get("database_name", "Без имени")
|
||||
to_choices.append((str(db["id"]), f"{db_name} (ID: {db['id']})"))
|
||||
|
||||
# Показываем список БД для исходного окружения
|
||||
rc, from_sel = menu(
|
||||
title="Выбор исходной БД",
|
||||
prompt="Выберите исходную БД:",
|
||||
choices=[f"{name}" for id, name in from_choices]
|
||||
)
|
||||
if rc != 0:
|
||||
return None, None
|
||||
|
||||
# Определяем выбранную БД
|
||||
from_db_id = from_choices[[choice[1] for choice in from_choices].index(from_sel)][0]
|
||||
# Получаем полную информацию о выбранной БД из исходного окружения
|
||||
try:
|
||||
from_db = self.from_c.get_database(int(from_db_id))
|
||||
except Exception as e:
|
||||
self.logger.error("[_select_databases][Failure] Failed to fetch database details: %s", e)
|
||||
msgbox("Ошибка", "Не удалось получить информацию о выбранной базе данных.")
|
||||
return None, None
|
||||
|
||||
# Показываем список БД для целевого окружения
|
||||
rc, to_sel = menu(
|
||||
title="Выбор целевой БД",
|
||||
prompt="Выберите целевую БД:",
|
||||
choices=[f"{name}" for id, name in to_choices]
|
||||
)
|
||||
if rc != 0:
|
||||
return None, None
|
||||
|
||||
# Определяем выбранную БД
|
||||
to_db_id = to_choices[[choice[1] for choice in to_choices].index(to_sel)][0]
|
||||
# Получаем полную информацию о выбранной БД из целевого окружения
|
||||
try:
|
||||
to_db = self.to_c.get_database(int(to_db_id))
|
||||
except Exception as e:
|
||||
self.logger.error("[_select_databases][Failure] Failed to fetch database details: %s", e)
|
||||
msgbox("Ошибка", "Не удалось получить информацию о выбранной базе данных.")
|
||||
return None, None
|
||||
|
||||
self.logger.info("[_select_databases][Exit] Selected databases: %s -> %s", from_db.get("database_name", "Без имени"), to_db.get("database_name", "Без имени"))
|
||||
return from_db, to_db
|
||||
# [/DEF:_select_databases:Function]
|
||||
|
||||
# [DEF:_batch_delete_by_ids:Function]
|
||||
# @PURPOSE: Удаляет набор дашбордов по их ID единым запросом.
|
||||
# @PRE: `ids` – непустой список целых чисел.
|
||||
# @POST: Все указанные дашборды удалены (если они существовали).
|
||||
# @RELATION: CALLS -> self.to_c.network.request
|
||||
# @PARAM: ids (List[int]) - Список ID дашбордов для удаления.
|
||||
def _batch_delete_by_ids(self, ids: List[int]) -> None:
|
||||
with self.logger.belief_scope("Migration._batch_delete_by_ids", f"ids={ids}"):
|
||||
if not ids:
|
||||
self.logger.debug("[_batch_delete_by_ids][Skip] Empty ID list – nothing to delete.")
|
||||
return
|
||||
|
||||
if self.to_c is None:
|
||||
self.logger.error("[_batch_delete_by_ids][Failure] Target client not initialized.")
|
||||
msgbox("Ошибка", "Целевое окружение не выбрано.")
|
||||
return
|
||||
|
||||
self.logger.info("[_batch_delete_by_ids][Entry] Deleting dashboards IDs: %s", ids)
|
||||
q_param = json.dumps(ids)
|
||||
response = self.to_c.network.request(method="DELETE", endpoint="/dashboard/", params={"q": q_param})
|
||||
|
||||
if isinstance(response, dict) and response.get("result", True) is False:
|
||||
self.logger.warning("[_batch_delete_by_ids][Warning] Unexpected delete response: %s", response)
|
||||
else:
|
||||
self.logger.info("[_batch_delete_by_ids][Success] Delete request completed.")
|
||||
# [/DEF:_batch_delete_by_ids:Function]
|
||||
|
||||
# [DEF:execute_migration:Function]
|
||||
# @PURPOSE: Выполняет экспорт-импорт дашбордов, обрабатывает ошибки и, при необходимости, выполняет процедуру восстановления.
|
||||
# @PRE: `self.dashboards_to_migrate` не пуст; `self.from_c` и `self.to_c` инициализированы.
|
||||
# @POST: Успешные дашборды импортированы; неудачные - восстановлены или залогированы.
|
||||
# @RELATION: CALLS -> self.from_c.export_dashboard
|
||||
# @RELATION: CALLS -> create_temp_file
|
||||
# @RELATION: CALLS -> update_yamls
|
||||
# @RELATION: CALLS -> create_dashboard_export
|
||||
# @RELATION: CALLS -> self.to_c.import_dashboard
|
||||
# @RELATION: CALLS -> self._batch_delete_by_ids
|
||||
def execute_migration(self) -> None:
|
||||
with self.logger.belief_scope("Migration.execute_migration"):
|
||||
if not self.dashboards_to_migrate:
|
||||
self.logger.warning("[execute_migration][Skip] No dashboards to migrate.")
|
||||
msgbox("Информация", "Нет дашбордов для миграции.")
|
||||
return
|
||||
|
||||
if self.from_c is None or self.to_c is None:
|
||||
self.logger.error("[execute_migration][Failure] Source or target client not initialized.")
|
||||
msgbox("Ошибка", "Исходное или целевое окружение не выбрано.")
|
||||
return
|
||||
|
||||
total = len(self.dashboards_to_migrate)
|
||||
self.logger.info("[execute_migration][Entry] Starting migration of %d dashboards.", total)
|
||||
self.to_c.delete_before_reimport = self.enable_delete_on_failure
|
||||
|
||||
with gauge("Миграция...", width=60, height=10) as g:
|
||||
for i, dash in enumerate(self.dashboards_to_migrate):
|
||||
dash_id, dash_slug, title = dash["id"], dash.get("slug"), dash["dashboard_title"]
|
||||
g.set_text(f"Миграция: {title} ({i + 1}/{total})")
|
||||
g.set_percent(int((i / total) * 100))
|
||||
exported_content = None # Initialize exported_content
|
||||
try:
|
||||
exported_content, _ = self.from_c.export_dashboard(dash_id)
|
||||
with create_temp_file(content=exported_content, dry_run=True, suffix=".zip", logger=self.logger) as tmp_zip_path, \
|
||||
create_temp_file(suffix=".dir", logger=self.logger) as tmp_unpack_dir:
|
||||
|
||||
if not self.db_config_replacement:
|
||||
self.to_c.import_dashboard(file_name=tmp_zip_path, dash_id=dash_id, dash_slug=dash_slug)
|
||||
else:
|
||||
with zipfile.ZipFile(tmp_zip_path, "r") as zip_ref:
|
||||
zip_ref.extractall(tmp_unpack_dir)
|
||||
|
||||
if self.db_config_replacement:
|
||||
update_yamls(db_configs=[self.db_config_replacement], path=str(tmp_unpack_dir))
|
||||
|
||||
with create_temp_file(suffix=".zip", dry_run=True, logger=self.logger) as tmp_new_zip:
|
||||
create_dashboard_export(zip_path=tmp_new_zip, source_paths=[str(p) for p in Path(tmp_unpack_dir).glob("**/*")])
|
||||
self.to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
|
||||
|
||||
self.logger.info("[execute_migration][Success] Dashboard %s imported.", title)
|
||||
except Exception as exc:
|
||||
self.logger.error("[execute_migration][Failure] %s", exc, exc_info=True)
|
||||
self._failed_imports.append({"slug": dash_slug, "dash_id": dash_id, "zip_content": exported_content})
|
||||
msgbox("Ошибка", f"Не удалось мигрировать дашборд {title}.\n\n{exc}")
|
||||
g.set_percent(100)
|
||||
|
||||
if self.enable_delete_on_failure and self._failed_imports:
|
||||
self.logger.info("[execute_migration][Recovery] %d dashboards failed. Starting recovery.", len(self._failed_imports))
|
||||
_, target_dashboards = self.to_c.get_dashboards()
|
||||
slug_to_id = {d["slug"]: d["id"] for d in target_dashboards if "slug" in d and "id" in d}
|
||||
ids_to_delete = [slug_to_id[f["slug"]] for f in self._failed_imports if f["slug"] in slug_to_id]
|
||||
self._batch_delete_by_ids(ids_to_delete)
|
||||
|
||||
for fail in self._failed_imports:
|
||||
with create_temp_file(content=fail["zip_content"], suffix=".zip", logger=self.logger) as retry_zip:
|
||||
self.to_c.import_dashboard(file_name=retry_zip, dash_id=fail["dash_id"], dash_slug=fail["slug"])
|
||||
self.logger.info("[execute_migration][Recovered] Dashboard slug '%s' re-imported.", fail["slug"])
|
||||
|
||||
self.logger.info("[execute_migration][Exit] Migration finished.")
|
||||
msgbox("Ошибка" if self._failed_imports else "Информация", "Миграция завершена!")
|
||||
# [/DEF:execute_migration:Function]
|
||||
|
||||
# [/DEF:Migration:Class]
|
||||
|
||||
if __name__ == "__main__":
|
||||
Migration().run()
|
||||
|
||||
# [/DEF:migration_script:Module]
|
||||
103
semantics/reports/semantic_report_20260122_175153.md
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -0,0 +1,34 @@
|
||||
# Specification Quality Checklist: Git Integration Plugin for Dashboard Development
|
||||
|
||||
**Purpose**: Validate specification completeness and quality before proceeding to planning
|
||||
**Created**: 2026-01-18
|
||||
**Feature**: [spec.md](../spec.md)
|
||||
|
||||
## Content Quality
|
||||
|
||||
- [x] No implementation details (languages, frameworks, APIs)
|
||||
- [x] Focused on user value and business needs
|
||||
- [x] Written for non-technical stakeholders
|
||||
- [x] All mandatory sections completed
|
||||
|
||||
## Requirement Completeness
|
||||
|
||||
- [x] No [NEEDS CLARIFICATION] markers remain
|
||||
- [x] Requirements are testable and unambiguous
|
||||
- [x] Success criteria are measurable
|
||||
- [x] Success criteria are technology-agnostic (no implementation details)
|
||||
- [x] All acceptance scenarios are defined
|
||||
- [x] Edge cases are identified
|
||||
- [x] Scope is clearly bounded
|
||||
- [x] Dependencies and assumptions identified
|
||||
|
||||
## Feature Readiness
|
||||
|
||||
- [x] All functional requirements have clear acceptance criteria
|
||||
- [x] User scenarios cover primary flows
|
||||
- [x] Feature meets measurable outcomes defined in Success Criteria
|
||||
- [x] No implementation details leak into specification
|
||||
|
||||
## Notes
|
||||
|
||||
✅ **VALIDATION COMPLETE**: All checklist items pass. The specification is ready for `/speckit.clarify` or `/speckit.plan`
|
||||
172
specs/011-git-integration-dashboard/spec.md
Normal file
@@ -0,0 +1,172 @@
|
||||
# Feature Specification: Git Integration Plugin for Dashboard Development
|
||||
|
||||
**Feature Branch**: `011-git-integration-dashboard`
|
||||
**Created**: 2026-01-18
|
||||
**Status**: In Progress
|
||||
**Input**: User description: "A Git integration plugin is needed for dashboard development. Requirements: the ability to configure a target Git server (GitLab integration as the baseline), storage and synchronization of dashboard development branches, and the ability to publish to another target environment after a commit"
|
||||
|
||||
## User Scenarios & Testing *(mandatory)*
|
||||
|
||||
<!--
|
||||
IMPORTANT: User stories should be PRIORITIZED as user journeys ordered by importance.
|
||||
Each user story/journey must be INDEPENDENTLY TESTABLE - meaning if you implement just ONE of them,
|
||||
you should still have a viable MVP (Minimum Viable Product) that delivers value.
|
||||
|
||||
Assign priorities (P1, P2, P3, etc.) to each story, where P1 is the most critical.
|
||||
Think of each story as a standalone slice of functionality that can be:
|
||||
- Developed independently
|
||||
- Tested independently
|
||||
- Deployed independently
|
||||
- Demonstrated to users independently
|
||||
-->
|
||||
|
||||
### User Story 1 - Configure Git Server Connection (Priority: P1)
|
||||
|
||||
A dashboard developer needs to connect the system to their GitLab server to enable version control for dashboard development. They want to configure the Git server URL, authentication credentials, and repository details through a simple form interface.
|
||||
|
||||
**Why this priority**: This is the foundational requirement - without Git server configuration, no other Git functionality can work. It's the entry point for all Git operations.
|
||||
|
||||
**Independent Test**: Can be fully tested by configuring a GitLab server connection and verifying the connection test succeeds, delivering the ability to establish Git server connectivity.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** a user is on the Git integration settings page, **When** they enter valid GitLab server URL and API token, **Then** the system successfully validates the connection
|
||||
2. **Given** a user enters invalid GitLab credentials, **When** they test the connection, **Then** the system displays a clear error message indicating authentication failure
|
||||
3. **Given** a user has configured a Git server, **When** they save the settings, **Then** the configuration is persisted and can be retrieved later
|
||||
|
||||
---
|
||||
|
||||
### User Story 2 - Dashboard Branch Management (Priority: P1)
|
||||
|
||||
A dashboard developer needs to create, switch between, and manage different development branches for their dashboards. They want to see available branches, create new feature branches, and switch between branches to work on different dashboard versions.
|
||||
|
||||
**Why this priority**: This is core to the Git workflow - developers need to manage branches to work on different features or versions of dashboards simultaneously.
|
||||
|
||||
**Independent Test**: Can be fully tested by creating a new branch, switching to it, and verifying the branch operations work correctly, delivering basic Git branch workflow capabilities.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** a Git server is configured, **When** a user views the branch list, **Then** they see all available branches from the remote repository
|
||||
2. **Given** a user wants to create a new feature branch, **When** they specify a branch name, **Then** the system creates the branch both locally and pushes it to the remote repository
|
||||
3. **Given** multiple branches exist, **When** a user switches to a different branch, **Then** the dashboard content updates to reflect the selected branch's state
|
||||
|
||||
---
|
||||
|
||||
### User Story 3 - Dashboard Synchronization with Git (Priority: P1)
|
||||
|
||||
A dashboard developer needs to synchronize their local dashboard changes with the Git repository. They want to commit changes, push to remote branches, and pull updates from other developers working on the same dashboard.
|
||||
|
||||
**Why this priority**: This enables collaborative development and ensures changes are properly tracked and shared between team members.
|
||||
|
||||
**Independent Test**: Can be fully tested by making dashboard changes, committing them, and pushing to the remote repository, delivering complete Git workflow integration.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** a user has made changes to a dashboard, **When** they commit the changes with a message, **Then** the changes are committed to the current branch
|
||||
2. **Given** local changes exist, **When** a user pushes to the remote repository, **Then** the changes are successfully pushed and visible to other team members
|
||||
3. **Given** remote changes exist, **When** a user pulls from the remote repository, **Then** local dashboard content is updated with the latest changes
|
||||
|
||||
---
|
||||
|
||||
### User Story 4 - Environment Deployment (Priority: P2)
|
||||
|
||||
A dashboard developer needs to deploy their dashboard changes to different target environments (e.g., staging, production) after committing changes. They want to select a target environment and trigger the deployment process.
|
||||
|
||||
**Why this priority**: This enables the complete development-to-production workflow, allowing teams to promote dashboard changes through different environments.
|
||||
|
||||
**Independent Test**: Can be fully tested by selecting a target environment and triggering deployment, delivering the ability to promote dashboard changes to different environments.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** dashboard changes are committed, **When** a user selects a target environment for deployment, **Then** the system validates the deployment configuration
|
||||
2. **Given** deployment is initiated, **When** the process completes, **Then** the user receives clear feedback on deployment success or failure
|
||||
3. **Given** multiple environments are configured, **When** a user deploys to one environment, **Then** other environments remain unaffected
|
||||
|
||||
---
|
||||
|
||||
### User Story 5 - Git History and Change Tracking (Priority: P3)
|
||||
|
||||
A dashboard developer needs to view the commit history and changes made to dashboards over time. They want to see who made changes, when they were made, and what specific changes were included in each commit.
|
||||
|
||||
**Why this priority**: This provides visibility into the development process and helps with debugging, auditing, and understanding the evolution of dashboards.
|
||||
|
||||
**Independent Test**: Can be fully tested by viewing commit history and examining specific commits, delivering transparency into dashboard development history.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** multiple commits exist, **When** a user views the commit history, **Then** they see a chronological list of all commits with relevant details
|
||||
2. **Given** a specific commit is selected, **When** the user views commit details, **Then** they see the changes included in that commit
|
||||
3. **Given** commit history is available, **When** a user searches for specific changes, **Then** they can filter and find relevant commits
|
||||
|
||||
---
|
||||
|
||||
### Edge Cases
|
||||
|
||||
- What happens when Git server is temporarily unavailable during synchronization?
|
||||
- How does system handle merge conflicts when multiple developers modify the same dashboard?
|
||||
- What happens when a user tries to deploy to an environment that doesn't exist or is misconfigured?
|
||||
- How does system handle large dashboard files that exceed Git repository size limits?
|
||||
- What happens when Git authentication tokens expire during operations?
|
||||
- How does system handle network interruptions during long-running operations like large pushes?
|
||||
|
||||
## Clarifications
|
||||
|
||||
### Session 2026-01-18
|
||||
- Q: What is the primary data format for storing dashboards in the Git repository? → A: YAML files (more human-readable for diffs)
|
||||
- Q: How should the system handle merge conflicts during synchronization? → A: Built-in UI for basic resolution (Mine/Theirs/Manual)
|
||||
- Q: What triggers a deployment to a target environment? → A: Manual trigger by user from UI
|
||||
- Q: How should the system handle authentication with GitLab? → A: Personal Access Token (PAT)
|
||||
- Q: What should be the scope of a "Dashboard" in the Git repository? → A: Directory per dashboard (metadata + assets); an illustrative layout is shown below
|
||||
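For illustration only, a repository layout consistent with the "directory per dashboard" answer above could look like the sketch below; the directory and file names are placeholders, not a required structure:

```text
dashboards/
└── sales-overview/           # one directory per dashboard
    ├── dashboard.yaml        # dashboard metadata
    ├── charts/               # chart definitions (YAML) referenced by the dashboard
    └── datasets/             # dataset definitions (YAML)
```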
|
||||
## Requirements *(mandatory)*

### Functional Requirements
|
||||
|
||||
- **FR-001**: System MUST allow users to configure Git server connection settings including server URL, authentication via Personal Access Token (PAT), and repository details
|
||||
- **FR-002**: System MUST support GitLab as the primary Git server with extensible architecture for other Git servers
|
||||
- **FR-003**: System MUST validate Git server connection using the provided PAT before saving configuration
|
||||
- **FR-004**: Users MUST be able to view all available branches from the configured Git repository
|
||||
- **FR-005**: Users MUST be able to create new branches both locally and remotely with proper naming validation
|
||||
- **FR-006**: Users MUST be able to switch between existing branches and have dashboard content update accordingly
|
||||
- **FR-007**: System MUST allow users to commit dashboard changes with commit messages and optional file selection
|
||||
- **FR-008**: System MUST support pushing local commits to remote repository branches
|
||||
- **FR-009**: System MUST support pulling changes from remote repository to local working directory
|
||||
- **FR-010**: System MUST handle merge conflicts with a built-in UI providing "Keep Mine", "Keep Theirs", and "Manual Edit" resolution options
|
||||
- **FR-011**: Users MUST be able to configure multiple target environments for dashboard deployment
|
||||
- **FR-012**: System MUST allow users to manually trigger deployment to a selected target environment from the UI
|
||||
- **FR-013**: System MUST validate deployment configuration before initiating deployment process
|
||||
- **FR-014**: System MUST provide clear feedback on deployment success or failure with detailed logs
|
||||
- **FR-015**: Users MUST be able to view commit history with details including author, timestamp, and commit message
|
||||
- **FR-016**: System MUST display detailed changes included in each commit for audit purposes
|
||||
- **FR-017**: System MUST handle PAT expiration gracefully with re-authentication prompts
|
||||
- **FR-018**: System MUST provide offline mode functionality when Git server is unavailable
|
||||
- **FR-019**: System MUST validate dashboard files before committing to ensure they are properly formatted in YAML within their respective dashboard directories
|
||||
- **FR-020**: Users MUST be able to search and filter commit history by date, author, or message content
|
||||
- **FR-021**: System MUST support rollback functionality to previous dashboard versions via Git operations
|
||||
|
||||
### Key Entities *(include if feature involves data)*
|
||||
|
||||
- **GitServerConfig**: Represents Git server connection configuration including server URL, Personal Access Token (PAT), repository path, and connection status
|
||||
- **Branch**: Represents a Git branch with properties like name, commit hash, last updated timestamp, and remote tracking status
|
||||
- **Commit**: Represents a Git commit with properties like commit hash, author, timestamp, commit message, and list of changed files
|
||||
- **Environment**: Represents a deployment target environment with properties like name, URL, authentication details, and deployment status
|
||||
- **DashboardChange**: Represents changes made to dashboard directories (YAML metadata + assets) including file paths, change type (add/modify/delete), and content differences
|
||||
|
||||
## Success Criteria *(mandatory)*
|
||||
|
||||
### Measurable Outcomes
|
||||
|
||||
- **SC-001**: Users can successfully configure a GitLab server connection and validate it within 2 minutes
|
||||
- **SC-002**: Dashboard branch switching operations complete within 5 seconds for repositories with up to 100 commits
|
||||
- **SC-003**: Commit and push operations succeed in 95% of attempts under normal network conditions
|
||||
- **SC-004**: Deployment to target environments completes successfully within 30 seconds for standard dashboard configurations
|
||||
- **SC-005**: Users can view and navigate through commit history for dashboards with up to 1000 commits without performance degradation
|
||||
- **SC-006**: Merge conflict resolution guidance is provided in 100% of conflict scenarios with clear resolution steps
|
||||
- **SC-007**: Authentication token refresh process completes within 10 seconds when tokens expire
|
||||
- **SC-008**: System maintains dashboard state consistency across branch switches in 99% of operations
|
||||
- **SC-009**: Deployment rollback operations complete within 1 minute for standard dashboard configurations
|
||||
- **SC-010**: Users can search and filter commit history with results displayed within 2 seconds for repositories with up to 500 commits
|
||||
34
specs/012-remove-superset-tool/checklists/requirements.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Specification Quality Checklist: Backend Refactoring - Remove superset_tool
|
||||
|
||||
**Purpose**: Validate specification completeness and quality before proceeding to planning
|
||||
**Created**: 2026-01-22
|
||||
**Feature**: [Link to spec.md](../spec.md)
|
||||
|
||||
## Content Quality
|
||||
|
||||
- [x] No implementation details (languages, frameworks, APIs) - *Refactoring is technical by nature, but focused on structural goals.*
|
||||
- [x] Focused on user value and business needs
|
||||
- [x] Written for non-technical stakeholders
|
||||
- [x] All mandatory sections completed
|
||||
|
||||
## Requirement Completeness
|
||||
|
||||
- [x] No [NEEDS CLARIFICATION] markers remain
|
||||
- [x] Requirements are testable and unambiguous
|
||||
- [x] Success criteria are measurable
|
||||
- [x] Success criteria are technology-agnostic (no implementation details)
|
||||
- [x] All acceptance scenarios are defined
|
||||
- [x] Edge cases are identified
|
||||
- [x] Scope is clearly bounded
|
||||
- [x] Dependencies and assumptions identified
|
||||
|
||||
## Feature Readiness
|
||||
|
||||
- [x] All functional requirements have clear acceptance criteria
|
||||
- [x] User scenarios cover primary flows
|
||||
- [x] Feature meets measurable outcomes defined in Success Criteria
|
||||
- [x] No implementation details leak into specification
|
||||
|
||||
## Notes
|
||||
|
||||
- Items marked incomplete require spec updates before `/speckit.clarify` or `/speckit.plan`
|
||||
39
specs/012-remove-superset-tool/contracts/api.md
Normal file
@@ -0,0 +1,39 @@
# API Contracts: 012-remove-superset-tool

## SupersetClient Internal API

The `SupersetClient` will provide the following methods for internal backend use:

### `get_dashboards(query: Optional[Dict] = None) -> Tuple[int, List[Dict]]`
- **Purpose**: Fetches paginated dashboards.
- **Contract**:
  - `query`: Optional filters and pagination parameters.
  - Returns: `(total_count, dashboard_list)`.

### `export_dashboard(dashboard_id: int) -> Tuple[bytes, str]`
- **Purpose**: Exports a dashboard as a ZIP file.
- **Contract**:
  - `dashboard_id`: ID of the dashboard.
  - Returns: `(zip_content, filename)`.

### `import_dashboard(file_name: Union[str, Path], dash_id: Optional[int] = None, dash_slug: Optional[str] = None) -> Dict`
- **Purpose**: Imports a dashboard from a ZIP file.
- **Contract**:
  - `file_name`: Path to ZIP.
  - `dash_id`/`dash_slug`: Optional identifiers for delete-retry logic.
  - Returns: API response dictionary.

## Configuration Models (Pydantic)

### `Environment`
```python
class Environment(BaseModel):
    id: str
    name: str
    url: str
    username: str
    password: str
    verify_ssl: bool = True
    timeout: int = 30
    is_default: bool = False
    backup_schedule: Schedule = Field(default_factory=Schedule)
```
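## Example Usage (Illustrative)

A minimal sketch of how backend code might call this contract. The import paths follow the structure in `plan.md`, and the constructor signature (`SupersetClient(env)`) is an assumption, not part of the contract above.

```python
# Illustrative only: import paths and the constructor signature are assumptions.
from src.core.config_models import Environment
from src.core.superset_client import SupersetClient

env = Environment(
    id="prod",
    name="Production",
    url="https://superset.example.com",
    username="admin",
    password="change-me",
)
client = SupersetClient(env)

# (total_count, dashboard_list) per the contract above
total, dashboards = client.get_dashboards({"page_size": 25})

# (zip_content, filename) per the contract above
zip_content, filename = client.export_dashboard(dashboards[0]["id"])
with open(filename, "wb") as fh:
    fh.write(zip_content)

# Re-import the exported archive; returns the raw API response dictionary
result = client.import_dashboard(file_name=filename)
```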
38
specs/012-remove-superset-tool/data-model.md
Normal file
@@ -0,0 +1,38 @@
# Data Model: 012-remove-superset-tool

## Entities

### 1. Environment (Updated)
- **Source**: `backend/src/core/config_models.py`
- **Purpose**: Represents a Superset environment configuration, now incorporating all fields required for the `SupersetClient`.
- **Fields**:
  - `id`: `str` (Unique identifier)
  - `name`: `str` (Display name)
  - `url`: `str` (Base URL for API)
  - `username`: `str`
  - `password`: `str`
  - `verify_ssl`: `bool` (Default: `True`)
  - `timeout`: `int` (Default: `30`)
  - `is_default`: `bool` (Default: `False`)
  - `backup_schedule`: `Schedule`
- **Validation Rules**:
  - `url` must start with `http://` or `https://`.
  - `timeout` must be positive.

### 2. API Contract (SupersetClient)
- **Source**: `backend/src/core/superset_client.py`
- **Purpose**: Self-contained client for Superset API interactions.
- **Key Methods**:
  - `authenticate()`
  - `get_dashboards(query)`
  - `get_datasets(query)`
  - `get_databases(query)`
  - `export_dashboard(dashboard_id)`
  - `import_dashboard(file_name, dash_id, dash_slug)`
  - `delete_dashboard(dashboard_id)`
  - `get_databases_summary()`
  - `get_dashboards_summary()`

## State Transitions
- **Authentication**: Unauthenticated -> Authenticated (token stored in memory/client session).
- **Import/Export**: File System <-> Superset API.
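For orientation, a minimal structural sketch of the consolidated client is shown below. It only illustrates the relationships described above (the `Environment` input, the in-memory token state, and the key method surface); the login payload follows Superset's standard `/api/v1/security/login` endpoint, while the exact session handling, whether `url` already includes `/api/v1`, and the elided method bodies are assumptions.

```python
# Structural sketch only -- not the actual implementation.
from typing import Dict, List, Optional, Tuple

import requests


class SupersetClient:
    """Self-contained Superset API client (sketch)."""

    def __init__(self, env) -> None:
        self.env = env                              # Environment from config_models.py
        self._session = requests.Session()
        self._access_token: Optional[str] = None    # state: Unauthenticated

    def authenticate(self) -> None:
        # Transition: Unauthenticated -> Authenticated (token kept in memory).
        resp = self._session.post(
            f"{self.env.url}/api/v1/security/login",
            json={
                "username": self.env.username,
                "password": self.env.password,
                "provider": "db",
                "refresh": True,
            },
            timeout=self.env.timeout,
            verify=self.env.verify_ssl,
        )
        resp.raise_for_status()
        self._access_token = resp.json()["access_token"]

    def get_dashboards(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
        ...  # paginated fetch; returns (total_count, dashboard_list)

    def export_dashboard(self, dashboard_id: int) -> Tuple[bytes, str]:
        ...  # returns (zip_content, filename)

    def import_dashboard(self, file_name, dash_id=None, dash_slug=None) -> Dict:
        ...  # returns the raw API response dictionary
```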
69
specs/012-remove-superset-tool/plan.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# Implementation Plan: 012-remove-superset-tool
|
||||
|
||||
**Branch**: `012-remove-superset-tool` | **Date**: 2026-01-22 | **Spec**: [spec.md](specs/012-remove-superset-tool/spec.md)
|
||||
|
||||
## Summary
|
||||
|
||||
The primary requirement is to refactor the backend by removing the redundant `superset_tool` module and merging its essential logic into the `backend` package. This involves migrating the `SupersetClient` logic, utility modules, and configuration models while deprecating CLI-specific interactive logic.
|
||||
|
||||
## Technical Context
|
||||
|
||||
**Language/Version**: Python 3.9+
|
||||
**Primary Dependencies**: FastAPI, Pydantic, requests, pyyaml (migrated from superset_tool)
|
||||
**Storage**: SQLite (tasks.db, migrations.db), Filesystem
|
||||
**Testing**: pytest
|
||||
**Target Platform**: Linux server
|
||||
**Project Type**: Web application (FastAPI backend + SvelteKit frontend)
|
||||
**Performance Goals**: N/A (Cleanup task)
|
||||
**Constraints**: Zero references to `superset_tool` in `backend/` source code.
|
||||
**Scale/Scope**: Refactoring core backend infrastructure.
|
||||
|
||||
## Constitution Check
|
||||
|
||||
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
|
||||
|
||||
1. **Semantic Protocol Compliance**: All migrated code must use `[DEF]` anchors and `@RELATION` tags as per `semantic_protocol.md` (see the annotated sketch after this list).
|
||||
2. **Causal Validity**: Contracts (Pydantic models and API surfaces) must be defined before implementation.
|
||||
3. **Everything is a Plugin**: Ensure that logic previously in `superset_tool` that acts as a tool or extension is integrated within the backend's plugin architecture if applicable.
|
||||
4. **Fractal Complexity Limit**: Migrated modules must adhere to complexity limits.
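For reference, the anchor and relation annotations required by item 1 look roughly like the sketch below, following the comment style already used by existing modules in this repository; the module name and relation targets are placeholders:

```python
# [DEF:superset_client:Module]
#
# @PURPOSE: Self-contained client for the Superset REST API.
# @LAYER: Domain
# @RELATION: DEPENDS_ON -> backend.src.core.config_models
# @PUBLIC_API: SupersetClient

# [DEF:SupersetClient:Class]
# @PURPOSE: Wraps Superset REST API calls for dashboards and datasets.
class SupersetClient:
    ...
# [/DEF:SupersetClient:Class]

# [/DEF:superset_client:Module]
```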
|
||||
|
||||
## Project Structure
|
||||
|
||||
### Documentation (this feature)
|
||||
|
||||
```text
|
||||
specs/012-remove-superset-tool/
|
||||
├── plan.md # This file
|
||||
├── research.md # Phase 0 output
|
||||
├── data-model.md # Phase 1 output
|
||||
├── quickstart.md # Phase 1 output
|
||||
├── contracts/ # Phase 1 output
|
||||
└── tasks.md # Phase 2 output
|
||||
```
|
||||
|
||||
### Source Code (repository root)
|
||||
|
||||
```text
|
||||
backend/
|
||||
├── src/
|
||||
│ ├── api/ # API routes
|
||||
│ ├── core/ # Core logic (Target for migration)
|
||||
│ │ ├── utils/ # Shared utilities
|
||||
│ │ ├── config_models.py
|
||||
│ │ ├── logger.py
|
||||
│ │ └── superset_client.py
|
||||
│ ├── models/ # Database models
|
||||
│ ├── plugins/ # Plugin system
|
||||
│ └── app.py
|
||||
└── tests/ # Backend tests
|
||||
```
|
||||
|
||||
**Structure Decision**: Web application structure. The `superset_tool` logic will be absorbed into `backend/src/core/` and its subdirectories.
|
||||
|
||||
## Complexity Tracking
|
||||
|
||||
> **Fill ONLY if Constitution Check has violations that must be justified**
|
||||
|
||||
| Violation | Why Needed | Simpler Alternative Rejected Because |
|
||||
|-----------|------------|-------------------------------------|
|
||||
| None | | |
|
||||
20
specs/012-remove-superset-tool/quickstart.md
Normal file
@@ -0,0 +1,20 @@
# Quickstart: 012-remove-superset-tool

## Overview
This feature refactors the backend to remove the `superset_tool` dependency. All Superset-related logic is now centralized in `backend/src/core/superset_client.py`.

## Key Changes
1. **Models**: `Environment` model in `backend/src/core/config_models.py` now includes `verify_ssl` and `timeout`.
2. **Client**: `SupersetClient` in `backend/src/core/superset_client.py` is now self-contained and does not inherit from any external module.
3. **Utilities**: Network and file I/O utilities previously in `superset_tool` are now available in `backend/src/core/utils/`.

## Verification Steps
1. **Run Backend**: Start the FastAPI server and ensure no import errors occur.

   ```bash
   cd backend && .venv/bin/python3 -m uvicorn src.app:app --reload
   ```

2. **Check Connections**: Verify that existing Superset connections load correctly in the UI/API.
3. **Test Migration**: Trigger a dashboard list refresh or a migration task to confirm the consolidated client works as expected.

## Cleanup
Once verified, the `superset_tool/` directory and root-level CLI scripts (`migration_script.py`, `run_mapper.py`) can be deleted.
32
specs/012-remove-superset-tool/research.md
Normal file
@@ -0,0 +1,32 @@
# Research: 012-remove-superset-tool

## Unknowns & Clarifications

### 1. Merging `SupersetConfig` into `Environment`
- **Decision**: Fields from `SupersetConfig` (auth, verify_ssl, timeout) will be merged into the `Environment` model in `backend/src/core/config_models.py`.
- **Rationale**: Centralizes configuration and removes the dependency on `superset_tool.models`.
- **Alternatives considered**: Keeping them separate, but that defeats the purpose of the refactoring.

### 2. Utility Migration
- **Decision**: `superset_tool/utils/` will be flattened into `backend/src/core/utils/`.
- **Rationale**: Simplifies the directory structure and aligns with the backend's existing utility pattern.
- **Alternatives considered**: Creating a `backend/src/core/utils/superset/` subdirectory, but flattening is preferred for simplicity.

### 3. CLI Deprecation
- **Decision**: `migration_script.py`, `run_mapper.py`, and `whiptail_fallback.py` will be deprecated and not migrated.
- **Rationale**: The web UI now handles these operations, and maintaining interactive CLI logic in the backend is out of scope.

### 4. Logging Consolidation
- **Decision**: Remove `SupersetLogger` and `belief_scope` from `superset_tool` and use `backend/src/core/logger.py`.
- **Rationale**: Ensures consistent logging across the entire backend application.

## Dependency Analysis

- `backend/requirements.txt` already contains `requests`, `PyYAML`, `pydantic`.
- `superset_tool/requirements.txt` is empty, indicating it likely relied on the same environment or was only partially integrated.
- **Action**: Ensure all `superset_tool` imports are replaced with backend-local equivalents.

## Integration Patterns

- `backend/src/core/superset_client.py` currently inherits from `superset_tool.client.SupersetClient`.
- **Action**: Move all methods from `superset_tool/client.py` into `backend/src/core/superset_client.py` and remove the inheritance. Refactor `APIClient` (from `superset_tool/utils/network.py`) to be a utility or private helper within the backend (see the sketch below).
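A minimal sketch of that target shape, assuming `APIClient` keeps a `request(method, endpoint, params=...)` surface similar to the current network helper; the constructor arguments shown here are assumptions:

```python
# Composition instead of inheritance (sketch only).
from src.core.utils.network import APIClient  # migrated from superset_tool/utils/network.py


class SupersetClient:
    def __init__(self, env) -> None:
        # env is the consolidated Environment model from config_models.py
        self._api = APIClient(
            base_url=env.url,
            verify_ssl=env.verify_ssl,
            timeout=env.timeout,
        )

    def get_dashboards(self, query=None):
        # Superset list endpoints return {"count": ..., "result": [...]}
        resp = self._api.request(method="GET", endpoint="/dashboard/", params=query or {})
        return resp["count"], resp["result"]
```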
78
specs/012-remove-superset-tool/spec.md
Normal file
@@ -0,0 +1,78 @@
|
||||
# Feature Specification: Backend Refactoring - Remove superset_tool
|
||||
|
||||
**Feature Branch**: `012-remove-superset-tool`
|
||||
**Created**: 2026-01-22
|
||||
**Status**: Completed
|
||||
**Input**: User description: "The backend needs refactoring - I want to remove the superset_tool module so that only backend remains"
|
||||
|
||||
## Clarifications
|
||||
|
||||
### Session 2026-01-22
|
||||
|
||||
- Q: How should we reconcile the `SupersetConfig` model from `superset_tool` with the `Environment` model in the backend? → A: Merge all `SupersetConfig` fields (e.g., `verify_ssl`, `timeout`) into the `Environment` model in `backend/src/core/config_models.py`.
|
||||
- Q: Where should the utility modules from `superset_tool/utils/` be moved within the `backend/src/core/` directory? → A: Flatten them directly into `backend/src/core/utils/` alongside existing utilities.
|
||||
- Q: Should interactive CLI utilities (whiptail_fallback.py, migration_script.py) be migrated? → A: No, deprecate them and do not migrate CLI-specific interactive UI logic to the backend.
|
||||
- Q: How should we handle `SupersetLogger` and `belief_scope`? → A: Consolidate all logging into `backend/src/core/logger.py` and remove the redundant `SupersetLogger` class.
|
||||
|
||||
## User Scenarios & Testing *(mandatory)*
|
||||
|
||||
### User Story 1 - Maintain Core Functionality (Priority: P1)
|
||||
|
||||
As a developer, I want to remove the redundant `superset_tool` module and merge its essential logic into the `backend` package so that the codebase is easier to maintain and deploy.
|
||||
|
||||
**Why this priority**: This is a critical architectural cleanup. The existence of two overlapping modules (`superset_tool` and `backend/src/core/superset_client.py`) creates confusion and potential bugs.
|
||||
|
||||
**Independent Test**: The application (FastAPI backend) should start and perform all Superset-related operations (fetching dashboards, datasets, etc.) without the `superset_tool` directory present in the root.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** the `superset_tool` directory is deleted, **When** I run the backend server, **Then** it should start without `ModuleNotFoundError`.
|
||||
2. **Given** the backend is running, **When** I trigger a dashboard list refresh in the UI, **Then** the backend should successfully fetch data from Superset using the migrated client logic.
|
||||
3. **Given** a migration task, **When** I execute it, **Then** it should successfully use the consolidated `SupersetClient` to interact with the API.
|
||||
|
||||
---
|
||||
|
||||
### User Story 2 - Unified Configuration and Models (Priority: P2)
|
||||
|
||||
As a developer, I want to use a single set of Pydantic models and configuration logic within the `backend` package instead of relying on external `superset_tool.models`.
|
||||
|
||||
**Why this priority**: Eliminates duplicate definitions and ensures consistency across the backend.
|
||||
|
||||
**Independent Test**: Verify that `backend/src/core/config_models.py` (or equivalent) contains all necessary fields previously defined in `superset_tool/models.py`.
|
||||
|
||||
**Acceptance Scenarios**:
|
||||
|
||||
1. **Given** a connection configuration in the database, **When** the backend loads it, **Then** it should correctly instantiate the consolidated configuration models.
|
||||
|
||||
---
|
||||
|
||||
### Edge Cases
|
||||
|
||||
- **Deprecated Scripts**: CLI-only scripts like `migration_script.py` and `run_mapper.py` in the root will be deprecated and removed as their logic is now in the web UI.
|
||||
- **Dependency Conflicts**: How does the system handle potential version conflicts between `backend/requirements.txt` and `superset_tool/requirements.txt`?
|
||||
- **Environment Variables**: Ensure that any environment variables previously used by `superset_tool` are still correctly loaded by the consolidated client.
|
||||
|
||||
## Requirements *(mandatory)*
|
||||
|
||||
### Functional Requirements
|
||||
|
||||
- **FR-001**: The system MUST NOT depend on the `superset_tool` top-level module.
|
||||
- **FR-002**: All essential logic from `superset_tool/client.py` MUST be migrated to `backend/src/core/superset_client.py`. Utility modules from `superset_tool/utils/` (network, fileio, dataset_mapper) MUST be migrated to `backend/src/core/utils/`.
|
||||
- **FR-003**: `backend/src/core/superset_client.py` MUST be refactored to be a self-contained client that doesn't inherit from `superset_tool.client.BaseSupersetClient`.
|
||||
- **FR-004**: Redundant logging logic (`SupersetLogger`) MUST be removed in favor of `backend/src/core/logger.py`.
|
||||
- **FR-005**: The backend's `requirements.txt` MUST be updated to include any dependencies previously required by `superset_tool` (e.g., `requests`, `pyyaml`).
- **FR-006**: All imports in `backend/src/` that reference `superset_tool` MUST be updated to reference the new locations within the backend.
|
||||
|
||||
### Key Entities *(include if feature involves data)*
|
||||
|
||||
- **SupersetClient**: The consolidated class responsible for all interactions with the Superset REST API.
|
||||
- **Environment**: The unified configuration model for Superset connections, incorporating legacy `SupersetConfig` fields.
|
||||
|
||||
## Success Criteria *(mandatory)*
|
||||
|
||||
### Measurable Outcomes
|
||||
|
||||
- **SC-001**: Zero references to `superset_tool` in the `backend/` source code.
|
||||
- **SC-002**: 100% of existing Superset integration tests pass after the refactoring.
|
||||
- **SC-003**: The `superset_tool/` directory can be safely deleted from the repository.
|
||||
- **SC-004**: Backend startup time remains unaffected or improves slightly due to simplified import structure.
|
||||
34
specs/012-remove-superset-tool/tasks.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Tasks: 012-remove-superset-tool
|
||||
|
||||
## Phase 1: Setup
|
||||
- [x] T001 Initialize branch `012-remove-superset-tool`
|
||||
- [x] T002 Update `backend/requirements.txt` with dependencies from `superset_tool` (requests, pyyaml)
|
||||
|
||||
## Phase 2: Foundational (Infrastructure Migration)
|
||||
- [x] T003 [P] Migrate utility modules from `superset_tool/utils/` to `backend/src/core/utils/`
|
||||
- [x] T004 [P] Refactor `backend/src/core/logger.py` to incorporate essential logging logic from `superset_tool/utils/logging.py`
|
||||
- [x] T005 Update `Environment` model in `backend/src/core/config_models.py` with `verify_ssl` and `timeout` fields
|
||||
|
||||
## Phase 3: User Story 1 - Maintain Core Functionality [US1]
|
||||
- [x] T006 [US1] Refactor `backend/src/core/superset_client.py` to be self-contained (remove inheritance from `superset_tool.client.BaseSupersetClient`)
|
||||
- [x] T007 [US1] Migrate all remaining methods from `superset_tool/client.py` to `backend/src/core/superset_client.py`
|
||||
- [x] T008 [P] [US1] Update all imports in `backend/src/` to remove references to `superset_tool`
|
||||
- [x] T009 [US1] Verify backend starts without `superset_tool` directory present
|
||||
- [x] T010 [US1] Verify dashboard and dataset fetching works via API/UI
|
||||
|
||||
## Phase 4: User Story 2 - Unified Configuration and Models [US2]
|
||||
- [x] T011 [US2] Ensure all Pydantic models in `backend/src/core/config_models.py` cover legacy `superset_tool/models.py` requirements
|
||||
- [x] T012 [US2] Update database loading logic to correctly populate new `Environment` fields
|
||||
|
||||
## Phase 5: Polish & Cleanup
|
||||
- [x] T013 Remove `superset_tool/` directory
|
||||
- [x] T014 Remove deprecated root scripts: `migration_script.py`, `run_mapper.py`, `whiptail_fallback.py`
|
||||
- [x] T015 Run full backend test suite to ensure no regressions
|
||||
|
||||
## Dependencies
|
||||
- Phase 2 must be completed before Phase 3.
|
||||
- T006 and T007 are prerequisites for T008 and T009.
|
||||
|
||||
## Implementation Strategy
|
||||
- **MVP First**: Focus on T006-T008 to ensure the backend can function independently of the `superset_tool` module.
|
||||
- **Incremental Delivery**: Migrate utilities and models first to provide the necessary foundation for the client refactoring.
|
||||
@@ -51,350 +51,6 @@
|
||||
- 📝 Generates the token-optimized project map.
|
||||
- ƒ **_write_entity_md** (`Function`)
|
||||
- 📝 Recursive helper to write entity tree to Markdown.
|
||||
- 📦 **migration_script** (`Module`)
|
||||
- 📝 Предоставляет интерактивный CLI для миграции дашбордов Superset между окружениями с возможностью восстановления после ошибок.
|
||||
- 🏗️ Layer: App
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.client`
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.utils`
|
||||
- ℂ **Migration** (`Class`)
|
||||
- 📝 Инкапсулирует логику интерактивной миграции дашбордов с возможностью «удалить‑и‑перезаписать» при ошибке импорта.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Инициализирует сервис миграции, настраивает логгер и начальные состояния.
|
||||
- ƒ **run** (`Function`)
|
||||
- 📝 Точка входа – последовательный запуск всех шагов миграции.
|
||||
- 🔗 CALLS -> `self.ask_delete_on_failure`
|
||||
- 🔗 CALLS -> `self.select_environments`
|
||||
- 🔗 CALLS -> `self.select_dashboards`
|
||||
- 🔗 CALLS -> `self.confirm_db_config_replacement`
|
||||
- 🔗 CALLS -> `self.execute_migration`
|
||||
- ƒ **ask_delete_on_failure** (`Function`)
|
||||
- 📝 Запрашивает у пользователя, следует ли удалять дашборд при ошибке импорта.
|
||||
- 🔗 CALLS -> `yesno`
|
||||
- ƒ **select_environments** (`Function`)
|
||||
- 📝 Позволяет пользователю выбрать исходное и целевое окружения Superset.
|
||||
- 🔗 CALLS -> `setup_clients`
|
||||
- 🔗 CALLS -> `menu`
|
||||
- ƒ **select_dashboards** (`Function`)
|
||||
- 📝 Позволяет пользователю выбрать набор дашбордов для миграции.
|
||||
- 🔗 CALLS -> `self.from_c.get_dashboards`
|
||||
- 🔗 CALLS -> `checklist`
|
||||
- ƒ **confirm_db_config_replacement** (`Function`)
|
||||
- 📝 Запрашивает у пользователя, требуется ли заменить имена БД в YAML-файлах.
|
||||
- 🔗 CALLS -> `yesno`
|
||||
- 🔗 CALLS -> `self._select_databases`
|
||||
- ƒ **_select_databases** (`Function`)
|
||||
- 📝 Позволяет пользователю выбрать исходную и целевую БД через API.
|
||||
- 🔗 CALLS -> `self.from_c.get_databases`
|
||||
- 🔗 CALLS -> `self.to_c.get_databases`
|
||||
- 🔗 CALLS -> `self.from_c.get_database`
|
||||
- 🔗 CALLS -> `self.to_c.get_database`
|
||||
- 🔗 CALLS -> `menu`
|
||||
- ƒ **_batch_delete_by_ids** (`Function`)
|
||||
- 📝 Удаляет набор дашбордов по их ID единым запросом.
|
||||
- 🔗 CALLS -> `self.to_c.network.request`
|
||||
- ƒ **execute_migration** (`Function`)
|
||||
- 📝 Выполняет экспорт-импорт дашбордов, обрабатывает ошибки и, при необходимости, выполняет процедуру восстановления.
|
||||
- 🔗 CALLS -> `self.from_c.export_dashboard`
|
||||
- 🔗 CALLS -> `create_temp_file`
|
||||
- 🔗 CALLS -> `update_yamls`
|
||||
- 🔗 CALLS -> `create_dashboard_export`
|
||||
- 🔗 CALLS -> `self.to_c.import_dashboard`
|
||||
- 🔗 CALLS -> `self._batch_delete_by_ids`
|
||||
- 📦 **superset_tool.exceptions** (`Module`)
|
||||
- 📝 Определяет иерархию пользовательских исключений для всего инструмента, обеспечивая единую точку обработки ошибок.
|
||||
- 🏗️ Layer: Infra
|
||||
- ℂ **SupersetToolError** (`Class`)
|
||||
- 📝 Базовый класс для всех ошибок, генерируемых инструментом.
|
||||
- 🔗 INHERITS_FROM -> `Exception`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes the base tool error.
|
||||
- ℂ **AuthenticationError** (`Class`)
|
||||
- 📝 Ошибки, связанные с аутентификацией или авторизацией.
|
||||
- 🔗 INHERITS_FROM -> `SupersetToolError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes an authentication error.
|
||||
- ℂ **PermissionDeniedError** (`Class`)
|
||||
- 📝 Ошибка, возникающая при отказе в доступе к ресурсу.
|
||||
- 🔗 INHERITS_FROM -> `AuthenticationError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes a permission denied error.
|
||||
- ℂ **SupersetAPIError** (`Class`)
|
||||
- 📝 Общие ошибки при взаимодействии с Superset API.
|
||||
- 🔗 INHERITS_FROM -> `SupersetToolError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes a Superset API error.
|
||||
- ℂ **ExportError** (`Class`)
|
||||
- 📝 Errors specific to export operations.
|
||||
- 🔗 INHERITS_FROM -> `SupersetAPIError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes an export error.
|
||||
- ℂ **DashboardNotFoundError** (`Class`)
|
||||
- 📝 Error raised when the requested dashboard or resource is not found (404).
|
||||
- 🔗 INHERITS_FROM -> `SupersetAPIError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes a dashboard not found error.
|
||||
- ℂ **DatasetNotFoundError** (`Class`)
|
||||
- 📝 Error raised when the requested dataset does not exist (404).
|
||||
- 🔗 INHERITS_FROM -> `SupersetAPIError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes a dataset not found error.
|
||||
- ℂ **InvalidZipFormatError** (`Class`)
|
||||
- 📝 Error indicating an invalid ZIP archive format or contents.
|
||||
- 🔗 INHERITS_FROM -> `SupersetToolError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes an invalid ZIP format error.
|
||||
- ℂ **NetworkError** (`Class`)
|
||||
- 📝 Errors related to the network connection.
|
||||
- 🔗 INHERITS_FROM -> `SupersetToolError`
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes a network error.
|
||||
- ℂ **FileOperationError** (`Class`)
|
||||
- 📝 General file operation (I/O) errors.
|
||||
- 🔗 INHERITS_FROM -> `SupersetToolError`
|
||||
- ℂ **InvalidFileStructureError** (`Class`)
|
||||
- 📝 Error indicating an invalid file or directory structure.
|
||||
- 🔗 INHERITS_FROM -> `FileOperationError`
|
||||
- ℂ **ConfigurationError** (`Class`)
|
||||
- 📝 Errors caused by invalid tool configuration.
|
||||
- 🔗 INHERITS_FROM -> `SupersetToolError`
|
||||
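To illustrate how this hierarchy is meant to be consumed, a short, hypothetical handling pattern (the class names come from the list above; the call inside `try` is a placeholder):

```python
from superset_tool.exceptions import (
    DashboardNotFoundError, SupersetAPIError, SupersetToolError,
)

def safe_export(client, dashboard_id):
    """Catch specific errors first, then fall back to the common base class."""
    try:
        return client.export_dashboard(dashboard_id)
    except DashboardNotFoundError:
        return None                              # 404: nothing to export
    except SupersetAPIError as exc:
        raise RuntimeError(f"API call failed: {exc}") from exc
    except SupersetToolError as exc:             # any other tool-level failure
        raise RuntimeError(f"Tool error: {exc}") from exc
```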
- 📦 **superset_tool.models** (`Module`)
|
||||
- 📝 Defines the Pydantic models for the tool's configuration, providing data validation.
|
||||
- 🏗️ Layer: Infra
|
||||
- 🔗 DEPENDS_ON -> `pydantic`
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.utils.logger`
|
||||
- ℂ **SupersetConfig** (`Class`)
|
||||
- 📝 Configuration model for connecting to a single Superset API instance.
|
||||
- 🔗 INHERITS_FROM -> `pydantic.BaseModel`
|
||||
- ƒ **validate_auth** (`Function`)
|
||||
- 📝 Verifies that the `auth` dictionary contains all fields required for authentication.
|
||||
- ƒ **normalize_base_url** (`Function`)
|
||||
- 📝 Normalizes `base_url`, appending `/api/v1` if it is missing.
|
||||
- ℂ **DatabaseConfig** (`Class`)
|
||||
- 📝 Model for the database transformation parameters applied during dashboard migration.
|
||||
- 🔗 INHERITS_FROM -> `pydantic.BaseModel`
|
||||
- ƒ **validate_config** (`Function`)
|
||||
- 📝 Verifies that the `database_config` dictionary contains the 'old' and 'new' keys.
|
||||
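A minimal Pydantic sketch (v1-style validators) of the two checks listed above. Only `base_url` and `auth` are shown, and the exact set of required `auth` keys is an assumption:

```python
from typing import Dict
from pydantic import BaseModel, validator

class SupersetConfig(BaseModel):
    """Simplified stand-in for the real model; most fields are omitted."""
    base_url: str
    auth: Dict[str, str]

    @validator("auth")
    def validate_auth(cls, value):
        missing = {"username", "password", "provider"} - value.keys()   # assumed required keys
        if missing:
            raise ValueError(f"auth is missing required fields: {sorted(missing)}")
        return value

    @validator("base_url")
    def normalize_base_url(cls, value):
        value = value.rstrip("/")
        return value if value.endswith("/api/v1") else f"{value}/api/v1"
```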
- 📦 **superset_tool.client** (`Module`)
|
||||
- 📝 Provides a high-level client for the Superset REST API, encapsulating request logic, error handling, and pagination.
|
||||
- 🏗️ Layer: Domain
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.models`
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.exceptions`
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.utils`
|
||||
- ℂ **SupersetClient** (`Class`)
|
||||
- 📝 Wrapper class over the Superset REST API, providing methods for working with dashboards and datasets.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes the client, validates the configuration, and creates the network client.
|
||||
- ƒ **_validate_config** (`Function`)
|
||||
- 📝 Verifies that the supplied configuration object is of the correct type.
|
||||
- ƒ **headers** (`Function`)
|
||||
- 📝 Returns the base HTTP headers used by the network client.
|
||||
- ƒ **get_dashboards** (`Function`)
|
||||
- 📝 Fetches the full list of dashboards, handling pagination automatically.
|
||||
- 🔗 CALLS -> `self._fetch_total_object_count`
|
||||
- 🔗 CALLS -> `self._fetch_all_pages`
|
||||
- ƒ **export_dashboard** (`Function`)
|
||||
- 📝 Exports a dashboard as a ZIP archive.
|
||||
- 🔗 CALLS -> `self.network.request`
|
||||
- ƒ **import_dashboard** (`Function`)
|
||||
- 📝 Imports a dashboard from a ZIP file, with optional automatic deletion and retry on failure.
|
||||
- 🔗 CALLS -> `self._do_import`
|
||||
- 🔗 CALLS -> `self.delete_dashboard`
|
||||
- 🔗 CALLS -> `self.get_dashboards`
|
||||
- ƒ **_resolve_target_id_for_delete** (`Function`)
|
||||
- 📝 Resolves the dashboard ID to delete, using either the ID or the slug.
|
||||
- ƒ **_do_import** (`Function`)
|
||||
- 📝 Performs a single import request without exception handling.
|
||||
- ƒ **delete_dashboard** (`Function`)
|
||||
- 📝 Deletes a dashboard by its ID or slug.
|
||||
- 🔗 CALLS -> `self.network.request`
|
||||
- ƒ **_extract_dashboard_id_from_zip** (`Function`)
|
||||
- 📝 Extracts the dashboard ID from `metadata.yaml` inside the ZIP archive.
|
||||
- ƒ **_extract_dashboard_slug_from_zip** (`Function`)
|
||||
- 📝 Extracts the dashboard slug from `metadata.yaml` inside the ZIP archive.
|
||||
- ƒ **_validate_export_response** (`Function`)
|
||||
- 📝 Verifies that the HTTP export response is a valid ZIP archive.
|
||||
- ƒ **_resolve_export_filename** (`Function`)
|
||||
- 📝 Resolves the export filename from the response headers, or generates one.
|
||||
- ƒ **_validate_query_params** (`Function`)
|
||||
- 📝 Builds a valid set of request parameters with pagination.
|
||||
- ƒ **_fetch_total_object_count** (`Function`)
|
||||
- 📝 Fetches the total number of objects for the given endpoint, used for pagination.
|
||||
- ƒ **_fetch_all_pages** (`Function`)
|
||||
- 📝 Iterates over every page of the paginated API and collects all the data.
|
||||
- ƒ **_validate_import_file** (`Function`)
|
||||
- 📝 Verifies that the file exists, is a ZIP archive, and contains `metadata.yaml`.
|
||||
- ƒ **get_datasets** (`Function`)
|
||||
- 📝 Fetches the full list of datasets, handling pagination automatically.
|
||||
- 🔗 CALLS -> `self._fetch_total_object_count`
|
||||
- 🔗 CALLS -> `self._fetch_all_pages`
|
||||
- ƒ **get_databases** (`Function`)
|
||||
- 📝 Fetches the full list of databases, handling pagination automatically.
|
||||
- 🔗 CALLS -> `self._fetch_total_object_count`
|
||||
- 🔗 CALLS -> `self._fetch_all_pages`
|
||||
- ƒ **get_dataset** (`Function`)
|
||||
- 📝 Fetches information about a specific dataset by its ID.
|
||||
- 🔗 CALLS -> `self.network.request`
|
||||
- ƒ **get_database** (`Function`)
|
||||
- 📝 Fetches information about a specific database by its ID.
|
||||
- 🔗 CALLS -> `self.network.request`
|
||||
- ƒ **update_dataset** (`Function`)
|
||||
- 📝 Updates a dataset's data by its ID.
|
||||
- 🔗 CALLS -> `self.network.request`
|
||||
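All of the `get_*` methods above follow the same count-then-page pattern. A rough sketch of that loop, assuming the `request` signature shown later in this diff and an endpoint that returns `count` and `result` fields (parameter handling is simplified):

```python
def fetch_all(network, endpoint, page_size=100):
    """Count-then-page loop, mirroring _fetch_total_object_count / _fetch_all_pages."""
    total = network.request(method="GET", endpoint=endpoint,
                            params={"page": 0, "page_size": 1})["count"]
    results, page = [], 0
    while len(results) < total:
        batch = network.request(method="GET", endpoint=endpoint,
                                params={"page": page, "page_size": page_size})["result"]
        if not batch:          # defensive stop if the server returns fewer rows than `count`
            break
        results.extend(batch)
        page += 1
    return total, results
```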
- 📦 **superset_tool** (`Module`)
|
||||
- 📝 Root package for superset_tool.
|
||||
- 🏗️ Layer: Domain
|
||||
- 📦 **superset_tool.utils.init_clients** (`Module`)
|
||||
- 📝 Centrally initializes Superset clients for the different environments (DEV, PROD, SBX, PREPROD), using `keyring` for secure access to passwords.
|
||||
- 🏗️ Layer: Infra
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.models`
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.client`
|
||||
- 🔗 DEPENDS_ON -> `keyring`
|
||||
- ƒ **setup_clients** (`Function`)
|
||||
- 📝 Initializes and returns a dictionary of `SupersetClient` instances.
|
||||
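A hedged sketch of what `setup_clients` likely does. The environment URLs, the keyring service name, and the service account are placeholders, not values from the repository:

```python
import keyring

from superset_tool.client import SupersetClient
from superset_tool.models import SupersetConfig

# Placeholder URLs; the real DEV/PROD/SBX/PREPROD list lives in the module.
ENV_URLS = {"DEV": "https://superset-dev.example.com", "PROD": "https://superset.example.com"}

def setup_clients(envs=ENV_URLS):
    """Build one authenticated-ready SupersetClient per environment."""
    clients = {}
    for name, url in envs.items():
        password = keyring.get_password("superset", name.lower())   # assumed keyring service name
        config = SupersetConfig(
            base_url=url,
            auth={"username": "svc_superset", "password": password, "provider": "db"},
        )
        clients[name] = SupersetClient(config)
    return clients
```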
- 📦 **superset_tool.utils.logger** (`Module`)
|
||||
- 📝 Provides a universal wrapper over the standard `logging.Logger` for uniform creation and management of loggers with console and/or file output.
|
||||
- 🏗️ Layer: Infra
|
||||
- ƒ **belief_scope** (`Function`)
|
||||
- 📝 Context manager for belief state logging to maintain execution coherence.
|
||||
- ℂ **SupersetLogger** (`Class`)
|
||||
- 📝 Wrapper over `logging.Logger` that simplifies logger configuration and use.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Configures and initializes the logger, adding file and/or console handlers.
|
||||
- ƒ **_log** (`Function`)
|
||||
- 📝 (Helper) Generic method that dispatches to the appropriate logging level.
|
||||
- ƒ **info** (`Function`)
|
||||
- 📝 Logs an INFO-level message.
|
||||
- ƒ **debug** (`Function`)
|
||||
- 📝 Logs a DEBUG-level message.
|
||||
- ƒ **warning** (`Function`)
|
||||
- 📝 Logs a WARNING-level message.
|
||||
- ƒ **error** (`Function`)
|
||||
- 📝 Logs an ERROR-level message.
|
||||
- ƒ **critical** (`Function`)
|
||||
- 📝 Logs a CRITICAL-level message.
|
||||
- ƒ **exception** (`Function`)
|
||||
- 📝 Logs an ERROR-level message together with the stack trace of the current exception.
|
||||
- 📦 **belief_scope** (`Method`)
|
||||
- 📝 Instance method wrapper for belief_scope context manager.
|
||||
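The `belief_scope` helper referenced throughout the client code removed below is a context manager of roughly this shape; this is a sketch, and the exact log wording and scope-ID format are assumptions:

```python
import logging
import uuid
from contextlib import contextmanager

@contextmanager
def belief_scope(name: str, logger: logging.Logger = logging.getLogger("superset_tool")):
    """Log matching [Entry]/[Exit] (or [Failure]) markers around a block of work."""
    scope_id = uuid.uuid4().hex[:8]
    logger.debug("[%s][%s][Entry]", scope_id, name)
    try:
        yield scope_id
        logger.debug("[%s][%s][Exit]", scope_id, name)
    except Exception:
        logger.exception("[%s][%s][Failure]", scope_id, name)
        raise
```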
- 📦 **superset_tool.utils.fileio** (`Module`)
|
||||
- 📝 Provides a set of utilities for file operations, including temporary files, ZIP archives, YAML files, and directory cleanup.
|
||||
- 🏗️ Layer: Infra
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.exceptions`
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.utils.logger`
|
||||
- 🔗 DEPENDS_ON -> `pyyaml`
|
||||
- ƒ **create_temp_file** (`Function`)
|
||||
- 📝 Context manager that creates a temporary file or directory with guaranteed removal.
|
||||
- ƒ **remove_empty_directories** (`Function`)
|
||||
- 📝 Recursively removes all empty subdirectories, starting from the given path.
|
||||
- ƒ **read_dashboard_from_disk** (`Function`)
|
||||
- 📝 Reads a file's binary contents from disk.
|
||||
- ƒ **calculate_crc32** (`Function`)
|
||||
- 📝 Computes the CRC32 checksum of a file.
|
||||
- 📦 **RetentionPolicy** (`DataClass`)
|
||||
- 📝 Defines the retention policy for archives (daily, weekly, monthly).
|
||||
- ƒ **archive_exports** (`Function`)
|
||||
- 📝 Manages the archive of exported files, applying the retention policy and deduplication.
|
||||
- 🔗 CALLS -> `apply_retention_policy`
|
||||
- 🔗 CALLS -> `calculate_crc32`
|
||||
- ƒ **apply_retention_policy** (`Function`)
|
||||
- 📝 (Helper) Applies the retention policy to a list of files, returning those that should be kept.
|
||||
- ƒ **save_and_unpack_dashboard** (`Function`)
|
||||
- 📝 Saves the binary contents of a ZIP archive to disk and optionally unpacks it.
|
||||
- ƒ **update_yamls** (`Function`)
|
||||
- 📝 Updates configuration in YAML files, replacing values or applying regexes.
|
||||
- 🔗 CALLS -> `_update_yaml_file`
|
||||
- ƒ **_update_yaml_file** (`Function`)
|
||||
- 📝 (Helper) Updates a single YAML file.
|
||||
- ƒ **replacer** (`Function`)
|
||||
- 📝 Replacement function that preserves quotes if they were present.
|
||||
- ƒ **create_dashboard_export** (`Function`)
|
||||
- 📝 Creates a ZIP archive from the given source paths.
|
||||
- ƒ **sanitize_filename** (`Function`)
|
||||
- 📝 Strips characters that are not allowed in file names from a string.
|
||||
- ƒ **get_filename_from_headers** (`Function`)
|
||||
- 📝 Extracts the file name from the 'Content-Disposition' HTTP header.
|
||||
- ƒ **consolidate_archive_folders** (`Function`)
|
||||
- 📝 Consolidates archive directories based on a shared slug in their names.
|
||||
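A compact sketch of the `update_yamls` idea: walk an unpacked export and rewrite database names in the YAML files. The signature and the key being rewritten (`database_name`) are assumptions for illustration:

```python
from pathlib import Path

import yaml

def update_yamls(export_dir, old: str, new: str) -> int:
    """Replace `old` database names with `new` in every YAML file under export_dir."""
    changed = 0
    for path in Path(export_dir).rglob("*.yaml"):
        data = yaml.safe_load(path.read_text(encoding="utf-8"))
        if isinstance(data, dict) and data.get("database_name") == old:
            data["database_name"] = new
            path.write_text(yaml.safe_dump(data, allow_unicode=True), encoding="utf-8")
            changed += 1
    return changed
```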
- 📦 **superset_tool.utils.network** (`Module`)
|
||||
- 📝 Encapsulates the low-level HTTP logic for the Superset API, including authentication, session management, retry logic, and error handling.
|
||||
- 🏗️ Layer: Infra
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.exceptions`
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.utils.logger`
|
||||
- 🔗 DEPENDS_ON -> `requests`
|
||||
- ℂ **APIClient** (`Class`)
|
||||
- 📝 Encapsulates the HTTP logic for working with the API, including sessions, authentication, and request handling.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes the API client with its configuration, session, and logger.
|
||||
- ƒ **_init_session** (`Function`)
|
||||
- 📝 Creates and configures a `requests.Session` with retry logic.
|
||||
- ƒ **authenticate** (`Function`)
|
||||
- 📝 Authenticates against the Superset API and obtains the access and CSRF tokens.
|
||||
- ƒ **headers** (`Function`)
|
||||
- 📝 Returns the HTTP headers for authenticated requests.
|
||||
- ƒ **request** (`Function`)
|
||||
- 📝 Performs a generic HTTP request to the API.
|
||||
- ƒ **_handle_http_error** (`Function`)
|
||||
- 📝 (Helper) Converts HTTP errors into custom exceptions.
|
||||
- ƒ **_handle_network_error** (`Function`)
|
||||
- 📝 (Helper) Converts network errors into `NetworkError`.
|
||||
- ƒ **upload_file** (`Function`)
|
||||
- 📝 Uploads a file to the server via multipart/form-data.
|
||||
- ƒ **_perform_upload** (`Function`)
|
||||
- 📝 (Helper) Performs the POST request with the file.
|
||||
- ƒ **fetch_paginated_count** (`Function`)
|
||||
- 📝 Fetches the total number of items for pagination.
|
||||
- ƒ **fetch_paginated_data** (`Function`)
|
||||
- 📝 Automatically collects data from all pages of a paginated endpoint.
|
||||
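The `_init_session` behaviour described above is the standard `requests` + `urllib3` retry pattern. A sketch with assumed retry counts and status codes:

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def init_session(verify_ssl: bool = True, retries: int = 3) -> requests.Session:
    """Session with exponential backoff on transient HTTP errors (counts are assumptions)."""
    session = requests.Session()
    retry = Retry(
        total=retries,
        backoff_factor=0.5,
        status_forcelist=(429, 500, 502, 503, 504),
        allowed_methods=("GET", "POST", "PUT", "DELETE"),
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    session.verify = verify_ssl
    return session
```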
- 📦 **superset_tool.utils.whiptail_fallback** (`Module`)
|
||||
- 📝 Provides a compact console UI fallback for interactive dialogs, emulating `whiptail` on systems where it is unavailable.
|
||||
- 🏗️ Layer: UI
|
||||
- ƒ **menu** (`Function`)
|
||||
- 📝 Displays a selection menu and returns the chosen item.
|
||||
- ƒ **checklist** (`Function`)
|
||||
- 📝 Displays a list that allows multiple selections.
|
||||
- ƒ **yesno** (`Function`)
|
||||
- 📝 Asks a yes/no question.
|
||||
- ƒ **msgbox** (`Function`)
|
||||
- 📝 Displays an informational message.
|
||||
- ƒ **inputbox** (`Function`)
|
||||
- 📝 Prompts the user for text input.
|
||||
- ℂ **_ConsoleGauge** (`Class`)
|
||||
- 📝 Context manager that emulates a `whiptail` gauge in the console.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes the gauge.
|
||||
- ƒ **__enter__** (`Function`)
|
||||
- 📝 Enters the context.
|
||||
- ƒ **__exit__** (`Function`)
|
||||
- 📝 Exits the context.
|
||||
- ƒ **set_text** (`Function`)
|
||||
- 📝 Sets the gauge text.
|
||||
- ƒ **set_percent** (`Function`)
|
||||
- 📝 Sets the gauge percentage.
|
||||
- ƒ **gauge** (`Function`)
|
||||
- 📝 Creates and returns a `_ConsoleGauge` instance.
|
||||
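For orientation, fallback dialogs like these are usually plain `input()` loops. A minimal sketch of `yesno` and `menu` (simplified relative to the real module, whose prompts and return values may differ):

```python
def yesno(prompt: str, default: bool = True) -> bool:
    """Plain-console stand-in for `whiptail --yesno`."""
    suffix = "[Y/n]" if default else "[y/N]"
    answer = input(f"{prompt} {suffix} ").strip().lower()
    return default if not answer else answer.startswith("y")

def menu(title: str, options: list) -> str:
    """Numbered-list stand-in for `whiptail --menu`."""
    print(title)
    for i, option in enumerate(options, 1):
        print(f"  {i}. {option}")
    while True:
        choice = input("Select: ").strip()
        if choice.isdigit() and 1 <= int(choice) <= len(options):
            return options[int(choice) - 1]
```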
- 📦 **superset_tool.utils.dataset_mapper** (`Module`)
|
||||
- 📝 This module is responsible for updating metadata (verbose_map) in Superset datasets, extracting it from PostgreSQL or XLSX files.
|
||||
- 🏗️ Layer: Domain
|
||||
- 🔗 DEPENDS_ON -> `superset_tool.client`
|
||||
- 🔗 DEPENDS_ON -> `pandas`
|
||||
- 🔗 DEPENDS_ON -> `psycopg2`
|
||||
- ℂ **DatasetMapper** (`Class`)
|
||||
- 📝 Class for mapping and updating the verbose_map of Superset datasets.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes the mapper.
|
||||
- ƒ **get_postgres_comments** (`Function`)
|
||||
- 📝 Extracts column comments from the PostgreSQL system catalog.
|
||||
- ƒ **load_excel_mappings** (`Function`)
|
||||
- 📝 Loads 'column_name' -> 'column_comment' mappings from an XLSX file.
|
||||
- ƒ **run_mapping** (`Function`)
|
||||
- 📝 Main function that performs the mapping and updates the dataset's verbose_map in Superset.
|
||||
- 🔗 CALLS -> `self.get_postgres_comments`
|
||||
- 🔗 CALLS -> `self.load_excel_mappings`
|
||||
- 🔗 CALLS -> `superset_client.get_dataset`
|
||||
- 🔗 CALLS -> `superset_client.update_dataset`
|
||||
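A hedged sketch of this verbose_map flow: pull column comments from the PostgreSQL catalog, merge them into the dataset's columns, then push the result back through the client. The SQL and the payload shape are assumptions, not the module's exact code:

```python
import psycopg2

def get_postgres_comments(dsn: str, schema: str, table: str) -> dict:
    """column_name -> comment, read from the PostgreSQL system catalog."""
    query = """
        SELECT a.attname, d.description
        FROM pg_attribute a
        JOIN pg_class c ON c.oid = a.attrelid
        JOIN pg_namespace n ON n.oid = c.relnamespace
        LEFT JOIN pg_description d ON d.objoid = c.oid AND d.objsubid = a.attnum
        WHERE n.nspname = %s AND c.relname = %s AND a.attnum > 0 AND NOT a.attisdropped
    """
    with psycopg2.connect(dsn) as conn, conn.cursor() as cur:
        cur.execute(query, (schema, table))
        return {name: comment for name, comment in cur.fetchall() if comment}

def run_mapping(superset_client, dataset_id: int, comments: dict) -> None:
    """Copy comments into each column's verbose_name (payload keys are assumptions)."""
    dataset = superset_client.get_dataset(dataset_id)["result"]
    columns = [
        {"column_name": c["column_name"],
         "verbose_name": comments.get(c["column_name"], c.get("verbose_name"))}
        for c in dataset["columns"]
    ]
    superset_client.update_dataset(dataset_id, {"columns": columns})
```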
- 📦 **superset_tool.utils** (`Module`)
|
||||
- 📝 Utility package for superset_tool.
|
||||
- 🏗️ Layer: Infra
|
||||
- 📦 **main** (`Module`)
|
||||
- 📝 Entry point for the Svelte application.
|
||||
- 🏗️ Layer: UI-Entry
|
||||
@@ -454,6 +110,9 @@
|
||||
- 📝 Handles task creation from dynamic form submission.
|
||||
- ƒ **load** (`Function`)
|
||||
- 📝 Loads initial plugin data for the dashboard.
|
||||
- 🧩 **TaskManagementPage** (`Component`)
|
||||
- 📝 Page for managing and monitoring tasks.
|
||||
- 🏗️ Layer: Page
|
||||
- ƒ **loadInitialData** (`Function`)
|
||||
- 📝 Loads tasks and environments on page initialization.
|
||||
- ƒ **refreshTasks** (`Function`)
|
||||
@@ -579,7 +238,7 @@
|
||||
- ƒ **getSuggestion** (`Function`)
|
||||
- 📝 Finds a suggestion for a source database.
|
||||
- 🧩 **TaskLogViewer** (`Component`)
|
||||
- 📝 Displays detailed logs for a specific task in a modal.
|
||||
- 📝 Displays detailed logs for a specific task in a modal or inline.
|
||||
- 🏗️ Layer: UI
|
||||
- ƒ **fetchLogs** (`Function`)
|
||||
- 📝 Fetches logs for the current task.
|
||||
@@ -745,21 +404,48 @@
|
||||
- ƒ **get_scheduler_service** (`Function`)
|
||||
- 📝 Dependency injector for the SchedulerService.
|
||||
- 📦 **backend.src.core.superset_client** (`Module`)
|
||||
- 📝 Extends the base SupersetClient with database-specific metadata fetching.
|
||||
- 📝 Provides a high-level client for the Superset REST API, encapsulating request logic, error handling, and pagination.
|
||||
- 🏗️ Layer: Core
|
||||
- 🔗 INHERITS_FROM -> `superset_tool.client.SupersetClient`
|
||||
- ℂ **SupersetClient** (`Class`)
|
||||
- 📝 Extended SupersetClient for migration-specific operations.
|
||||
- 📝 Wrapper class over the Superset REST API, providing methods for working with dashboards and datasets.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes the client, validates the configuration, and creates the network client.
|
||||
- ƒ **authenticate** (`Function`)
|
||||
- 📝 Authenticates the client using the configured credentials.
|
||||
- ƒ **headers** (`Function`)
|
||||
- 📝 Returns the base HTTP headers used by the network client.
|
||||
- ƒ **get_dashboards** (`Function`)
|
||||
- 📝 Fetches the full list of dashboards, handling pagination automatically.
|
||||
- ƒ **get_dashboards_summary** (`Function`)
|
||||
- 📝 Fetches dashboard metadata optimized for the grid.
|
||||
- ƒ **export_dashboard** (`Function`)
|
||||
- 📝 Exports a dashboard as a ZIP archive.
|
||||
- ƒ **import_dashboard** (`Function`)
|
||||
- 📝 Imports a dashboard from a ZIP file.
|
||||
- ƒ **delete_dashboard** (`Function`)
|
||||
- 📝 Deletes a dashboard by its ID or slug.
|
||||
- ƒ **get_datasets** (`Function`)
|
||||
- 📝 Fetches the full list of datasets, handling pagination automatically.
|
||||
- ƒ **get_dataset** (`Function`)
|
||||
- 📝 Fetches information about a specific dataset by its ID.
|
||||
- ƒ **update_dataset** (`Function`)
|
||||
- 📝 Updates a dataset's data by its ID.
|
||||
- ƒ **get_databases** (`Function`)
|
||||
- 📝 Fetches the full list of databases.
|
||||
- ƒ **get_database** (`Function`)
|
||||
- 📝 Fetches information about a specific database by its ID.
|
||||
- ƒ **get_databases_summary** (`Function`)
|
||||
- 📝 Fetch a summary of databases including uuid, name, and engine.
|
||||
- ƒ **get_database_by_uuid** (`Function`)
|
||||
- 📝 Find a database by its UUID.
|
||||
- ƒ **get_dataset** (`Function`)
|
||||
- 📝 Fetch full dataset structure including columns and metrics.
|
||||
- ƒ **update_dataset** (`Function`)
|
||||
- 📝 Update dataset metadata.
|
||||
- ƒ **_resolve_target_id_for_delete** (`Function`)
|
||||
- ƒ **_do_import** (`Function`)
|
||||
- ƒ **_validate_export_response** (`Function`)
|
||||
- ƒ **_resolve_export_filename** (`Function`)
|
||||
- ƒ **_validate_query_params** (`Function`)
|
||||
- ƒ **_fetch_total_object_count** (`Function`)
|
||||
- ƒ **_fetch_all_pages** (`Function`)
|
||||
- ƒ **_validate_import_file** (`Function`)
|
||||
- 📦 **ConfigManagerModule** (`Module`)
|
||||
- 📝 Manages application configuration, including loading/saving to JSON and CRUD for environments.
|
||||
- 🏗️ Layer: Core
|
||||
@@ -785,6 +471,8 @@
|
||||
- 📝 Returns the list of configured environments.
|
||||
- ƒ **has_environments** (`Function`)
|
||||
- 📝 Checks if at least one environment is configured.
|
||||
- ƒ **get_environment** (`Function`)
|
||||
- 📝 Returns a single environment by ID.
|
||||
- ƒ **add_environment** (`Function`)
|
||||
- 📝 Adds a new environment to the configuration.
|
||||
- ƒ **update_environment** (`Function`)
|
||||
@@ -862,6 +550,8 @@
|
||||
- 📝 Returns a list of recent log entries from the buffer.
|
||||
- 📦 **Logger** (`Global`)
|
||||
- 📝 The global logger instance for the application, configured with both a console handler and the custom WebSocket handler.
|
||||
- ƒ **believed** (`Function`)
|
||||
- 📝 A decorator that wraps a function in a belief scope.
|
||||
- ℂ **PluginLoader** (`Class`)
|
||||
- 📝 Scans a specified directory for Python modules, dynamically loads them, and registers any classes that are valid implementations of the PluginBase interface.
|
||||
- 🏗️ Layer: Core
|
||||
@@ -907,12 +597,76 @@
|
||||
- ℂ **PluginConfig** (`Class`)
|
||||
- 📝 A Pydantic model used to represent the validated configuration and metadata of a loaded plugin. This object is what gets exposed to the API layer.
|
||||
- 🏗️ Layer: Core
|
||||
- 📦 **backend.core.utils.fileio** (`Module`)
|
||||
- 📝 Provides a set of utilities for file operations, including temporary files, ZIP archives, YAML files, and directory cleanup.
|
||||
- 🏗️ Layer: Infra
|
||||
- 🔗 DEPENDS_ON -> `backend.src.core.logger`
|
||||
- 🔗 DEPENDS_ON -> `pyyaml`
|
||||
- ℂ **InvalidZipFormatError** (`Class`)
|
||||
- ƒ **create_temp_file** (`Function`)
|
||||
- 📝 Context manager that creates a temporary file or directory with guaranteed removal.
|
||||
- ƒ **remove_empty_directories** (`Function`)
|
||||
- 📝 Recursively removes all empty subdirectories, starting from the given path.
|
||||
- ƒ **read_dashboard_from_disk** (`Function`)
|
||||
- 📝 Reads a file's binary contents from disk.
|
||||
- ƒ **calculate_crc32** (`Function`)
|
||||
- 📝 Computes the CRC32 checksum of a file.
|
||||
- 📦 **RetentionPolicy** (`DataClass`)
|
||||
- 📝 Defines the retention policy for archives (daily, weekly, monthly).
|
||||
- ƒ **archive_exports** (`Function`)
|
||||
- 📝 Manages the archive of exported files, applying the retention policy and deduplication.
|
||||
- 🔗 CALLS -> `apply_retention_policy`
|
||||
- 🔗 CALLS -> `calculate_crc32`
|
||||
- ƒ **apply_retention_policy** (`Function`)
|
||||
- 📝 (Helper) Applies the retention policy to a list of files, returning those that should be kept.
|
||||
- ƒ **save_and_unpack_dashboard** (`Function`)
|
||||
- 📝 Saves the binary contents of a ZIP archive to disk and optionally unpacks it.
|
||||
- ƒ **update_yamls** (`Function`)
|
||||
- 📝 Updates configuration in YAML files, replacing values or applying regexes.
|
||||
- 🔗 CALLS -> `_update_yaml_file`
|
||||
- ƒ **_update_yaml_file** (`Function`)
|
||||
- 📝 (Helper) Updates a single YAML file.
|
||||
- ƒ **create_dashboard_export** (`Function`)
|
||||
- 📝 Creates a ZIP archive from the given source paths.
|
||||
- ƒ **sanitize_filename** (`Function`)
|
||||
- 📝 Strips characters that are not allowed in file names from a string.
|
||||
- ƒ **get_filename_from_headers** (`Function`)
|
||||
- 📝 Extracts the file name from the 'Content-Disposition' HTTP header.
|
||||
- ƒ **consolidate_archive_folders** (`Function`)
|
||||
- 📝 Consolidates archive directories based on a shared slug in their names.
|
||||
- 📦 **backend.core.utils.network** (`Module`)
|
||||
- 📝 Encapsulates the low-level HTTP logic for the Superset API, including authentication, session management, retry logic, and error handling.
|
||||
- 🏗️ Layer: Infra
|
||||
- 🔗 DEPENDS_ON -> `backend.src.core.logger`
|
||||
- 🔗 DEPENDS_ON -> `requests`
|
||||
- ℂ **SupersetAPIError** (`Class`)
|
||||
- ℂ **AuthenticationError** (`Class`)
|
||||
- 📦 **backend.src.core.utils.matching** (`Module`)
|
||||
- 📝 Provides utility functions for fuzzy matching database names.
|
||||
- 🏗️ Layer: Core
|
||||
- 🔗 DEPENDS_ON -> `rapidfuzz`
|
||||
- ƒ **suggest_mappings** (`Function`)
|
||||
- 📝 Suggests mappings between source and target databases using fuzzy matching.
|
||||
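A short sketch of how `suggest_mappings` can be built on `rapidfuzz` (the scorer and the score cutoff are assumptions):

```python
from typing import Dict, List, Optional

from rapidfuzz import fuzz, process

def suggest_mappings(source_dbs: List[str], target_dbs: List[str],
                     cutoff: int = 70) -> Dict[str, Optional[str]]:
    """For each source database name, propose the closest target name (or None)."""
    suggestions = {}
    for name in source_dbs:
        match = process.extractOne(name, target_dbs,
                                   scorer=fuzz.token_sort_ratio, score_cutoff=cutoff)
        suggestions[name] = match[0] if match else None
    return suggestions
```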
- 📦 **backend.core.utils.dataset_mapper** (`Module`)
|
||||
- 📝 This module is responsible for updating metadata (verbose_map) in Superset datasets, extracting it from PostgreSQL or XLSX files.
|
||||
- 🏗️ Layer: Domain
|
||||
- 🔗 DEPENDS_ON -> `backend.core.superset_client`
|
||||
- 🔗 DEPENDS_ON -> `pandas`
|
||||
- 🔗 DEPENDS_ON -> `psycopg2`
|
||||
- ℂ **DatasetMapper** (`Class`)
|
||||
- 📝 Class for mapping and updating the verbose_map of Superset datasets.
|
||||
- ƒ **__init__** (`Function`)
|
||||
- 📝 Initializes the mapper.
|
||||
- ƒ **get_postgres_comments** (`Function`)
|
||||
- 📝 Extracts column comments from the PostgreSQL system catalog.
|
||||
- ƒ **load_excel_mappings** (`Function`)
|
||||
- 📝 Loads 'column_name' -> 'column_comment' mappings from an XLSX file.
|
||||
- ƒ **run_mapping** (`Function`)
|
||||
- 📝 Main function that performs the mapping and updates the dataset's verbose_map in Superset.
|
||||
- 🔗 CALLS -> `self.get_postgres_comments`
|
||||
- 🔗 CALLS -> `self.load_excel_mappings`
|
||||
- 🔗 CALLS -> `superset_client.get_dataset`
|
||||
- 🔗 CALLS -> `superset_client.update_dataset`
|
||||
- 📦 **TaskPersistenceModule** (`Module`)
|
||||
- 📝 Handles the persistence of tasks using SQLAlchemy and the tasks.db database.
|
||||
- 🏗️ Layer: Core
|
||||
@@ -1228,10 +982,8 @@
|
||||
- 📝 Returns the JSON schema for migration plugin parameters.
|
||||
- ƒ **execute** (`Function`)
|
||||
- 📝 Executes the dashboard migration logic.
|
||||
- ƒ **test_superset_config_url_normalization** (`Function`)
|
||||
- 📝 Tests that SupersetConfig correctly normalizes the base URL.
|
||||
- ƒ **test_superset_config_invalid_url** (`Function`)
|
||||
- 📝 Tests that SupersetConfig raises ValueError for invalid URLs.
|
||||
- ƒ **test_environment_model** (`Function`)
|
||||
- 📝 Tests that Environment model correctly stores values.
|
||||
- ƒ **test_belief_scope_logs_entry_action_exit** (`Function`)
|
||||
- 📝 Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs.
|
||||
- ƒ **test_belief_scope_error_handling** (`Function`)
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
# [DEF:superset_tool:Module]
|
||||
# @SEMANTICS: package, root
|
||||
# @PURPOSE: Root package for superset_tool.
|
||||
# @LAYER: Domain
|
||||
# @PUBLIC_API: SupersetClient, SupersetConfig
|
||||
|
||||
# [SECTION: IMPORTS]
|
||||
from .client import SupersetClient
|
||||
from .models import SupersetConfig
|
||||
# [/SECTION]
|
||||
|
||||
__all__ = ["SupersetClient", "SupersetConfig"]
|
||||
|
||||
# [/DEF:superset_tool:Module]
|
||||
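For reference, the public API the deleted package exposed (`SupersetClient`, `SupersetConfig`) could be driven roughly like this; a sketch, with placeholder URL and assumed auth payload keys:

```python
from superset_tool import SupersetClient, SupersetConfig

config = SupersetConfig(
    base_url="https://superset.example.com",   # normalized to .../api/v1 by the model
    auth={"username": "admin", "password": "secret", "provider": "db"},
)
client = SupersetClient(config)

total, dashboards = client.get_dashboards()
print(f"{total} dashboards available")

content, filename = client.export_dashboard(dashboards[0]["id"])
with open(filename, "wb") as fh:
    fh.write(content)
```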
@@ -1,508 +0,0 @@
|
||||
# [DEF:superset_tool.client:Module]
|
||||
#
|
||||
# @SEMANTICS: superset, api, client, rest, http, dashboard, dataset, import, export
|
||||
# @PURPOSE: Предоставляет высокоуровневый клиент для взаимодействия с Superset REST API, инкапсулируя логику запросов, обработку ошибок и пагинацию.
|
||||
# @LAYER: Domain
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.models
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.exceptions
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.utils
|
||||
#
|
||||
# @INVARIANT: All network operations must use the internal APIClient instance.
|
||||
# @CONSTRAINT: No direct use of 'requests' library outside of APIClient.
|
||||
# @PUBLIC_API: SupersetClient
|
||||
|
||||
# [SECTION: IMPORTS]
|
||||
import json
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union, cast
|
||||
from requests import Response
|
||||
from superset_tool.models import SupersetConfig
|
||||
from superset_tool.exceptions import ExportError, InvalidZipFormatError
|
||||
from superset_tool.utils.fileio import get_filename_from_headers
|
||||
from superset_tool.utils.logger import SupersetLogger, belief_scope
|
||||
from superset_tool.utils.network import APIClient
|
||||
# [/SECTION]
|
||||
|
||||
# [DEF:SupersetClient:Class]
|
||||
# @PURPOSE: Класс-обёртка над Superset REST API, предоставляющий методы для работы с дашбордами и датасетами.
|
||||
# @RELATION: CREATES_INSTANCE_OF -> APIClient
|
||||
# @RELATION: USES -> SupersetConfig
|
||||
class SupersetClient:
|
||||
# [DEF:__init__:Function]
|
||||
# @PURPOSE: Инициализирует клиент, проверяет конфигурацию и создает сетевой клиент.
|
||||
# @PRE: `config` должен быть валидным объектом SupersetConfig.
|
||||
# @POST: Атрибуты `logger`, `config`, и `network` созданы и готовы к работе.
|
||||
# @PARAM: config (SupersetConfig) - Конфигурация подключения.
|
||||
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
|
||||
def __init__(self, config: SupersetConfig, logger: Optional[SupersetLogger] = None):
|
||||
with belief_scope("__init__"):
|
||||
self.logger = logger or SupersetLogger(name="SupersetClient")
|
||||
self.logger.info("[SupersetClient.__init__][Enter] Initializing SupersetClient.")
|
||||
self._validate_config(config)
|
||||
self.config = config
|
||||
self.network = APIClient(
|
||||
config=config.dict(),
|
||||
verify_ssl=config.verify_ssl,
|
||||
timeout=config.timeout,
|
||||
logger=self.logger,
|
||||
)
|
||||
self.delete_before_reimport: bool = False
|
||||
self.logger.info("[SupersetClient.__init__][Exit] SupersetClient initialized.")
|
||||
# [/DEF:__init__:Function]
|
||||
|
||||
# [DEF:_validate_config:Function]
|
||||
# @PURPOSE: Проверяет, что переданный объект конфигурации имеет корректный тип.
|
||||
# @PRE: `config` должен быть передан.
|
||||
# @POST: Если проверка пройдена, выполнение продолжается.
|
||||
# @THROW: TypeError - Если `config` не является экземпляром `SupersetConfig`.
|
||||
# @PARAM: config (SupersetConfig) - Объект для проверки.
|
||||
def _validate_config(self, config: SupersetConfig) -> None:
|
||||
with belief_scope("_validate_config"):
|
||||
self.logger.debug("[_validate_config][Enter] Validating SupersetConfig.")
|
||||
assert isinstance(config, SupersetConfig), "Конфигурация должна быть экземпляром SupersetConfig"
|
||||
self.logger.debug("[_validate_config][Exit] Config is valid.")
|
||||
# [/DEF:_validate_config:Function]
|
||||
|
||||
@property
|
||||
# [DEF:headers:Function]
|
||||
# @PURPOSE: Возвращает базовые HTTP-заголовки, используемые сетевым клиентом.
|
||||
# @PRE: self.network должен быть инициализирован.
|
||||
# @POST: Возвращаемый словарь содержит актуальные заголовки, включая токен авторизации.
|
||||
def headers(self) -> dict:
|
||||
with belief_scope("headers"):
|
||||
return self.network.headers
|
||||
# [/DEF:headers:Function]
|
||||
|
||||
# [DEF:get_dashboards:Function]
|
||||
# @PURPOSE: Получает полный список дашбордов, автоматически обрабатывая пагинацию.
|
||||
# @RELATION: CALLS -> self._fetch_total_object_count
|
||||
# @RELATION: CALLS -> self._fetch_all_pages
|
||||
# @PRE: self.network должен быть инициализирован.
|
||||
# @POST: Возвращаемый список содержит все дашборды, доступные по API.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса.
|
||||
# @PARAM: query (Optional[Dict]) - Дополнительные параметры запроса для API.
|
||||
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список дашбордов).
|
||||
def get_dashboards(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
|
||||
with belief_scope("get_dashboards"):
|
||||
assert self.network, "[get_dashboards][PRE] Network client must be initialized."
|
||||
self.logger.info("[get_dashboards][Enter] Fetching dashboards.")
|
||||
validated_query = self._validate_query_params(query or {})
|
||||
if 'columns' not in validated_query:
|
||||
validated_query['columns'] = ["slug", "id", "changed_on_utc", "dashboard_title", "published"]
|
||||
total_count = self._fetch_total_object_count(endpoint="/dashboard/")
|
||||
paginated_data = self._fetch_all_pages(
|
||||
endpoint="/dashboard/",
|
||||
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
|
||||
)
|
||||
self.logger.info("[get_dashboards][Exit] Found %d dashboards.", total_count)
|
||||
return total_count, paginated_data
|
||||
# [/DEF:get_dashboards:Function]
|
||||
|
||||
# [DEF:export_dashboard:Function]
|
||||
# @PURPOSE: Экспортирует дашборд в виде ZIP-архива.
|
||||
# @RELATION: CALLS -> self.network.request
|
||||
# @PRE: dashboard_id должен быть положительным целым числом.
|
||||
# @POST: Возвращает бинарное содержимое ZIP-архива и имя файла.
|
||||
# @THROW: ExportError - Если экспорт завершился неудачей.
|
||||
# @PARAM: dashboard_id (int) - ID дашборда для экспорта.
|
||||
# @RETURN: Tuple[bytes, str] - Бинарное содержимое ZIP-архива и имя файла.
|
||||
def export_dashboard(self, dashboard_id: int) -> Tuple[bytes, str]:
|
||||
with belief_scope("export_dashboard"):
|
||||
assert isinstance(dashboard_id, int) and dashboard_id > 0, "[export_dashboard][PRE] dashboard_id must be a positive integer."
|
||||
self.logger.info("[export_dashboard][Enter] Exporting dashboard %s.", dashboard_id)
|
||||
response = self.network.request(
|
||||
method="GET",
|
||||
endpoint="/dashboard/export/",
|
||||
params={"q": json.dumps([dashboard_id])},
|
||||
stream=True,
|
||||
raw_response=True,
|
||||
)
|
||||
response = cast(Response, response)
|
||||
self._validate_export_response(response, dashboard_id)
|
||||
filename = self._resolve_export_filename(response, dashboard_id)
|
||||
self.logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename)
|
||||
return response.content, filename
|
||||
# [/DEF:export_dashboard:Function]
|
||||
|
||||
# [DEF:import_dashboard:Function]
|
||||
# @PURPOSE: Импортирует дашборд из ZIP-файла с возможностью автоматического удаления и повторной попытки при ошибке.
|
||||
# @RELATION: CALLS -> self._do_import
|
||||
# @RELATION: CALLS -> self.delete_dashboard
|
||||
# @RELATION: CALLS -> self.get_dashboards
|
||||
# @PRE: Файл, указанный в `file_name`, должен существовать и быть валидным ZIP-архивом Superset.
|
||||
# @POST: Дашборд успешно импортирован, возвращен ответ API.
|
||||
# @THROW: FileNotFoundError - Если файл не найден.
|
||||
# @THROW: InvalidZipFormatError - Если файл не является валидным ZIP-архивом Superset.
|
||||
# @PARAM: file_name (Union[str, Path]) - Путь к ZIP-архиву.
|
||||
# @PARAM: dash_id (Optional[int]) - ID дашборда для удаления при сбое.
|
||||
# @PARAM: dash_slug (Optional[str]) - Slug дашборда для поиска ID, если ID не предоставлен.
|
||||
# @RETURN: Dict - Ответ API в случае успеха.
|
||||
def import_dashboard(self, file_name: Union[str, Path], dash_id: Optional[int] = None, dash_slug: Optional[str] = None) -> Dict:
|
||||
with belief_scope("import_dashboard"):
|
||||
assert file_name, "[import_dashboard][PRE] file_name must be provided."
|
||||
file_path = str(file_name)
|
||||
self._validate_import_file(file_path)
|
||||
try:
|
||||
return self._do_import(file_path)
|
||||
except Exception as exc:
|
||||
self.logger.error("[import_dashboard][Failure] First import attempt failed: %s", exc, exc_info=True)
|
||||
if not self.delete_before_reimport:
|
||||
raise
|
||||
|
||||
target_id = self._resolve_target_id_for_delete(dash_id, dash_slug)
|
||||
if target_id is None:
|
||||
self.logger.error("[import_dashboard][Failure] No ID available for delete-retry.")
|
||||
raise
|
||||
|
||||
self.delete_dashboard(target_id)
|
||||
self.logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id)
|
||||
return self._do_import(file_path)
|
||||
# [/DEF:import_dashboard:Function]
|
||||
|
||||
# [DEF:_resolve_target_id_for_delete:Function]
|
||||
# @PURPOSE: Определяет ID дашборда для удаления, используя ID или slug.
|
||||
# @PARAM: dash_id (Optional[int]) - ID дашборда.
|
||||
# @PARAM: dash_slug (Optional[str]) - Slug дашборда.
|
||||
# @PRE: По крайней мере один из параметров (dash_id или dash_slug) должен быть предоставлен.
|
||||
# @POST: Возвращает ID дашборда, если найден, иначе None.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса при поиске по slug.
|
||||
# @RETURN: Optional[int] - Найденный ID или None.
|
||||
def _resolve_target_id_for_delete(self, dash_id: Optional[int], dash_slug: Optional[str]) -> Optional[int]:
|
||||
with belief_scope("_resolve_target_id_for_delete"):
|
||||
assert dash_id is not None or dash_slug is not None, "[_resolve_target_id_for_delete][PRE] At least one of ID or slug must be provided."
|
||||
if dash_id is not None:
|
||||
return dash_id
|
||||
if dash_slug is not None:
|
||||
self.logger.debug("[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", dash_slug)
|
||||
try:
|
||||
_, candidates = self.get_dashboards(query={"filters": [{"col": "slug", "op": "eq", "value": dash_slug}]})
|
||||
if candidates:
|
||||
target_id = candidates[0]["id"]
|
||||
self.logger.debug("[_resolve_target_id_for_delete][Success] Resolved slug to ID %s.", target_id)
|
||||
return target_id
|
||||
except Exception as e:
|
||||
self.logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e)
|
||||
return None
|
||||
self.logger.debug("[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", dash_slug)
|
||||
try:
|
||||
_, candidates = self.get_dashboards(query={"filters": [{"col": "slug", "op": "eq", "value": dash_slug}]})
|
||||
if candidates:
|
||||
target_id = candidates[0]["id"]
|
||||
self.logger.debug("[_resolve_target_id_for_delete][Success] Resolved slug to ID %s.", target_id)
|
||||
return target_id
|
||||
except Exception as e:
|
||||
self.logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e)
|
||||
return None
|
||||
# [/DEF:_resolve_target_id_for_delete:Function]
|
||||
|
||||
# [DEF:_do_import:Function]
|
||||
# @PURPOSE: Выполняет один запрос на импорт без обработки исключений.
|
||||
# @PRE: Файл должен существовать.
|
||||
# @POST: Файл успешно загружен, возвращен ответ API.
|
||||
# @THROW: FileNotFoundError - Если файл не существует.
|
||||
# @PARAM: file_name (Union[str, Path]) - Путь к файлу.
|
||||
# @RETURN: Dict - Ответ API.
|
||||
def _do_import(self, file_name: Union[str, Path]) -> Dict:
|
||||
with belief_scope("_do_import"):
|
||||
self.logger.debug(f"[_do_import][State] Uploading file: {file_name}")
|
||||
file_path = Path(file_name)
|
||||
if file_path.exists():
|
||||
self.logger.debug(f"[_do_import][State] File size: {file_path.stat().st_size} bytes")
|
||||
else:
|
||||
self.logger.error(f"[_do_import][Failure] File does not exist: {file_name}")
|
||||
raise FileNotFoundError(f"File does not exist: {file_name}")
|
||||
return self.network.upload_file(
|
||||
endpoint="/dashboard/import/",
|
||||
file_info={"file_obj": file_path, "file_name": file_path.name, "form_field": "formData"},
|
||||
extra_data={"overwrite": "true"},
|
||||
timeout=self.config.timeout * 2,
|
||||
)
|
||||
# [/DEF:_do_import:Function]
|
||||
|
||||
# [DEF:delete_dashboard:Function]
|
||||
# @PURPOSE: Удаляет дашборд по его ID или slug.
|
||||
# @RELATION: CALLS -> self.network.request
|
||||
# @PRE: dashboard_id должен быть предоставлен.
|
||||
# @POST: Дашборд удален или залогировано предупреждение.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса.
|
||||
# @PARAM: dashboard_id (Union[int, str]) - ID или slug дашборда.
|
||||
def delete_dashboard(self, dashboard_id: Union[int, str]) -> None:
|
||||
with belief_scope("delete_dashboard"):
|
||||
assert dashboard_id, "[delete_dashboard][PRE] dashboard_id must be provided."
|
||||
self.logger.info("[delete_dashboard][Enter] Deleting dashboard %s.", dashboard_id)
|
||||
response = self.network.request(method="DELETE", endpoint=f"/dashboard/{dashboard_id}")
|
||||
response = cast(Dict, response)
|
||||
if response.get("result", True) is not False:
|
||||
self.logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id)
|
||||
else:
|
||||
self.logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response)
|
||||
# [/DEF:delete_dashboard:Function]
|
||||
|
||||
# [DEF:_extract_dashboard_id_from_zip:Function]
|
||||
# @PURPOSE: Извлекает ID дашборда из `metadata.yaml` внутри ZIP-архива.
|
||||
# @PARAM: file_name (Union[str, Path]) - Путь к ZIP-файлу.
|
||||
# @PRE: Файл, указанный в `file_name`, должен быть валидным ZIP-архивом.
|
||||
# @POST: Возвращает ID дашборда, если найден в metadata.yaml, иначе None.
|
||||
# @THROW: ImportError - Если не установлен `yaml`.
|
||||
# @RETURN: Optional[int] - ID дашборда или None.
|
||||
def _extract_dashboard_id_from_zip(self, file_name: Union[str, Path]) -> Optional[int]:
|
||||
with belief_scope("_extract_dashboard_id_from_zip"):
|
||||
assert zipfile.is_zipfile(file_name), "[_extract_dashboard_id_from_zip][PRE] file_name must be a valid zip file."
|
||||
try:
|
||||
import yaml
|
||||
with zipfile.ZipFile(file_name, "r") as zf:
|
||||
for name in zf.namelist():
|
||||
if name.endswith("metadata.yaml"):
|
||||
with zf.open(name) as meta_file:
|
||||
meta = yaml.safe_load(meta_file)
|
||||
dash_id = meta.get("dashboard_uuid") or meta.get("dashboard_id")
|
||||
if dash_id: return int(dash_id)
|
||||
except Exception as exc:
|
||||
self.logger.error("[_extract_dashboard_id_from_zip][Failure] %s", exc, exc_info=True)
|
||||
return None
|
||||
# [/DEF:_extract_dashboard_id_from_zip:Function]
|
||||
|
||||
# [DEF:_extract_dashboard_slug_from_zip:Function]
|
||||
# @PURPOSE: Извлекает slug дашборда из `metadata.yaml` внутри ZIP-архива.
|
||||
# @PARAM: file_name (Union[str, Path]) - Путь к ZIP-файлу.
|
||||
# @PRE: Файл, указанный в `file_name`, должен быть валидным ZIP-архивом.
|
||||
# @POST: Возвращает slug дашборда, если найден в metadata.yaml, иначе None.
|
||||
# @THROW: ImportError - Если не установлен `yaml`.
|
||||
# @RETURN: Optional[str] - Slug дашборда или None.
|
||||
def _extract_dashboard_slug_from_zip(self, file_name: Union[str, Path]) -> Optional[str]:
|
||||
with belief_scope("_extract_dashboard_slug_from_zip"):
|
||||
assert zipfile.is_zipfile(file_name), "[_extract_dashboard_slug_from_zip][PRE] file_name must be a valid zip file."
|
||||
try:
|
||||
import yaml
|
||||
with zipfile.ZipFile(file_name, "r") as zf:
|
||||
for name in zf.namelist():
|
||||
if name.endswith("metadata.yaml"):
|
||||
with zf.open(name) as meta_file:
|
||||
meta = yaml.safe_load(meta_file)
|
||||
if slug := meta.get("slug"):
|
||||
return str(slug)
|
||||
except Exception as exc:
|
||||
self.logger.error("[_extract_dashboard_slug_from_zip][Failure] %s", exc, exc_info=True)
|
||||
return None
|
||||
# [/DEF:_extract_dashboard_slug_from_zip:Function]
|
||||
|
||||
# [DEF:_validate_export_response:Function]
|
||||
# @PURPOSE: Проверяет, что HTTP-ответ на экспорт является валидным ZIP-архивом.
|
||||
# @PRE: response должен быть объектом requests.Response.
|
||||
# @POST: Проверка пройдена, если ответ является непустым ZIP-архивом.
|
||||
# @THROW: ExportError - Если ответ не является ZIP-архивом или пуст.
|
||||
# @PARAM: response (Response) - HTTP ответ.
|
||||
# @PARAM: dashboard_id (int) - ID дашборда.
|
||||
def _validate_export_response(self, response: Response, dashboard_id: int) -> None:
|
||||
with belief_scope("_validate_export_response"):
|
||||
assert isinstance(response, Response), "[_validate_export_response][PRE] response must be a requests.Response object."
|
||||
content_type = response.headers.get("Content-Type", "")
|
||||
if "application/zip" not in content_type:
|
||||
raise ExportError(f"Получен не ZIP-архив (Content-Type: {content_type})")
|
||||
if not response.content:
|
||||
raise ExportError("Получены пустые данные при экспорте")
|
||||
# [/DEF:_validate_export_response:Function]
|
||||
|
||||
# [DEF:_resolve_export_filename:Function]
|
||||
# @PURPOSE: Определяет имя файла для экспорта из заголовков или генерирует его.
|
||||
# @PRE: response должен быть объектом requests.Response.
|
||||
# @POST: Возвращает непустое имя файла.
|
||||
# @PARAM: response (Response) - HTTP ответ.
|
||||
# @PARAM: dashboard_id (int) - ID дашборда.
|
||||
# @RETURN: str - Имя файла.
|
||||
def _resolve_export_filename(self, response: Response, dashboard_id: int) -> str:
|
||||
with belief_scope("_resolve_export_filename"):
|
||||
assert isinstance(response, Response), "[_resolve_export_filename][PRE] response must be a requests.Response object."
|
||||
filename = get_filename_from_headers(dict(response.headers))
|
||||
if not filename:
|
||||
from datetime import datetime
|
||||
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
|
||||
filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip"
|
||||
self.logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename)
|
||||
return filename
|
||||
# [/DEF:_resolve_export_filename:Function]
|
||||
|
||||
# [DEF:_validate_query_params:Function]
|
||||
# @PURPOSE: Формирует корректный набор параметров запроса с пагинацией.
|
||||
# @PARAM: query (Optional[Dict]) - Исходные параметры.
|
||||
# @PRE: query, если предоставлен, должен быть словарем.
|
||||
# @POST: Возвращает словарь, содержащий базовые параметры пагинации, объединенные с `query`.
|
||||
# @RETURN: Dict - Валидированные параметры.
|
||||
def _validate_query_params(self, query: Optional[Dict]) -> Dict:
|
||||
with belief_scope("_validate_query_params"):
|
||||
assert query is None or isinstance(query, dict), "[_validate_query_params][PRE] query must be a dictionary or None."
|
||||
base_query = {"page": 0, "page_size": 1000}
|
||||
return {**base_query, **(query or {})}
|
||||
# [/DEF:_validate_query_params:Function]
|
||||
|
||||
# [DEF:_fetch_total_object_count:Function]
|
||||
# @PURPOSE: Получает общее количество объектов по указанному эндпоинту для пагинации.
|
||||
# @PARAM: endpoint (str) - API эндпоинт.
|
||||
# @PRE: endpoint должен быть непустой строкой.
|
||||
# @POST: Возвращает общее количество объектов (>= 0).
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса.
|
||||
# @RETURN: int - Количество объектов.
|
||||
def _fetch_total_object_count(self, endpoint: str) -> int:
|
||||
with belief_scope("_fetch_total_object_count"):
|
||||
assert endpoint and isinstance(endpoint, str), "[_fetch_total_object_count][PRE] endpoint must be a non-empty string."
|
||||
return self.network.fetch_paginated_count(
|
||||
endpoint=endpoint,
|
||||
query_params={"page": 0, "page_size": 1},
|
||||
count_field="count",
|
||||
)
|
||||
# [/DEF:_fetch_total_object_count:Function]
|
||||
|
||||
# [DEF:_fetch_all_pages:Function]
|
||||
# @PURPOSE: Итерируется по всем страницам пагинированного API и собирает все данные.
|
||||
# @PARAM: endpoint (str) - API эндпоинт.
|
||||
# @PARAM: pagination_options (Dict) - Опции пагинации.
|
||||
# @PRE: endpoint должен быть непустой строкой, pagination_options - словарем.
|
||||
# @POST: Возвращает полный список объектов.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса.
|
||||
# @RETURN: List[Dict] - Список всех объектов.
|
||||
def _fetch_all_pages(self, endpoint: str, pagination_options: Dict) -> List[Dict]:
|
||||
with belief_scope("_fetch_all_pages"):
|
||||
assert endpoint and isinstance(endpoint, str), "[_fetch_all_pages][PRE] endpoint must be a non-empty string."
|
||||
assert isinstance(pagination_options, dict), "[_fetch_all_pages][PRE] pagination_options must be a dictionary."
|
||||
return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options)
|
||||
# [/DEF:_fetch_all_pages:Function]
|
||||
|
||||
# [DEF:_validate_import_file:Function]
|
||||
# @PURPOSE: Проверяет, что файл существует, является ZIP-архивом и содержит `metadata.yaml`.
|
||||
# @PRE: zip_path должен быть предоставлен.
|
||||
# @POST: Проверка пройдена, если файл существует, является ZIP и содержит `metadata.yaml`.
|
||||
# @THROW: FileNotFoundError - Если файл не найден.
|
||||
# @THROW: InvalidZipFormatError - Если файл не является ZIP или не содержит `metadata.yaml`.
|
||||
# @PARAM: zip_path (Union[str, Path]) - Путь к файлу.
|
||||
def _validate_import_file(self, zip_path: Union[str, Path]) -> None:
|
||||
with belief_scope("_validate_import_file"):
|
||||
assert zip_path, "[_validate_import_file][PRE] zip_path must be provided."
|
||||
path = Path(zip_path)
|
||||
assert path.exists(), f"Файл {zip_path} не существует"
|
||||
assert zipfile.is_zipfile(path), f"Файл {zip_path} не является ZIP-архивом"
|
||||
with zipfile.ZipFile(path, "r") as zf:
|
||||
assert any(n.endswith("metadata.yaml") for n in zf.namelist()), f"Архив {zip_path} не содержит 'metadata.yaml'"
|
||||
# [/DEF:_validate_import_file:Function]
|
||||
|
||||
# [DEF:get_datasets:Function]
|
||||
# @PURPOSE: Получает полный список датасетов, автоматически обрабатывая пагинацию.
|
||||
# @RELATION: CALLS -> self._fetch_total_object_count
|
||||
# @RELATION: CALLS -> self._fetch_all_pages
|
||||
# @PARAM: query (Optional[Dict]) - Дополнительные параметры запроса.
|
||||
# @PRE: self.network должен быть инициализирован.
|
||||
# @POST: Возвращаемый список содержит все датасеты, доступные по API.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса.
|
||||
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список датасетов).
|
||||
def get_datasets(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
|
||||
with belief_scope("get_datasets"):
|
||||
assert self.network, "[get_datasets][PRE] Network client must be initialized."
|
||||
self.logger.info("[get_datasets][Enter] Fetching datasets.")
|
||||
validated_query = self._validate_query_params(query)
|
||||
|
||||
total_count = self._fetch_total_object_count(endpoint="/dataset/")
|
||||
paginated_data = self._fetch_all_pages(
|
||||
endpoint="/dataset/",
|
||||
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
|
||||
)
|
||||
self.logger.info("[get_datasets][Exit] Found %d datasets.", total_count)
|
||||
return total_count, paginated_data
|
||||
# [/DEF:get_datasets:Function]
|
||||
|
||||
# [DEF:get_databases:Function]
|
||||
# @PURPOSE: Получает полный список баз данных, автоматически обрабатывая пагинацию.
|
||||
# @RELATION: CALLS -> self._fetch_total_object_count
|
||||
# @RELATION: CALLS -> self._fetch_all_pages
|
||||
# @PARAM: query (Optional[Dict]) - Дополнительные параметры запроса.
|
||||
# @PRE: self.network должен быть инициализирован.
|
||||
# @POST: Возвращаемый список содержит все базы данных, доступные по API.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса.
|
||||
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список баз данных).
|
||||
def get_databases(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
|
||||
with belief_scope("get_databases"):
|
||||
assert self.network, "[get_databases][PRE] Network client must be initialized."
|
||||
self.logger.info("[get_databases][Enter] Fetching databases.")
|
||||
validated_query = self._validate_query_params(query or {})
|
||||
if 'columns' not in validated_query:
|
||||
validated_query['columns'] = []
|
||||
total_count = self._fetch_total_object_count(endpoint="/database/")
|
||||
paginated_data = self._fetch_all_pages(
|
||||
endpoint="/database/",
|
||||
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
|
||||
)
|
||||
self.logger.info("[get_databases][Exit] Found %d databases.", total_count)
|
||||
return total_count, paginated_data
|
||||
# [/DEF:get_databases:Function]
|
||||
|
||||
# [DEF:get_dataset:Function]
|
||||
# @PURPOSE: Получает информацию о конкретном датасете по его ID.
|
||||
# @RELATION: CALLS -> self.network.request
|
||||
# @PARAM: dataset_id (int) - ID датасета.
|
||||
# @PRE: dataset_id должен быть положительным целым числом.
|
||||
# @POST: Возвращает словарь с информацией о датасете.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса или если датасет не найден.
|
||||
# @RETURN: Dict - Информация о датасете.
|
||||
def get_dataset(self, dataset_id: int) -> Dict:
|
||||
with belief_scope("get_dataset"):
|
||||
assert isinstance(dataset_id, int) and dataset_id > 0, "[get_dataset][PRE] dataset_id must be a positive integer."
|
||||
self.logger.info("[get_dataset][Enter] Fetching dataset %s.", dataset_id)
|
||||
response = self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}")
|
||||
response = cast(Dict, response)
|
||||
self.logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id)
|
||||
return response
|
||||
# [/DEF:get_dataset:Function]
|
||||
|
||||
# [DEF:get_database:Function]
|
||||
# @PURPOSE: Получает информацию о конкретной базе данных по её ID.
|
||||
# @RELATION: CALLS -> self.network.request
|
||||
# @PARAM: database_id (int) - ID базы данных.
|
||||
# @PRE: database_id должен быть положительным целым числом.
|
||||
# @POST: Возвращает словарь с информацией о базе данных.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса или если база данных не найдена.
|
||||
# @RETURN: Dict - Информация о базе данных.
|
||||
def get_database(self, database_id: int) -> Dict:
|
||||
with belief_scope("get_database"):
|
||||
assert isinstance(database_id, int) and database_id > 0, "[get_database][PRE] database_id must be a positive integer."
|
||||
self.logger.info("[get_database][Enter] Fetching database %s.", database_id)
|
||||
response = self.network.request(method="GET", endpoint=f"/database/{database_id}")
|
||||
response = cast(Dict, response)
|
||||
self.logger.info("[get_database][Exit] Got database %s.", database_id)
|
||||
return response
|
||||
# [/DEF:get_database:Function]
|
||||
|
||||
# [DEF:update_dataset:Function]
|
||||
# @PURPOSE: Обновляет данные датасета по его ID.
|
||||
# @RELATION: CALLS -> self.network.request
|
||||
# @PARAM: dataset_id (int) - ID датасета.
|
||||
# @PARAM: data (Dict) - Данные для обновления.
|
||||
# @PRE: dataset_id должен быть положительным целым числом, data - непустым словарем.
|
||||
# @POST: Датасет успешно обновлен, возвращен ответ API.
|
||||
# @THROW: APIError - В случае ошибки сетевого запроса.
|
||||
# @RETURN: Dict - Ответ API.
|
||||
def update_dataset(self, dataset_id: int, data: Dict) -> Dict:
|
||||
with belief_scope("update_dataset"):
|
||||
assert isinstance(dataset_id, int) and dataset_id > 0, "[update_dataset][PRE] dataset_id must be a positive integer."
|
||||
assert isinstance(data, dict) and data, "[update_dataset][PRE] data must be a non-empty dictionary."
|
||||
self.logger.info("[update_dataset][Enter] Updating dataset %s.", dataset_id)
|
||||
response = self.network.request(
|
||||
method="PUT",
|
||||
endpoint=f"/dataset/{dataset_id}",
|
||||
data=json.dumps(data),
|
||||
headers={'Content-Type': 'application/json'}
|
||||
)
|
||||
response = cast(Dict, response)
|
||||
self.logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id)
|
||||
return response
|
||||
# [/DEF:update_dataset:Function]
|
||||
|
||||
# [/DEF:SupersetClient:Class]
|
||||
|
||||
# [/DEF:superset_tool.client:Module]
|
||||
@@ -1,173 +0,0 @@
# [DEF:superset_tool.exceptions:Module]
# @PURPOSE: Defines the hierarchy of custom exceptions for the whole tool, providing a single point of error handling.
# @SEMANTICS: exception, error, hierarchy
# @LAYER: Infra

# [SECTION: IMPORTS]
from pathlib import Path
from typing import Optional, Dict, Any, Union
# [/SECTION]

# [DEF:SupersetToolError:Class]
# @PURPOSE: Base class for all errors raised by the tool.
# @RELATION: INHERITS_FROM -> Exception
# @PARAM: message (str) - Error message.
# @PARAM: context (Optional[Dict[str, Any]]) - Additional error context.
class SupersetToolError(Exception):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the base tool error.
    # @PRE: message is a string, context is optional dict.
    # @POST: Error is initialized with combined message and context.
    def __init__(self, message: str, context: Optional[Dict[str, Any]] = None):
        self.context = context or {}
        super().__init__(f"{message} | Context: {self.context}")
    # [/DEF:__init__:Function]
# [/DEF:SupersetToolError:Class]

# [DEF:AuthenticationError:Class]
# @PURPOSE: Errors related to authentication or authorization.
# @RELATION: INHERITS_FROM -> SupersetToolError
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class AuthenticationError(SupersetToolError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes an authentication error.
    # @PRE: Optional message and context.
    # @POST: Error is initialized with authentication context.
    def __init__(self, message: str = "Authentication failed", **context: Any):
        super().__init__(f"[AUTH_FAILURE] {message}", context={"type": "authentication", **context})
    # [/DEF:__init__:Function]
# [/DEF:AuthenticationError:Class]

# [DEF:PermissionDeniedError:Class]
# @PURPOSE: Error raised when access to a resource is denied.
# @RELATION: INHERITS_FROM -> AuthenticationError
# @PARAM: message (str) - Error message.
# @PARAM: required_permission (Optional[str]) - Required permission.
# @PARAM: context (Any) - Additional error context.
class PermissionDeniedError(AuthenticationError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes a permission denied error.
    # @PRE: Optional message, permission string, and context.
    # @POST: Error is initialized with permission details.
    def __init__(self, message: str = "Permission denied", required_permission: Optional[str] = None, **context: Any):
        full_message = f"Permission denied: {required_permission}" if required_permission else message
        super().__init__(full_message, context={"required_permission": required_permission, **context})
    # [/DEF:__init__:Function]
# [/DEF:PermissionDeniedError:Class]

# [DEF:SupersetAPIError:Class]
# @PURPOSE: General errors when interacting with the Superset API.
# @RELATION: INHERITS_FROM -> SupersetToolError
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class SupersetAPIError(SupersetToolError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes a Superset API error.
    # @PRE: Optional message and context.
    # @POST: Error is initialized with API failure context.
    def __init__(self, message: str = "Superset API error", **context: Any):
        super().__init__(f"[API_FAILURE] {message}", context={"type": "api_call", **context})
    # [/DEF:__init__:Function]
# [/DEF:SupersetAPIError:Class]

# [DEF:ExportError:Class]
# @PURPOSE: Errors specific to export operations.
# @RELATION: INHERITS_FROM -> SupersetAPIError
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class ExportError(SupersetAPIError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes an export error.
    # @PRE: Optional message and context.
    # @POST: Error is initialized with export failure subtype.
    def __init__(self, message: str = "Dashboard export failed", **context: Any):
        super().__init__(f"[EXPORT_FAILURE] {message}", context={"subtype": "export", **context})
    # [/DEF:__init__:Function]
# [/DEF:ExportError:Class]

# [DEF:DashboardNotFoundError:Class]
# @PURPOSE: Error raised when the requested dashboard or resource is not found (404).
# @RELATION: INHERITS_FROM -> SupersetAPIError
# @PARAM: dashboard_id_or_slug (Union[int, str]) - Dashboard ID or slug.
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class DashboardNotFoundError(SupersetAPIError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes a dashboard not found error.
    # @PRE: dashboard_id_or_slug is provided.
    # @POST: Error is initialized with resource identification.
    def __init__(self, dashboard_id_or_slug: Union[int, str], message: str = "Dashboard not found", **context: Any):
        super().__init__(f"[NOT_FOUND] Dashboard '{dashboard_id_or_slug}' {message}", context={"subtype": "not_found", "resource_id": dashboard_id_or_slug, **context})
    # [/DEF:__init__:Function]
# [/DEF:DashboardNotFoundError:Class]

# [DEF:DatasetNotFoundError:Class]
# @PURPOSE: Error raised when the requested dataset does not exist (404).
# @RELATION: INHERITS_FROM -> SupersetAPIError
# @PARAM: dataset_id_or_slug (Union[int, str]) - Dataset ID or slug.
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class DatasetNotFoundError(SupersetAPIError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes a dataset not found error.
    # @PRE: dataset_id_or_slug is provided.
    # @POST: Error is initialized with resource identification.
    def __init__(self, dataset_id_or_slug: Union[int, str], message: str = "Dataset not found", **context: Any):
        super().__init__(f"[NOT_FOUND] Dataset '{dataset_id_or_slug}' {message}", context={"subtype": "not_found", "resource_id": dataset_id_or_slug, **context})
    # [/DEF:__init__:Function]
# [/DEF:DatasetNotFoundError:Class]

# [DEF:InvalidZipFormatError:Class]
# @PURPOSE: Error indicating an invalid ZIP archive format or content.
# @RELATION: INHERITS_FROM -> SupersetToolError
# @PARAM: message (str) - Error message.
# @PARAM: file_path (Optional[Union[str, Path]]) - Path to the file.
# @PARAM: context (Any) - Additional error context.
class InvalidZipFormatError(SupersetToolError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes an invalid ZIP format error.
    # @PRE: Optional message, file path, and context.
    # @POST: Error is initialized with file validation context.
    def __init__(self, message: str = "Invalid ZIP format or content", file_path: Optional[Union[str, Path]] = None, **context: Any):
        super().__init__(f"[FILE_ERROR] {message}", context={"type": "file_validation", "file_path": str(file_path) if file_path else "N/A", **context})
    # [/DEF:__init__:Function]
# [/DEF:InvalidZipFormatError:Class]

# [DEF:NetworkError:Class]
# @PURPOSE: Errors related to the network connection.
# @RELATION: INHERITS_FROM -> SupersetToolError
# @PARAM: message (str) - Error message.
# @PARAM: context (Any) - Additional error context.
class NetworkError(SupersetToolError):
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes a network error.
    # @PRE: Optional message and context.
    # @POST: Error is initialized with network failure context.
    def __init__(self, message: str = "Network connection failed", **context: Any):
        super().__init__(f"[NETWORK_FAILURE] {message}", context={"type": "network", **context})
    # [/DEF:__init__:Function]
# [/DEF:NetworkError:Class]

# [DEF:FileOperationError:Class]
# @PURPOSE: General file operation (I/O) errors.
# @RELATION: INHERITS_FROM -> SupersetToolError
class FileOperationError(SupersetToolError):
    pass
# [/DEF:FileOperationError:Class]

# [DEF:InvalidFileStructureError:Class]
# @PURPOSE: Error indicating an invalid file or directory structure.
# @RELATION: INHERITS_FROM -> FileOperationError
class InvalidFileStructureError(FileOperationError):
    pass
# [/DEF:InvalidFileStructureError:Class]

# [DEF:ConfigurationError:Class]
# @PURPOSE: Errors related to invalid tool configuration.
# @RELATION: INHERITS_FROM -> SupersetToolError
class ConfigurationError(SupersetToolError):
    pass
# [/DEF:ConfigurationError:Class]

# [/DEF:superset_tool.exceptions:Module]
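For orientation, a minimal sketch of how this removed exception hierarchy was consumed by callers; the `client.get_dashboard` call is a hypothetical example used only for illustration, not taken from this diff:

```python
# Sketch of consuming the removed exception hierarchy.
# `client.get_dashboard` is a hypothetical call used only for illustration.
from superset_tool.exceptions import (
    DashboardNotFoundError,
    SupersetAPIError,
    SupersetToolError,
)

def fetch_dashboard_or_none(client, dashboard_id: int):
    try:
        return client.get_dashboard(dashboard_id)
    except DashboardNotFoundError:
        return None  # most specific first: 404 for the requested dashboard
    except SupersetAPIError as exc:
        # Any other API failure; the base class attaches a context dict.
        print(f"API call failed: {exc} | context={exc.context}")
        raise
    except SupersetToolError:
        raise  # catch-all for the tool's own error hierarchy
```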
@@ -1,87 +0,0 @@
# [DEF:superset_tool.models:Module]
#
# @SEMANTICS: pydantic, model, config, validation, data-structure
# @PURPOSE: Defines Pydantic models for the tool's configuration, providing data validation.
# @LAYER: Infra
# @RELATION: DEPENDS_ON -> pydantic
# @RELATION: DEPENDS_ON -> superset_tool.utils.logger
# @PUBLIC_API: SupersetConfig, DatabaseConfig

# [SECTION: IMPORTS]
import re
from typing import Optional, Dict, Any
from pydantic import BaseModel, validator, Field
from .utils.logger import SupersetLogger
# [/SECTION]

# [DEF:SupersetConfig:Class]
# @PURPOSE: Configuration model for connecting to a single Superset API instance.
# @RELATION: INHERITS_FROM -> pydantic.BaseModel
class SupersetConfig(BaseModel):
    env: str = Field(..., description="Environment name (e.g. dev, prod).")
    base_url: str = Field(..., description="Base URL of the Superset API, including /api/v1.")
    auth: Dict[str, Any] = Field(..., description="Dictionary with authentication data (provider, username, password, refresh).")
    verify_ssl: bool = Field(True, description="Flag for verifying SSL certificates.")
    timeout: int = Field(30, description="Timeout in seconds for HTTP requests.")
    logger: Optional[SupersetLogger] = Field(None, description="Logger instance used for logging.")

    # [DEF:validate_auth:Function]
    # @PURPOSE: Verifies that the `auth` dictionary contains all fields required for authentication.
    # @PRE: `v` must be a dictionary.
    # @POST: Returns `v` if all required fields (`provider`, `username`, `password`, `refresh`) are present.
    # @THROW: ValueError - If required fields are missing.
    # @PARAM: v (Dict[str, str]) - Value of the auth field.
    @validator('auth')
    def validate_auth(cls, v: Dict[str, Any]) -> Dict[str, Any]:
        required = {'provider', 'username', 'password', 'refresh'}
        if not required.issubset(v.keys()):
            raise ValueError(f"The 'auth' dictionary must contain the fields: {required}. Missing: {required - v.keys()}")
        return v
    # [/DEF:validate_auth:Function]

    # [DEF:normalize_base_url:Function]
    # @PURPOSE: Normalizes `base_url`, appending `/api/v1` if it is missing.
    # @PRE: `v` must be a string.
    # @POST: Returns the normalized `v`.
    # @THROW: ValueError - If the URL format is invalid.
    # @PARAM: v (str) - Value of the base_url field.
    @validator('base_url')
    def normalize_base_url(cls, v: str) -> str:
        v = v.strip()
        if not v.startswith(('http://', 'https://')):
            raise ValueError(f"Invalid URL scheme: {v}. Must start with http:// or https://")

        if '/api/v1' not in v:
            v = f"{v.rstrip('/')}/api/v1"
        return v
    # [/DEF:normalize_base_url:Function]

    class Config:
        arbitrary_types_allowed = True
# [/DEF:SupersetConfig:Class]

# [DEF:DatabaseConfig:Class]
# @PURPOSE: Model for the database transformation parameters used during dashboard migration.
# @RELATION: INHERITS_FROM -> pydantic.BaseModel
class DatabaseConfig(BaseModel):
    database_config: Dict[str, Dict[str, Any]] = Field(..., description="Dictionary containing the 'old' and 'new' database configurations.")
    logger: Optional[SupersetLogger] = Field(None, description="Logger instance used for logging.")

    # [DEF:validate_config:Function]
    # @PURPOSE: Verifies that the `database_config` dictionary contains the 'old' and 'new' keys.
    # @PRE: `v` must be a dictionary.
    # @POST: Returns `v` if the 'old' and 'new' keys are present.
    # @THROW: ValueError - If required keys are missing.
    # @PARAM: v (Dict[str, Dict[str, Any]]) - Value of the database_config field.
    @validator('database_config')
    def validate_config(cls, v: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
        if not {'old', 'new'}.issubset(v.keys()):
            raise ValueError("'database_config' must contain the keys 'old' and 'new'.")
        return v
    # [/DEF:validate_config:Function]

    class Config:
        arbitrary_types_allowed = True
# [/DEF:DatabaseConfig:Class]

# [/DEF:superset_tool.models:Module]
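A minimal sketch of the removed validators in action, assuming pydantic v1 (which provides the `validator` decorator used above); the URL and credentials are placeholders:

```python
# Sketch of SupersetConfig validation; placeholder URL and credentials.
from superset_tool.models import SupersetConfig

config = SupersetConfig(
    env="dev",
    base_url="https://superset.example.com",  # normalize_base_url appends /api/v1
    auth={"provider": "db", "username": "user", "password": "secret", "refresh": "true"},
)
assert config.base_url == "https://superset.example.com/api/v1"

# A missing auth field fails validation: validate_auth raises ValueError,
# which pydantic surfaces as a ValidationError.
try:
    SupersetConfig(
        env="dev",
        base_url="https://superset.example.com",
        auth={"provider": "db", "username": "user"},
    )
except Exception as exc:
    print(f"Validation failed as expected: {exc}")
```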
@@ -1,5 +0,0 @@
# [DEF:superset_tool.utils:Module]
# @SEMANTICS: package, utils
# @PURPOSE: Utility package for superset_tool.
# @LAYER: Infra
# [/DEF:superset_tool.utils:Module]
@@ -1,110 +0,0 @@
# [DEF:superset_tool.utils.init_clients:Module]
#
# @SEMANTICS: utility, factory, client, initialization, configuration
# @PURPOSE: Centrally initializes Superset clients for the various environments (DEV, PROD, SBX, PREPROD), using `keyring` for secure access to passwords.
# @LAYER: Infra
# @RELATION: DEPENDS_ON -> superset_tool.models
# @RELATION: DEPENDS_ON -> superset_tool.client
# @RELATION: DEPENDS_ON -> keyring
# @PUBLIC_API: setup_clients

# [SECTION: IMPORTS]
import keyring
import os
from typing import Dict, List, Optional, Any
from superset_tool.models import SupersetConfig
from superset_tool.client import SupersetClient
from superset_tool.utils.logger import SupersetLogger
# [/SECTION]

# [DEF:setup_clients:Function]
# @PURPOSE: Initializes and returns a dictionary of `SupersetClient` instances.
# @PRE: `logger` must be a valid `SupersetLogger` instance.
# @POST: Returns a dictionary with the initialized clients.
# @THROW: Exception - On any other initialization error.
# @RELATION: CREATES_INSTANCE_OF -> SupersetConfig
# @RELATION: CREATES_INSTANCE_OF -> SupersetClient
# @PARAM: logger (SupersetLogger) - Logger instance used to record the process.
# @PARAM: custom_envs (List[Dict[str, Any]]) - List of custom environment settings.
# @RETURN: Dict[str, SupersetClient] - Dictionary keyed by environment name with `SupersetClient` values.
def setup_clients(logger: SupersetLogger, custom_envs: Optional[List[Any]] = None) -> Dict[str, SupersetClient]:
    logger.info("[setup_clients][Enter] Starting Superset clients initialization.")
    clients = {}

    try:
        # Try to load from ConfigManager if available
        try:
            from backend.src.dependencies import get_config_manager
            config_manager = get_config_manager()
            envs = config_manager.get_environments()
            if envs:
                logger.info("[setup_clients][Action] Loading environments from ConfigManager")
                for env in envs:
                    logger.debug("[setup_clients][State] Creating config for environment: %s", env.name)
                    config = SupersetConfig(
                        env=env.name,
                        base_url=env.url,
                        auth={"provider": "db", "username": env.username, "password": env.password, "refresh": "true"},
                        verify_ssl=False,
                        timeout=30,
                        logger=logger
                    )
                    clients[env.name] = SupersetClient(config, logger)
                return clients
        except Exception as e:
            logger.debug(f"[setup_clients][State] ConfigManager not available or failed: {e}")

        if custom_envs:
            for env in custom_envs:
                # Handle both dict and object (like Pydantic model)
                env_name = str(getattr(env, 'name', env.get('name') if isinstance(env, dict) else "unknown"))
                base_url = str(getattr(env, 'url', env.get('url') if isinstance(env, dict) else ""))
                username = str(getattr(env, 'username', env.get('username') if isinstance(env, dict) else ""))
                password = str(getattr(env, 'password', env.get('password') if isinstance(env, dict) else ""))

                logger.debug("[setup_clients][State] Creating config for custom environment: %s", env_name)
                config = SupersetConfig(
                    env=env_name,
                    base_url=base_url,
                    auth={"provider": "db", "username": username, "password": password, "refresh": "true"},
                    verify_ssl=False,
                    timeout=30,
                    logger=logger
                )
                clients[env_name] = SupersetClient(config, logger)
        else:
            # Fallback to hardcoded environments with keyring
            environments = {
                "dev": "https://devta.bi.dwh.rusal.com/api/v1",
                "prod": "https://prodta.bi.dwh.rusal.com/api/v1",
                "sbx": "https://sandboxta.bi.dwh.rusal.com/api/v1",
                "preprod": "https://preprodta.bi.dwh.rusal.com/api/v1",
                "uatta": "https://uatta.bi.dwh.rusal.com/api/v1",
                "dev5": "https://dev.bi.dwh.rusal.com/api/v1"
            }
            for env_name, base_url in environments.items():
                logger.debug("[setup_clients][State] Creating config for environment: %s", env_name.upper())
                password = keyring.get_password("system", f"{env_name} migrate")
                if not password:
                    logger.warning(f"Password for '{env_name} migrate' not found in keyring. Skipping.")
                    continue

                config = SupersetConfig(
                    env=env_name,
                    base_url=base_url,
                    auth={"provider": "db", "username": "migrate_user", "password": password, "refresh": "true"},
                    verify_ssl=False,
                    timeout=30,
                    logger=logger
                )
                clients[env_name] = SupersetClient(config, logger)

        logger.info("[setup_clients][Exit] All clients (%s) initialized successfully.", ', '.join(clients.keys()))
        return clients

    except Exception as e:
        logger.critical("[setup_clients][Failure] Critical error during client initialization: %s", e, exc_info=True)
        raise
# [/DEF:setup_clients:Function]

# [/DEF:superset_tool.utils.init_clients:Module]
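A minimal sketch of how the removed factory was invoked; with no `custom_envs`, it falls back to the hardcoded environments and expects keyring entries named "&lt;env&gt; migrate" to already exist:

```python
# Sketch of the removed setup_clients factory; keyring entries are assumed to exist.
from pathlib import Path
from superset_tool.utils.logger import SupersetLogger
from superset_tool.utils.init_clients import setup_clients

logger = SupersetLogger(name="migration", log_dir=Path("logs"), console=True)
clients = setup_clients(logger)  # dict keyed by environment name

for env_name, client in clients.items():
    print(f"{env_name}: {type(client).__name__}")
```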
@@ -1,156 +0,0 @@
# [DEF:superset_tool.utils.logger:Module]
#
# @SEMANTICS: logging, utility, infrastructure, wrapper
# @PURPOSE: Provides a universal wrapper around the standard `logging.Logger` for unified creation and management of loggers with console and/or file output.
# @LAYER: Infra
# @RELATION: WRAPS -> logging.Logger
#
# @INVARIANT: The logger must always have a name.
# @PUBLIC_API: SupersetLogger

# [SECTION: IMPORTS]
import logging
import sys
from datetime import datetime
from pathlib import Path
from typing import Optional, Any, Mapping, Generator
from contextlib import contextmanager
# [/SECTION]

# [DEF:belief_scope:Function]
# @PURPOSE: Context manager for belief state logging to maintain execution coherence.
# @PRE: scope_id must be a string.
# @POST: Entry and exit actions are logged.
# @PARAM: scope_id (str) - Identifier for the logical scope.
@contextmanager
def belief_scope(scope_id: str) -> Generator[None, None, None]:
    """Context manager for belief state logging."""
    logger = logging.getLogger("superset_tool")
    logger.debug(f"[BELIEF_ENTRY] {scope_id}")
    try:
        yield
    finally:
        logger.debug(f"[BELIEF_EXIT] {scope_id}")
# [/DEF:belief_scope:Function]

# [DEF:SupersetLogger:Class]
# @PURPOSE: A wrapper around `logging.Logger` that simplifies logger configuration and use.
# @RELATION: WRAPS -> logging.Logger
class SupersetLogger:
    # [DEF:__init__:Function]
    # @PURPOSE: Configures and initializes the logger, adding file and/or console handlers.
    # @PRE: If log_dir is provided, the path must be valid (or creatable).
    # @POST: `self.logger` is ready for use with the configured handlers.
    # @PARAM: name (str) - Logger identifier.
    # @PARAM: log_dir (Optional[Path]) - Directory for storing log files.
    # @PARAM: level (int) - Logging level (e.g. `logging.INFO`).
    # @PARAM: console (bool) - Flag to enable console output.
    def __init__(self, name: str = "superset_tool", log_dir: Optional[Path] = None, level: int = logging.INFO, console: bool = True, logger: Optional[logging.Logger] = None) -> None:
        with belief_scope("__init__"):
            if logger:
                self.logger = logger
                return

            self.logger = logging.getLogger(name)
            self.logger.setLevel(level)
            self.logger.propagate = False

            formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")

            if self.logger.hasHandlers():
                self.logger.handlers.clear()

            if log_dir:
                log_dir.mkdir(parents=True, exist_ok=True)
                timestamp = datetime.now().strftime("%Y%m%d")
                file_handler = logging.FileHandler(log_dir / f"{name}_{timestamp}.log", encoding="utf-8")
                file_handler.setFormatter(formatter)
                self.logger.addHandler(file_handler)

            if console:
                console_handler = logging.StreamHandler(sys.stdout)
                console_handler.setFormatter(formatter)
                self.logger.addHandler(console_handler)
    # [/DEF:__init__:Function]

    # [DEF:_log:Function]
    # @PURPOSE: (Helper) Universal method for dispatching to the appropriate logging level.
    # @PRE: level_method must be a callable logger method. msg must be a string.
    # @POST: The message is written to the log.
    # @PARAM: level_method (Any) - Logger method (info, debug, etc.).
    # @PARAM: msg (str) - The message.
    # @PARAM: args (Any) - Formatting arguments.
    # @PARAM: extra (Optional[Mapping[str, Any]]) - Additional data.
    # @PARAM: exc_info (bool) - Whether to include exception information.
    def _log(self, level_method: Any, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
        with belief_scope("_log"):
            level_method(msg, *args, extra=extra, exc_info=exc_info)
    # [/DEF:_log:Function]

    # [DEF:info:Function]
    # @PURPOSE: Logs an INFO-level message.
    # @PRE: msg must be a string.
    # @POST: The INFO-level message is logged.
    def info(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
        with belief_scope("info"):
            self._log(self.logger.info, msg, *args, extra=extra, exc_info=exc_info)
    # [/DEF:info:Function]

    # [DEF:debug:Function]
    # @PURPOSE: Logs a DEBUG-level message.
    # @PRE: msg must be a string.
    # @POST: The DEBUG-level message is logged.
    def debug(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
        with belief_scope("debug"):
            self._log(self.logger.debug, msg, *args, extra=extra, exc_info=exc_info)
    # [/DEF:debug:Function]

    # [DEF:warning:Function]
    # @PURPOSE: Logs a WARNING-level message.
    # @PRE: msg must be a string.
    # @POST: The WARNING-level message is logged.
    def warning(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
        with belief_scope("warning"):
            self._log(self.logger.warning, msg, *args, extra=extra, exc_info=exc_info)
    # [/DEF:warning:Function]

    # [DEF:error:Function]
    # @PURPOSE: Logs an ERROR-level message.
    # @PRE: msg must be a string.
    # @POST: The ERROR-level message is logged.
    def error(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
        with belief_scope("error"):
            self._log(self.logger.error, msg, *args, extra=extra, exc_info=exc_info)
    # [/DEF:error:Function]

    # [DEF:critical:Function]
    # @PURPOSE: Logs a CRITICAL-level message.
    # @PRE: msg must be a string.
    # @POST: The CRITICAL-level message is logged.
    def critical(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
        with belief_scope("critical"):
            self._log(self.logger.critical, msg, *args, extra=extra, exc_info=exc_info)
    # [/DEF:critical:Function]

    # [DEF:exception:Function]
    # @PURPOSE: Logs an ERROR-level message together with the current exception's stack trace.
    # @PRE: msg must be a string.
    # @POST: The error message with traceback is logged.
    def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
        with belief_scope("exception"):
            self.logger.exception(msg, *args, **kwargs)
    # [/DEF:exception:Function]

    # [DEF:belief_scope:Method]
    # @PURPOSE: Instance method wrapper for belief_scope context manager.
    # @PRE: scope_id must be a string.
    # @POST: Enters the belief scope.
    @contextmanager
    def belief_scope(self, scope_id: str) -> Generator[None, None, None]:
        with belief_scope(scope_id):
            yield
    # [/DEF:belief_scope:Method]

# [/DEF:SupersetLogger:Class]

# [/DEF:superset_tool.utils.logger:Module]
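A minimal usage sketch of the removed logger wrapper and its `belief_scope` context manager; the log directory, scope name and messages are placeholders:

```python
# Sketch of the removed SupersetLogger / belief_scope usage; paths and IDs are placeholders.
import logging
from pathlib import Path
from superset_tool.utils.logger import SupersetLogger, belief_scope

logger = SupersetLogger(name="superset_tool", log_dir=Path("logs"), level=logging.DEBUG, console=True)

with belief_scope("export_dashboard"):  # emits [BELIEF_ENTRY]/[BELIEF_EXIT] at DEBUG level
    logger.info("[export_dashboard][Enter] Exporting dashboard %s.", 42)
    logger.warning("[export_dashboard][State] Retrying after a timeout.")
    logger.info("[export_dashboard][Exit] Done.")
```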
@@ -1,157 +0,0 @@
# [DEF:superset_tool.utils.whiptail_fallback:Module]
#
# @SEMANTICS: ui, fallback, console, utility, interactive
# @PURPOSE: Provides a compact console UI fallback for interactive dialogs, mimicking `whiptail` on systems where it is unavailable.
# @LAYER: UI
# @PUBLIC_API: menu, checklist, yesno, msgbox, inputbox, gauge

# [SECTION: IMPORTS]
import sys
from typing import List, Tuple, Optional, Any
from .logger import belief_scope
# [/SECTION]

# [DEF:menu:Function]
# @PURPOSE: Displays a selection menu and returns the chosen item.
# @PARAM: title (str) - Menu title.
# @PARAM: prompt (str) - Input prompt.
# @PARAM: choices (List[str]) - List of options to choose from.
# @PRE: choices must be a non-empty list of strings.
# @POST: Returns a tuple with return code and selected choice.
# @RETURN: Tuple[int, Optional[str]] - Tuple (return code, selected item). rc=0 means success.
def menu(title: str, prompt: str, choices: List[str], **kwargs) -> Tuple[int, Optional[str]]:
    with belief_scope("menu"):
        print(f"\n=== {title} ===\n{prompt}")
        for idx, item in enumerate(choices, 1):
            print(f"{idx}) {item}")
        try:
            raw = input("\nEnter a number (0 – cancel): ").strip()
            sel = int(raw)
            return (0, choices[sel - 1]) if 0 < sel <= len(choices) else (1, None)
        except (ValueError, IndexError):
            return 1, None
# [/DEF:menu:Function]

# [DEF:checklist:Function]
# @PURPOSE: Displays a list allowing multiple selection.
# @PARAM: title (str) - Title.
# @PARAM: prompt (str) - Input prompt.
# @PARAM: options (List[Tuple[str, str]]) - List of (value, label) tuples.
# @PRE: options must be a list of (value, label) tuples.
# @POST: Returns a list of selected values.
# @RETURN: Tuple[int, List[str]] - Tuple (return code, list of selected values).
def checklist(title: str, prompt: str, options: List[Tuple[str, str]], **kwargs) -> Tuple[int, List[str]]:
    with belief_scope("checklist"):
        print(f"\n=== {title} ===\n{prompt}")
        for idx, (val, label) in enumerate(options, 1):
            print(f"{idx}) [{val}] {label}")
        raw = input("\nEnter numbers separated by commas (empty input → cancel): ").strip()
        if not raw: return 1, []
        try:
            indices = {int(x.strip()) for x in raw.split(",") if x.strip()}
            selected_values = [options[i - 1][0] for i in indices if 0 < i <= len(options)]
            return 0, selected_values
        except (ValueError, IndexError):
            return 1, []
# [/DEF:checklist:Function]

# [DEF:yesno:Function]
# @PURPOSE: Asks a yes/no question.
# @PARAM: title (str) - Title.
# @PARAM: question (str) - Question for the user.
# @PRE: question must be a string.
# @POST: Returns boolean based on user input.
# @RETURN: bool - `True` if the user answered yes.
def yesno(title: str, question: str, **kwargs) -> bool:
    with belief_scope("yesno"):
        ans = input(f"\n=== {title} ===\n{question} (y/n): ").strip().lower()
        return ans in ("y", "yes", "да", "д")
# [/DEF:yesno:Function]

# [DEF:msgbox:Function]
# @PURPOSE: Displays an informational message.
# @PARAM: title (str) - Title.
# @PARAM: msg (str) - Message text.
# @PRE: msg must be a string.
# @POST: Message is printed to console.
def msgbox(title: str, msg: str, **kwargs) -> None:
    with belief_scope("msgbox"):
        print(f"\n=== {title} ===\n{msg}\n")
# [/DEF:msgbox:Function]

# [DEF:inputbox:Function]
# @PURPOSE: Requests text input from the user.
# @PARAM: title (str) - Title.
# @PARAM: prompt (str) - Input prompt.
# @PRE: prompt must be a string.
# @POST: Returns user input string.
# @RETURN: Tuple[int, Optional[str]] - Tuple (return code, entered string).
def inputbox(title: str, prompt: str, **kwargs) -> Tuple[int, Optional[str]]:
    with belief_scope("inputbox"):
        print(f"\n=== {title} ===")
        val = input(f"{prompt}\n")
        return (0, val) if val else (1, None)
# [/DEF:inputbox:Function]

# [DEF:_ConsoleGauge:Class]
# @PURPOSE: Context manager that mimics `whiptail gauge` in the console.
class _ConsoleGauge:
    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the gauge.
    # @PRE: title must be a string.
    # @POST: Instance initialized.
    def __init__(self, title: str, **kwargs):
        with belief_scope("__init__"):
            self.title = title
    # [/DEF:__init__:Function]

    # [DEF:__enter__:Function]
    # @PURPOSE: Enters the context.
    # @PRE: Instance initialized.
    # @POST: Header printed, returns self.
    def __enter__(self):
        with belief_scope("__enter__"):
            print(f"\n=== {self.title} ===")
            return self
    # [/DEF:__enter__:Function]

    # [DEF:__exit__:Function]
    # @PURPOSE: Exits the context.
    # @PRE: Context entered.
    # @POST: Newline printed.
    def __exit__(self, exc_type, exc_val, exc_tb):
        with belief_scope("__exit__"):
            sys.stdout.write("\n"); sys.stdout.flush()
    # [/DEF:__exit__:Function]

    # [DEF:set_text:Function]
    # @PURPOSE: Sets the gauge text.
    # @PRE: txt must be a string.
    # @POST: Text written to stdout.
    def set_text(self, txt: str) -> None:
        with belief_scope("set_text"):
            sys.stdout.write(f"\r{txt} "); sys.stdout.flush()
    # [/DEF:set_text:Function]

    # [DEF:set_percent:Function]
    # @PURPOSE: Sets the gauge percentage.
    # @PRE: percent must be an integer.
    # @POST: Percentage written to stdout.
    def set_percent(self, percent: int) -> None:
        with belief_scope("set_percent"):
            sys.stdout.write(f"{percent}%"); sys.stdout.flush()
    # [/DEF:set_percent:Function]
# [/DEF:_ConsoleGauge:Class]

# [DEF:gauge:Function]
# @PURPOSE: Creates and returns a `_ConsoleGauge` instance.
# @PRE: title must be a string.
# @POST: Returns an instance of _ConsoleGauge.
# @PARAM: title (str) - Title for the progress indicator.
# @RETURN: _ConsoleGauge - A context manager instance.
def gauge(title: str, **kwargs) -> _ConsoleGauge:
    with belief_scope("gauge"):
        return _ConsoleGauge(title, **kwargs)
# [/DEF:gauge:Function]

# [/DEF:superset_tool.utils.whiptail_fallback:Module]
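A minimal interactive sketch of the removed console fallback; it runs in any terminal without the whiptail binary, and the environment names and dashboard IDs are placeholders:

```python
# Sketch of the removed console dialogs; environment names and IDs are placeholders.
from superset_tool.utils.whiptail_fallback import menu, checklist, yesno, gauge

rc, env = menu("Superset migration", "Select target environment:", ["dev", "prod", "sbx"])
if rc == 0 and yesno("Confirm", f"Migrate dashboards to {env}?"):
    rc, ids = checklist("Dashboards", "Select dashboards:", [("12", "Sales"), ("34", "Finance")])
    with gauge("Migrating") as g:
        for i, dash_id in enumerate(ids, 1):
            g.set_text(f"Dashboard {dash_id}")
            g.set_percent(int(i * 100 / max(len(ids), 1)))
```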