# ss-tools/backend/src/plugins/backup.py
# [DEF:BackupPlugin:Module]
# @SEMANTICS: backup, superset, automation, dashboard, plugin
# @PURPOSE: A plugin that provides functionality to back up Superset dashboards.
# @LAYER: App
# @RELATION: IMPLEMENTS -> PluginBase
# @RELATION: DEPENDS_ON -> superset_tool.client
# @RELATION: DEPENDS_ON -> superset_tool.utils
from typing import Dict, Any
from pathlib import Path
from requests.exceptions import RequestException
from ..core.plugin_base import PluginBase
from superset_tool.client import SupersetClient
from superset_tool.exceptions import SupersetAPIError
from superset_tool.utils.logger import SupersetLogger
from superset_tool.utils.fileio import (
    save_and_unpack_dashboard,
    archive_exports,
    sanitize_filename,
    consolidate_archive_folders,
    remove_empty_directories,
    RetentionPolicy
)
from superset_tool.utils.init_clients import setup_clients
from ..dependencies import get_config_manager


class BackupPlugin(PluginBase):
    """
    A plugin to back up Superset dashboards.
    """

    @property
    def id(self) -> str:
        return "superset-backup"

    @property
    def name(self) -> str:
        return "Superset Dashboard Backup"

    @property
    def description(self) -> str:
        return "Backs up all dashboards from a Superset instance."

    @property
    def version(self) -> str:
        return "1.0.0"
    def get_schema(self) -> Dict[str, Any]:
        # JSON Schema describing the parameters this plugin accepts.
        config_manager = get_config_manager()
        envs = [e.name for e in config_manager.get_environments()]
        default_path = config_manager.get_config().settings.backup_path
        return {
            "type": "object",
            "properties": {
                "env": {
                    "type": "string",
                    "title": "Environment",
                    "description": "The Superset environment to back up.",
                    "enum": envs if envs else [],
                },
                "backup_path": {
                    "type": "string",
                    "title": "Backup Path",
                    "description": "The root directory to save backups to.",
                    "default": default_path,
                },
            },
            "required": ["env", "backup_path"],
        }
    async def execute(self, params: Dict[str, Any]):
        config_manager = get_config_manager()

        # Resolve the environment name when an environment_id is provided instead of a name.
        env_id = params.get("environment_id")
        if env_id:
            env_config = next((e for e in config_manager.get_environments() if e.id == env_id), None)
            if env_config:
                params["env"] = env_config.name

        env = params.get("env")
        if not env:
            raise KeyError("env")

        backup_path_str = params.get("backup_path") or config_manager.get_config().settings.backup_path
        backup_path = Path(backup_path_str)
        logger = SupersetLogger(log_dir=backup_path / "Logs", console=True)
        logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")

        try:
            if not config_manager.has_environments():
                raise ValueError("No Superset environments configured. Please add an environment in Settings.")

            clients = setup_clients(logger, custom_envs=config_manager.get_environments())
            client = clients.get(env)
            if not client:
                raise ValueError(f"Environment '{env}' not found in configuration.")

            dashboard_count, dashboard_meta = client.get_dashboards()
            logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")
            if dashboard_count == 0:
                logger.info("[BackupPlugin][Exit] No dashboards to back up.")
                return
            for db in dashboard_meta:
                dashboard_id = db.get('id')
                dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
                if not dashboard_id:
                    continue
                try:
                    # Each dashboard is exported into its own directory under <backup_path>/<ENV>/<title>.
                    dashboard_base_dir_name = sanitize_filename(dashboard_title)
                    dashboard_dir = backup_path / env.upper() / dashboard_base_dir_name
                    dashboard_dir.mkdir(parents=True, exist_ok=True)

                    zip_content, filename = client.export_dashboard(dashboard_id)
                    save_and_unpack_dashboard(
                        zip_content=zip_content,
                        original_filename=filename,
                        output_dir=dashboard_dir,
                        unpack=False,
                        logger=logger
                    )
                    archive_exports(str(dashboard_dir), policy=RetentionPolicy(), logger=logger)
                except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
                    # A failure on one dashboard should not abort the whole run; log it and move on.
                    logger.error(
                        f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}",
                        exc_info=True
                    )
                    continue
            consolidate_archive_folders(backup_path / env.upper(), logger=logger)
            remove_empty_directories(str(backup_path / env.upper()), logger=logger)
            logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")
        except (RequestException, IOError, KeyError) as e:
            logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
            # Re-raise with the original traceback so the caller sees the failure.
            raise
# [/DEF:BackupPlugin]
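
# Usage sketch (illustrative, not part of the plugin). How BackupPlugin is
# discovered and scheduled by the surrounding application is not shown in this
# file, so the direct invocation below is an assumption; the environment name
# "prod" and the backup path are placeholders.
#
#   import asyncio
#
#   plugin = BackupPlugin()
#   asyncio.run(plugin.execute({"env": "prod", "backup_path": "/srv/superset-backups"}))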