WIP: Staged all changes
This commit is contained in:
121
backend/src/plugins/backup.py
Normal file
121
backend/src/plugins/backup.py
Normal file
@@ -0,0 +1,121 @@
|
||||
# [DEF:BackupPlugin:Module]
|
||||
# @SEMANTICS: backup, superset, automation, dashboard, plugin
|
||||
# @PURPOSE: A plugin that provides functionality to back up Superset dashboards.
|
||||
# @LAYER: App
|
||||
# @RELATION: IMPLEMENTS -> PluginBase
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.client
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.utils
|
||||
|
||||
from typing import Dict, Any
|
||||
from pathlib import Path
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
from ..core.plugin_base import PluginBase
|
||||
from superset_tool.client import SupersetClient
|
||||
from superset_tool.exceptions import SupersetAPIError
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
from superset_tool.utils.fileio import (
|
||||
save_and_unpack_dashboard,
|
||||
archive_exports,
|
||||
sanitize_filename,
|
||||
consolidate_archive_folders,
|
||||
remove_empty_directories,
|
||||
RetentionPolicy
|
||||
)
|
||||
from superset_tool.utils.init_clients import setup_clients
|
||||
|
||||
class BackupPlugin(PluginBase):
    """
    A plugin to back up Superset dashboards.

    Exports every dashboard from the selected environment into a
    per-dashboard directory under ``backup_path``, archives the exports
    according to the retention policy, then tidies the backup tree.
    """

    @property
    def id(self) -> str:
        # Stable identifier used to register and look up the plugin.
        return "superset-backup"

    @property
    def name(self) -> str:
        return "Superset Dashboard Backup"

    @property
    def description(self) -> str:
        return "Backs up all dashboards from a Superset instance."

    @property
    def version(self) -> str:
        return "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON schema describing this plugin's parameters."""
        return {
            "type": "object",
            "properties": {
                "env": {
                    "type": "string",
                    "title": "Environment",
                    "description": "The Superset environment to back up (e.g., 'dev', 'prod').",
                    "enum": ["dev", "sbx", "prod", "preprod"],
                },
                "backup_path": {
                    "type": "string",
                    "title": "Backup Path",
                    "description": "The root directory to save backups to.",
                    "default": "P:\\Superset\\010 Бекапы"
                }
            },
            "required": ["env", "backup_path"],
        }

    async def execute(self, params: Dict[str, Any]) -> None:
        """
        Back up all dashboards of one environment.

        Parameters
        ----------
        params : Dict[str, Any]
            Validated against :meth:`get_schema`; must contain ``env``
            and ``backup_path``.

        Raises
        ------
        RequestException, IOError, KeyError
            Re-raised after logging when the backup as a whole fails.
            Per-dashboard export failures are logged and skipped instead.
        """
        env = params["env"]
        backup_path = Path(params["backup_path"])

        logger = SupersetLogger(log_dir=backup_path / "Logs", console=True)
        logger.info(f"[BackupPlugin][Entry] Starting backup for {env}.")

        try:
            clients = setup_clients(logger)
            # A missing environment raises KeyError, handled as fatal below.
            client = clients[env]

            dashboard_count, dashboard_meta = client.get_dashboards()
            logger.info(f"[BackupPlugin][Progress] Found {dashboard_count} dashboards to export in {env}.")

            if dashboard_count == 0:
                logger.info("[BackupPlugin][Exit] No dashboards to back up.")
                return

            # Hoisted: the per-environment root is identical for every dashboard.
            env_root = backup_path / env.upper()

            for db in dashboard_meta:
                dashboard_id = db.get('id')
                dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
                if not dashboard_id:
                    # Malformed metadata entry; nothing we can export.
                    continue

                try:
                    dashboard_dir = env_root / sanitize_filename(dashboard_title)
                    dashboard_dir.mkdir(parents=True, exist_ok=True)

                    zip_content, filename = client.export_dashboard(dashboard_id)

                    save_and_unpack_dashboard(
                        zip_content=zip_content,
                        original_filename=filename,
                        output_dir=dashboard_dir,
                        unpack=False,
                        logger=logger
                    )

                    archive_exports(str(dashboard_dir), policy=RetentionPolicy(), logger=logger)

                except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
                    # One failed dashboard must not abort the whole backup run.
                    logger.error(f"[BackupPlugin][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
                    continue

            consolidate_archive_folders(env_root, logger=logger)
            remove_empty_directories(str(env_root), logger=logger)

            logger.info(f"[BackupPlugin][CoherenceCheck:Passed] Backup logic completed for {env}.")

        except (RequestException, IOError, KeyError) as e:
            logger.critical(f"[BackupPlugin][Failure] Fatal error during backup for {env}: {e}", exc_info=True)
            # Bare raise preserves the original traceback (``raise e`` restarts it here).
            raise
# [/DEF:BackupPlugin]
|
||||
150
backend/src/plugins/migration.py
Normal file
150
backend/src/plugins/migration.py
Normal file
@@ -0,0 +1,150 @@
|
||||
# [DEF:MigrationPlugin:Module]
|
||||
# @SEMANTICS: migration, superset, automation, dashboard, plugin
|
||||
# @PURPOSE: A plugin that provides functionality to migrate Superset dashboards between environments.
|
||||
# @LAYER: App
|
||||
# @RELATION: IMPLEMENTS -> PluginBase
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.client
|
||||
# @RELATION: DEPENDS_ON -> superset_tool.utils
|
||||
|
||||
from typing import Dict, Any, List
|
||||
from pathlib import Path
|
||||
import zipfile
|
||||
import re
|
||||
|
||||
from ..core.plugin_base import PluginBase
|
||||
from superset_tool.client import SupersetClient
|
||||
from superset_tool.utils.init_clients import setup_clients
|
||||
from superset_tool.utils.fileio import create_temp_file, update_yamls, create_dashboard_export
|
||||
from superset_tool.utils.logger import SupersetLogger
|
||||
|
||||
class MigrationPlugin(PluginBase):
    """
    A plugin to migrate Superset dashboards between environments.

    Exports dashboards whose title matches a regex from the source
    environment and imports them into the target environment, optionally
    rewriting the database configuration inside the export on the way.
    """

    @property
    def id(self) -> str:
        # Stable identifier used to register and look up the plugin.
        return "superset-migration"

    @property
    def name(self) -> str:
        return "Superset Dashboard Migration"

    @property
    def description(self) -> str:
        return "Migrates dashboards between Superset environments."

    @property
    def version(self) -> str:
        return "1.0.0"

    def get_schema(self) -> Dict[str, Any]:
        """Return the JSON schema describing this plugin's parameters."""
        return {
            "type": "object",
            "properties": {
                "from_env": {
                    "type": "string",
                    "title": "Source Environment",
                    "description": "The environment to migrate from.",
                    "enum": ["dev", "sbx", "prod", "preprod"],
                },
                "to_env": {
                    "type": "string",
                    "title": "Target Environment",
                    "description": "The environment to migrate to.",
                    "enum": ["dev", "sbx", "prod", "preprod"],
                },
                "dashboard_regex": {
                    "type": "string",
                    "title": "Dashboard Regex",
                    "description": "A regular expression to filter dashboards to migrate.",
                },
                "replace_db_config": {
                    "type": "boolean",
                    "title": "Replace DB Config",
                    "description": "Whether to replace the database configuration.",
                    "default": False,
                },
                "from_db_id": {
                    "type": "integer",
                    "title": "Source DB ID",
                    "description": "The ID of the source database to replace (if replacing).",
                },
                "to_db_id": {
                    "type": "integer",
                    "title": "Target DB ID",
                    "description": "The ID of the target database to replace with (if replacing).",
                },
            },
            "required": ["from_env", "to_env", "dashboard_regex"],
        }

    async def execute(self, params: Dict[str, Any]) -> None:
        """
        Migrate matching dashboards from one environment to another.

        Parameters
        ----------
        params : Dict[str, Any]
            Validated against :meth:`get_schema`; must contain
            ``from_env``, ``to_env`` and ``dashboard_regex``; may contain
            ``replace_db_config``, ``from_db_id`` and ``to_db_id``.

        Raises
        ------
        ValueError
            When ``replace_db_config`` is set but either database ID is missing.
        Exception
            Any fatal error is logged and re-raised; per-dashboard import
            failures are logged and the loop continues.
        """
        from_env = params["from_env"]
        to_env = params["to_env"]
        dashboard_regex = params["dashboard_regex"]
        replace_db_config = params.get("replace_db_config", False)
        from_db_id = params.get("from_db_id")
        to_db_id = params.get("to_db_id")

        logger = SupersetLogger(log_dir=Path.cwd() / "logs", console=True)
        logger.info(f"[MigrationPlugin][Entry] Starting migration from {from_env} to {to_env}.")

        try:
            all_clients = setup_clients(logger)
            from_c = all_clients[from_env]
            to_c = all_clients[to_env]

            _, all_dashboards = from_c.get_dashboards()

            # Compile once; the schema guarantees dashboard_regex is a string,
            # so the previous str() round-trip was redundant.
            title_pattern = re.compile(dashboard_regex, re.IGNORECASE)
            dashboards_to_migrate = [
                d for d in all_dashboards if title_pattern.search(d["dashboard_title"])
            ]

            if not dashboards_to_migrate:
                logger.warning("[MigrationPlugin][State] No dashboards found matching the regex.")
                return

            db_config_replacement = None
            if replace_db_config:
                if from_db_id is None or to_db_id is None:
                    raise ValueError("Source and target database IDs are required when replacing database configuration.")
                from_db = from_c.get_database(int(from_db_id))
                to_db = to_c.get_database(int(to_db_id))
                old_result = from_db.get("result", {})
                new_result = to_db.get("result", {})
                # Mapping consumed by update_yamls to rewrite DB references in the export.
                db_config_replacement = {
                    "old": {"database_name": old_result.get("database_name"), "uuid": old_result.get("uuid"), "id": str(from_db.get("id"))},
                    "new": {"database_name": new_result.get("database_name"), "uuid": new_result.get("uuid"), "id": str(to_db.get("id"))}
                }

            for dash in dashboards_to_migrate:
                dash_id, dash_slug, title = dash["id"], dash.get("slug"), dash["dashboard_title"]

                try:
                    exported_content, _ = from_c.export_dashboard(dash_id)
                    # NOTE(review): dry_run=True on a real export looks suspicious —
                    # confirm create_temp_file semantics before changing it.
                    with create_temp_file(content=exported_content, dry_run=True, suffix=".zip", logger=logger) as tmp_zip_path:
                        if not db_config_replacement:
                            # No rewrite needed: import the export as-is.
                            to_c.import_dashboard(file_name=tmp_zip_path, dash_id=dash_id, dash_slug=dash_slug)
                        else:
                            # Unpack, rewrite DB config in the YAMLs, repack, then import.
                            with create_temp_file(suffix=".dir", logger=logger) as tmp_unpack_dir:
                                with zipfile.ZipFile(tmp_zip_path, "r") as zip_ref:
                                    zip_ref.extractall(tmp_unpack_dir)

                                update_yamls(db_configs=[db_config_replacement], path=str(tmp_unpack_dir))

                                with create_temp_file(suffix=".zip", dry_run=True, logger=logger) as tmp_new_zip:
                                    create_dashboard_export(zip_path=tmp_new_zip, source_paths=[str(p) for p in Path(tmp_unpack_dir).glob("**/*")])
                                    to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)

                    logger.info(f"[MigrationPlugin][Success] Dashboard {title} imported.")
                except Exception as exc:
                    # Best-effort per dashboard: log and keep migrating the rest.
                    logger.error(f"[MigrationPlugin][Failure] Failed to migrate dashboard {title}: {exc}", exc_info=True)

            logger.info("[MigrationPlugin][Exit] Migration finished.")

        except Exception as e:
            logger.critical(f"[MigrationPlugin][Failure] Fatal error during migration: {e}", exc_info=True)
            # Bare raise preserves the original traceback (``raise e`` restarts it here).
            raise
# [/DEF:MigrationPlugin]
|
||||
Reference in New Issue
Block a user