feat: implement plugin architecture and application settings with Svelte UI
- Added plugin base and loader for backend extensibility
- Implemented application settings management with config persistence
- Created Svelte-based frontend with Dashboard and Settings pages
- Added API routes for plugins, tasks, and settings
- Updated documentation and specifications
- Improved project structure and developer tools
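For context, a minimal sketch of what the plugin base and loader could look like. This is an illustrative assumption, not the actual implementation: the `plugins` package name, `PluginBase`, and `load_plugins` are hypothetical.

```python
# Hypothetical sketch of a plugin base class and loader; names are assumptions.
import importlib
import pkgutil
from abc import ABC, abstractmethod


class PluginBase(ABC):
    """Minimal contract every backend plugin is expected to implement."""

    name: str = "unnamed"

    @abstractmethod
    def run(self, **kwargs) -> None:
        """Execute the plugin's task."""


def load_plugins(package_name: str = "plugins") -> list[PluginBase]:
    """Import every module in the plugins package and instantiate its PluginBase subclasses."""
    package = importlib.import_module(package_name)
    plugins: list[PluginBase] = []
    for module_info in pkgutil.iter_modules(package.__path__):
        module = importlib.import_module(f"{package_name}.{module_info.name}")
        for attr in vars(module).values():
            if isinstance(attr, type) and issubclass(attr, PluginBase) and attr is not PluginBase:
                plugins.append(attr())  # register each discovered plugin
    return plugins
```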
backup_script.py (326 changed lines): Normal file → Executable file
@@ -1,163 +1,163 @@
# [DEF:backup_script:Module]
#
# @SEMANTICS: backup, superset, automation, dashboard
# @PURPOSE: This module is responsible for automated backups of Superset dashboards.
# @LAYER: App
# @RELATION: DEPENDS_ON -> superset_tool.client
# @RELATION: DEPENDS_ON -> superset_tool.utils
# @PUBLIC_API: BackupConfig, backup_dashboards, main

# [SECTION: IMPORTS]
import logging
import sys
from pathlib import Path
from dataclasses import dataclass, field
from requests.exceptions import RequestException
from superset_tool.client import SupersetClient
from superset_tool.exceptions import SupersetAPIError
from superset_tool.utils.logger import SupersetLogger
from superset_tool.utils.fileio import (
    save_and_unpack_dashboard,
    archive_exports,
    sanitize_filename,
    consolidate_archive_folders,
    remove_empty_directories,
    RetentionPolicy
)
from superset_tool.utils.init_clients import setup_clients
# [/SECTION]

# [DEF:BackupConfig:DataClass]
# @PURPOSE: Stores the configuration for the backup process.
@dataclass
class BackupConfig:
    """Configuration for the backup process."""
    consolidate: bool = True
    rotate_archive: bool = True
    clean_folders: bool = True
    retention_policy: RetentionPolicy = field(default_factory=RetentionPolicy)
# [/DEF:BackupConfig]

# [DEF:backup_dashboards:Function]
# @PURPOSE: Backs up all available dashboards for the given client and environment, skipping export errors.
# @PRE: `client` must be an initialized `SupersetClient` instance.
# @PRE: `env_name` must be a string identifying the environment.
# @PRE: `backup_root` must be a valid path to the backup root directory.
# @POST: Dashboards are exported and saved. Export errors are logged and do not stop the script.
# @RELATION: CALLS -> client.get_dashboards
# @RELATION: CALLS -> client.export_dashboard
# @RELATION: CALLS -> save_and_unpack_dashboard
# @RELATION: CALLS -> archive_exports
# @RELATION: CALLS -> consolidate_archive_folders
# @RELATION: CALLS -> remove_empty_directories
# @PARAM: client (SupersetClient) - Client for accessing the Superset API.
# @PARAM: env_name (str) - Environment name (e.g., 'PROD').
# @PARAM: backup_root (Path) - Root directory where backups are saved.
# @PARAM: logger (SupersetLogger) - Logger instance.
# @PARAM: config (BackupConfig) - Backup process configuration.
# @RETURN: bool - `True` if all dashboards were exported without critical errors, `False` otherwise.
def backup_dashboards(
    client: SupersetClient,
    env_name: str,
    backup_root: Path,
    logger: SupersetLogger,
    config: BackupConfig
) -> bool:
    logger.info(f"[backup_dashboards][Entry] Starting backup for {env_name}.")
    try:
        dashboard_count, dashboard_meta = client.get_dashboards()
        logger.info(f"[backup_dashboards][Progress] Found {dashboard_count} dashboards to export in {env_name}.")
        if dashboard_count == 0:
            return True

        success_count = 0
        for db in dashboard_meta:
            dashboard_id = db.get('id')
            dashboard_title = db.get('dashboard_title', 'Unknown Dashboard')
            if not dashboard_id:
                # Skip metadata entries that have no dashboard id.
                continue

            try:
                dashboard_base_dir_name = sanitize_filename(dashboard_title)
                dashboard_dir = backup_root / env_name / dashboard_base_dir_name
                dashboard_dir.mkdir(parents=True, exist_ok=True)

                zip_content, filename = client.export_dashboard(dashboard_id)

                save_and_unpack_dashboard(
                    zip_content=zip_content,
                    original_filename=filename,
                    output_dir=dashboard_dir,
                    unpack=False,
                    logger=logger
                )

                if config.rotate_archive:
                    archive_exports(str(dashboard_dir), policy=config.retention_policy, logger=logger)

                success_count += 1
            except (SupersetAPIError, RequestException, IOError, OSError) as db_error:
                logger.error(f"[backup_dashboards][Failure] Failed to export dashboard {dashboard_title} (ID: {dashboard_id}): {db_error}", exc_info=True)
                continue

        if config.consolidate:
            consolidate_archive_folders(backup_root / env_name, logger=logger)

        if config.clean_folders:
            remove_empty_directories(str(backup_root / env_name), logger=logger)

        logger.info("[backup_dashboards][CoherenceCheck:Passed] Backup logic completed.")
        return success_count == dashboard_count
    except (RequestException, IOError) as e:
        logger.critical(f"[backup_dashboards][Failure] Fatal error during backup for {env_name}: {e}", exc_info=True)
        return False
# [/DEF:backup_dashboards]

# [DEF:main:Function]
# @PURPOSE: Main entry point for running the backup process.
# @RELATION: CALLS -> setup_clients
# @RELATION: CALLS -> backup_dashboards
# @RETURN: int - Exit code (0 - success, 1 - error).
def main() -> int:
    log_dir = Path("P:\\Superset\\010 Бекапы\\Logs")
    logger = SupersetLogger(log_dir=log_dir, level=logging.INFO, console=True)
    logger.info("[main][Entry] Starting Superset backup process.")

    exit_code = 0
    try:
        clients = setup_clients(logger)
        superset_backup_repo = Path("P:\\Superset\\010 Бекапы")
        superset_backup_repo.mkdir(parents=True, exist_ok=True)

        results = {}
        environments = ['dev', 'sbx', 'prod', 'preprod']
        backup_config = BackupConfig(rotate_archive=True)

        for env in environments:
            try:
                results[env] = backup_dashboards(
                    clients[env],
                    env.upper(),
                    superset_backup_repo,
                    logger=logger,
                    config=backup_config
                )
            except Exception as env_error:
                logger.critical(f"[main][Failure] Critical error for environment {env}: {env_error}", exc_info=True)
                results[env] = False

        if not all(results.values()):
            exit_code = 1

    except (RequestException, IOError) as e:
        logger.critical(f"[main][Failure] Fatal error in main execution: {e}", exc_info=True)
        exit_code = 1

    logger.info("[main][Exit] Superset backup process finished.")
    return exit_code
# [/DEF:main]

if __name__ == "__main__":
    sys.exit(main())

# [/DEF:backup_script]