project map script | semantic parser

This commit is contained in:
2026-01-01 16:58:21 +03:00
parent a747a163c8
commit 4c6fc8256d
84 changed files with 10178 additions and 537 deletions

View File

@@ -11,4 +11,4 @@ from .models import SupersetConfig
__all__ = ["SupersetClient", "SupersetConfig"]
# [/DEF:superset_tool]
# [/DEF:superset_tool:Module]
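
The change repeated across every file below is mechanical: each closing `# [/DEF:...]` marker gains the node kind (`Module`, `Class`, `Function`, `DataClass`) that its opening `# [DEF:name:kind]` marker already carries, so the project map script can pair open and close tags by simple matching instead of inference. A minimal parser sketch is shown below; it is an assumed illustration, the actual project map script is not part of this diff.

import re
from typing import List, Tuple

# Matches both "# [DEF:name:Kind]" and "# [/DEF:name:Kind]".
MARKER = re.compile(r"#\s*\[(/?)DEF:([\w.]+):(\w+)\]")

def build_project_map(source: str) -> List[Tuple[str, str, int]]:
    """Return (qualified name, kind, nesting depth) for every opened DEF block."""
    stack: List[Tuple[str, str]] = []
    nodes: List[Tuple[str, str, int]] = []
    for line in source.splitlines():
        match = MARKER.search(line)
        if not match:
            continue
        closing, name, kind = match.groups()
        if not closing:
            nodes.append((name, kind, len(stack)))
            stack.append((name, kind))
        elif stack and stack[-1] == (name, kind):
            stack.pop()  # well-formed close; mismatches stay on the stack
    return nodes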

View File

@@ -48,7 +48,7 @@ class SupersetClient:
)
self.delete_before_reimport: bool = False
self.logger.info("[SupersetClient.__init__][Exit] SupersetClient initialized.")
# [/DEF:SupersetClient.__init__]
# [/DEF:SupersetClient.__init__:Function]
# [DEF:SupersetClient._validate_config:Function]
# @PURPOSE: Checks that the passed configuration object has the correct type.
@@ -60,7 +60,7 @@ class SupersetClient:
self.logger.debug("[_validate_config][Enter] Validating SupersetConfig.")
assert isinstance(config, SupersetConfig), "Configuration must be an instance of SupersetConfig"
self.logger.debug("[_validate_config][Exit] Config is valid.")
# [/DEF:SupersetClient._validate_config]
# [/DEF:SupersetClient._validate_config:Function]
@property
def headers(self) -> dict:
@@ -69,7 +69,7 @@ class SupersetClient:
# @PRE: self.network must be initialized.
# @POST: The returned dictionary contains current headers, including the authorization token.
return self.network.headers
# [/DEF:SupersetClient.headers]
# [/DEF:SupersetClient.headers:Function]
# [DEF:SupersetClient.get_dashboards:Function]
# @PURPOSE: Retrieves the full list of dashboards, automatically handling pagination.
@@ -93,7 +93,7 @@ class SupersetClient:
)
self.logger.info("[get_dashboards][Exit] Found %d dashboards.", total_count)
return total_count, paginated_data
# [/DEF:SupersetClient.get_dashboards]
# [/DEF:SupersetClient.get_dashboards:Function]
# [DEF:SupersetClient.export_dashboard:Function]
# @PURPOSE: Exports a dashboard as a ZIP archive.
@@ -118,7 +118,7 @@ class SupersetClient:
filename = self._resolve_export_filename(response, dashboard_id)
self.logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename)
return response.content, filename
# [/DEF:SupersetClient.export_dashboard]
# [/DEF:SupersetClient.export_dashboard:Function]
# [DEF:SupersetClient.import_dashboard:Function]
# @PURPOSE: Imports a dashboard from a ZIP file, with optional automatic deletion and retry on failure.
@@ -152,7 +152,7 @@ class SupersetClient:
self.delete_dashboard(target_id)
self.logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id)
return self._do_import(file_path)
# [/DEF:SupersetClient.import_dashboard]
# [/DEF:SupersetClient.import_dashboard:Function]
# [DEF:SupersetClient._resolve_target_id_for_delete:Function]
# @PURPOSE: Determines the dashboard ID to delete, using an ID or slug.
@@ -177,7 +177,7 @@ class SupersetClient:
except Exception as e:
self.logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e)
return None
# [/DEF:SupersetClient._resolve_target_id_for_delete]
# [/DEF:SupersetClient._resolve_target_id_for_delete:Function]
# [DEF:SupersetClient._do_import:Function]
# @PURPOSE: Performs a single import request without exception handling.
@@ -200,7 +200,7 @@ class SupersetClient:
extra_data={"overwrite": "true"},
timeout=self.config.timeout * 2,
)
# [/DEF:SupersetClient._do_import]
# [/DEF:SupersetClient._do_import:Function]
# [DEF:SupersetClient.delete_dashboard:Function]
# @PURPOSE: Deletes a dashboard by its ID or slug.
@@ -218,7 +218,7 @@ class SupersetClient:
self.logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id)
else:
self.logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response)
# [/DEF:SupersetClient.delete_dashboard]
# [/DEF:SupersetClient.delete_dashboard:Function]
# [DEF:SupersetClient._extract_dashboard_id_from_zip:Function]
# @PURPOSE: Extracts the dashboard ID from `metadata.yaml` inside the ZIP archive.
@@ -241,7 +241,7 @@ class SupersetClient:
except Exception as exc:
self.logger.error("[_extract_dashboard_id_from_zip][Failure] %s", exc, exc_info=True)
return None
# [/DEF:SupersetClient._extract_dashboard_id_from_zip]
# [/DEF:SupersetClient._extract_dashboard_id_from_zip:Function]
# [DEF:SupersetClient._extract_dashboard_slug_from_zip:Function]
# @PURPOSE: Extracts the dashboard slug from `metadata.yaml` inside the ZIP archive.
@@ -264,7 +264,7 @@ class SupersetClient:
except Exception as exc:
self.logger.error("[_extract_dashboard_slug_from_zip][Failure] %s", exc, exc_info=True)
return None
# [/DEF:SupersetClient._extract_dashboard_slug_from_zip]
# [/DEF:SupersetClient._extract_dashboard_slug_from_zip:Function]
# [DEF:SupersetClient._validate_export_response:Function]
# @PURPOSE: Verifies that the HTTP export response is a valid ZIP archive.
@@ -280,7 +280,7 @@ class SupersetClient:
raise ExportError(f"Response is not a ZIP archive (Content-Type: {content_type})")
if not response.content:
raise ExportError("Received empty data during export")
# [/DEF:SupersetClient._validate_export_response]
# [/DEF:SupersetClient._validate_export_response:Function]
# [DEF:SupersetClient._resolve_export_filename:Function]
# @PURPOSE: Determines the export filename from the response headers or generates one.
@@ -298,7 +298,7 @@ class SupersetClient:
filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip"
self.logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename)
return filename
# [/DEF:SupersetClient._resolve_export_filename]
# [/DEF:SupersetClient._resolve_export_filename:Function]
# [DEF:SupersetClient._validate_query_params:Function]
# @PURPOSE: Builds a valid set of query parameters with pagination.
@@ -310,7 +310,7 @@ class SupersetClient:
assert query is None or isinstance(query, dict), "[_validate_query_params][PRE] query must be a dictionary or None."
base_query = {"page": 0, "page_size": 1000}
return {**base_query, **(query or {})}
# [/DEF:SupersetClient._validate_query_params]
# [/DEF:SupersetClient._validate_query_params:Function]
# [DEF:SupersetClient._fetch_total_object_count:Function]
# @PURPOSE: Retrieves the total number of objects for the given endpoint, for pagination.
@@ -326,7 +326,7 @@ class SupersetClient:
query_params={"page": 0, "page_size": 1},
count_field="count",
)
# [/DEF:SupersetClient._fetch_total_object_count]
# [/DEF:SupersetClient._fetch_total_object_count:Function]
# [DEF:SupersetClient._fetch_all_pages:Function]
# @PURPOSE: Iterates over all pages of the paginated API and collects all the data.
@@ -340,7 +340,7 @@ class SupersetClient:
assert endpoint and isinstance(endpoint, str), "[_fetch_all_pages][PRE] endpoint must be a non-empty string."
assert isinstance(pagination_options, dict), "[_fetch_all_pages][PRE] pagination_options must be a dictionary."
return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options)
# [/DEF:SupersetClient._fetch_all_pages]
# [/DEF:SupersetClient._fetch_all_pages:Function]
# [DEF:SupersetClient._validate_import_file:Function]
# @PURPOSE: Checks that the file exists, is a ZIP archive, and contains `metadata.yaml`.
@@ -356,7 +356,7 @@ class SupersetClient:
assert zipfile.is_zipfile(path), f"File {zip_path} is not a ZIP archive"
with zipfile.ZipFile(path, "r") as zf:
assert any(n.endswith("metadata.yaml") for n in zf.namelist()), f"Archive {zip_path} does not contain 'metadata.yaml'"
# [/DEF:SupersetClient._validate_import_file]
# [/DEF:SupersetClient._validate_import_file:Function]
# [DEF:SupersetClient.get_datasets:Function]
# @PURPOSE: Retrieves the full list of datasets, automatically handling pagination.
@@ -379,7 +379,7 @@ class SupersetClient:
)
self.logger.info("[get_datasets][Exit] Found %d datasets.", total_count)
return total_count, paginated_data
# [/DEF:SupersetClient.get_datasets]
# [/DEF:SupersetClient.get_datasets:Function]
# [DEF:SupersetClient.get_databases:Function]
# @PURPOSE: Retrieves the full list of databases, automatically handling pagination.
@@ -403,7 +403,7 @@ class SupersetClient:
)
self.logger.info("[get_databases][Exit] Found %d databases.", total_count)
return total_count, paginated_data
# [/DEF:SupersetClient.get_databases]
# [/DEF:SupersetClient.get_databases:Function]
# [DEF:SupersetClient.get_dataset:Function]
# @PURPOSE: Retrieves information about a specific dataset by its ID.
@@ -420,7 +420,7 @@ class SupersetClient:
response = cast(Dict, response)
self.logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id)
return response
# [/DEF:SupersetClient.get_dataset]
# [/DEF:SupersetClient.get_dataset:Function]
# [DEF:SupersetClient.get_database:Function]
# @PURPOSE: Retrieves information about a specific database by its ID.
@@ -437,7 +437,7 @@ class SupersetClient:
response = cast(Dict, response)
self.logger.info("[get_database][Exit] Got database %s.", database_id)
return response
# [/DEF:SupersetClient.get_database]
# [/DEF:SupersetClient.get_database:Function]
# [DEF:SupersetClient.update_dataset:Function]
# @PURPOSE: Updates a dataset's data by its ID.
@@ -461,8 +461,8 @@ class SupersetClient:
response = cast(Dict, response)
self.logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id)
return response
# [/DEF:SupersetClient.update_dataset]
# [/DEF:SupersetClient.update_dataset:Function]
# [/DEF:SupersetClient]
# [/DEF:SupersetClient:Class]
# [/DEF:superset_tool.client]
# [/DEF:superset_tool.client:Module]
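
Taken together, the hunks above annotate the public surface of `SupersetClient`. Below is a hypothetical usage sketch; the constructor arguments, auth field names, and the `import_dashboard` call shape are assumptions, not shown in this diff.

from superset_tool import SupersetClient, SupersetConfig

# Assumed construction: the client wraps a validated SupersetConfig.
config = SupersetConfig(
    base_url="https://superset.example.com",            # normalized to .../api/v1
    auth={"username": "admin", "password": "secret"},    # assumed auth keys
)
client = SupersetClient(config)

total, dashboards = client.get_dashboards()              # (count, list of dashboard dicts)
content, filename = client.export_dashboard(dashboards[0]["id"])
with open(filename, "wb") as fh:
    fh.write(content)
client.import_dashboard(filename)                        # assumed single-path signature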

View File

@@ -17,7 +17,7 @@ class SupersetToolError(Exception):
def __init__(self, message: str, context: Optional[Dict[str, Any]] = None):
self.context = context or {}
super().__init__(f"{message} | Context: {self.context}")
# [/DEF:SupersetToolError]
# [/DEF:SupersetToolError:Class]
# [DEF:AuthenticationError:Class]
# @PURPOSE: Errors related to authentication or authorization.
@@ -27,7 +27,7 @@ class SupersetToolError(Exception):
class AuthenticationError(SupersetToolError):
def __init__(self, message: str = "Authentication failed", **context: Any):
super().__init__(f"[AUTH_FAILURE] {message}", context={"type": "authentication", **context})
# [/DEF:AuthenticationError]
# [/DEF:AuthenticationError:Class]
# [DEF:PermissionDeniedError:Class]
# @PURPOSE: Error raised when access to a resource is denied.
@@ -39,7 +39,7 @@ class PermissionDeniedError(AuthenticationError):
def __init__(self, message: str = "Permission denied", required_permission: Optional[str] = None, **context: Any):
full_message = f"Permission denied: {required_permission}" if required_permission else message
super().__init__(full_message, context={"required_permission": required_permission, **context})
# [/DEF:PermissionDeniedError]
# [/DEF:PermissionDeniedError:Class]
# [DEF:SupersetAPIError:Class]
# @PURPOSE: General errors when interacting with the Superset API.
@@ -49,7 +49,7 @@ class PermissionDeniedError(AuthenticationError):
class SupersetAPIError(SupersetToolError):
def __init__(self, message: str = "Superset API error", **context: Any):
super().__init__(f"[API_FAILURE] {message}", context={"type": "api_call", **context})
# [/DEF:SupersetAPIError]
# [/DEF:SupersetAPIError:Class]
# [DEF:ExportError:Class]
# @PURPOSE: Errors specific to export operations.
@@ -59,7 +59,7 @@ class SupersetAPIError(SupersetToolError):
class ExportError(SupersetAPIError):
def __init__(self, message: str = "Dashboard export failed", **context: Any):
super().__init__(f"[EXPORT_FAILURE] {message}", context={"subtype": "export", **context})
# [/DEF:ExportError]
# [/DEF:ExportError:Class]
# [DEF:DashboardNotFoundError:Class]
# @PURPOSE: Error raised when the requested dashboard or resource is not found (404).
@@ -70,7 +70,7 @@ class ExportError(SupersetAPIError):
class DashboardNotFoundError(SupersetAPIError):
def __init__(self, dashboard_id_or_slug: Union[int, str], message: str = "Dashboard not found", **context: Any):
super().__init__(f"[NOT_FOUND] Dashboard '{dashboard_id_or_slug}' {message}", context={"subtype": "not_found", "resource_id": dashboard_id_or_slug, **context})
# [/DEF:DashboardNotFoundError]
# [/DEF:DashboardNotFoundError:Class]
# [DEF:DatasetNotFoundError:Class]
# @PURPOSE: Error raised when the requested dataset does not exist (404).
@@ -81,7 +81,7 @@ class DashboardNotFoundError(SupersetAPIError):
class DatasetNotFoundError(SupersetAPIError):
def __init__(self, dataset_id_or_slug: Union[int, str], message: str = "Dataset not found", **context: Any):
super().__init__(f"[NOT_FOUND] Dataset '{dataset_id_or_slug}' {message}", context={"subtype": "not_found", "resource_id": dataset_id_or_slug, **context})
# [/DEF:DatasetNotFoundError]
# [/DEF:DatasetNotFoundError:Class]
# [DEF:InvalidZipFormatError:Class]
# @PURPOSE: Error indicating an invalid ZIP archive format or content.
@@ -92,7 +92,7 @@ class DatasetNotFoundError(SupersetAPIError):
class InvalidZipFormatError(SupersetToolError):
def __init__(self, message: str = "Invalid ZIP format or content", file_path: Optional[Union[str, Path]] = None, **context: Any):
super().__init__(f"[FILE_ERROR] {message}", context={"type": "file_validation", "file_path": str(file_path) if file_path else "N/A", **context})
# [/DEF:InvalidZipFormatError]
# [/DEF:InvalidZipFormatError:Class]
# [DEF:NetworkError:Class]
# @PURPOSE: Errors related to the network connection.
@@ -102,27 +102,27 @@ class InvalidZipFormatError(SupersetToolError):
class NetworkError(SupersetToolError):
def __init__(self, message: str = "Network connection failed", **context: Any):
super().__init__(f"[NETWORK_FAILURE] {message}", context={"type": "network", **context})
# [/DEF:NetworkError]
# [/DEF:NetworkError:Class]
# [DEF:FileOperationError:Class]
# @PURPOSE: General file operation (I/O) errors.
# @RELATION: INHERITS_FROM -> SupersetToolError
class FileOperationError(SupersetToolError):
pass
# [/DEF:FileOperationError]
# [/DEF:FileOperationError:Class]
# [DEF:InvalidFileStructureError:Class]
# @PURPOSE: Error indicating an invalid file or directory structure.
# @RELATION: INHERITS_FROM -> FileOperationError
class InvalidFileStructureError(FileOperationError):
pass
# [/DEF:InvalidFileStructureError]
# [/DEF:InvalidFileStructureError:Class]
# [DEF:ConfigurationError:Class]
# @PURPOSE: Errors related to invalid tool configuration.
# @RELATION: INHERITS_FROM -> SupersetToolError
class ConfigurationError(SupersetToolError):
pass
# [/DEF:ConfigurationError]
# [/DEF:ConfigurationError:Class]
# [/DEF:superset_tool.exceptions]
# [/DEF:superset_tool.exceptions:Module]
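
Every error above derives from `SupersetToolError` and carries a `context` dictionary, so callers can branch on the subclass and still log the structured details. A sketch of that pattern follows; the `export_one` helper is hypothetical.

from typing import Optional
from superset_tool.exceptions import (
    DashboardNotFoundError,
    NetworkError,
    SupersetToolError,
)

def export_one(client, dashboard_id: int) -> Optional[bytes]:
    try:
        content, _ = client.export_dashboard(dashboard_id)
        return content
    except DashboardNotFoundError:
        return None                      # 404: nothing to export
    except NetworkError:
        raise                            # transient: let the caller retry
    except SupersetToolError as exc:
        print(exc.context)               # every error carries its context dict
        raise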

View File

@@ -37,7 +37,7 @@ class SupersetConfig(BaseModel):
if not required.issubset(v.keys()):
raise ValueError(f"The 'auth' dictionary must contain the fields: {required}. Missing: {required - v.keys()}")
return v
# [/DEF:SupersetConfig.validate_auth]
# [/DEF:SupersetConfig.validate_auth:Function]
# [DEF:SupersetConfig.normalize_base_url:Function]
# @PURPOSE: Normalizes `base_url`, appending `/api/v1` if it is missing.
@@ -54,11 +54,11 @@ class SupersetConfig(BaseModel):
if '/api/v1' not in v:
v = f"{v.rstrip('/')}/api/v1"
return v
# [/DEF:SupersetConfig.normalize_base_url]
# [/DEF:SupersetConfig.normalize_base_url:Function]
class Config:
arbitrary_types_allowed = True
# [/DEF:SupersetConfig]
# [/DEF:SupersetConfig:Class]
# [DEF:DatabaseConfig:Class]
# @PURPOSE: Model for database transformation parameters used during dashboard migration.
@@ -78,10 +78,10 @@ class DatabaseConfig(BaseModel):
if not {'old', 'new'}.issubset(v.keys()):
raise ValueError("'database_config' must contain the keys 'old' and 'new'.")
return v
# [/DEF:DatabaseConfig.validate_config]
# [/DEF:DatabaseConfig.validate_config:Function]
class Config:
arbitrary_types_allowed = True
# [/DEF:DatabaseConfig]
# [/DEF:DatabaseConfig:Class]
# [/DEF:superset_tool.models]
# [/DEF:superset_tool.models:Module]
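
The validators above enforce the shape of the `auth` dictionary and append `/api/v1` to `base_url` when it is missing. A small illustration; the exact required auth keys, the `DatabaseConfig` field name, and the shape of its 'old'/'new' values are assumptions.

from superset_tool.models import DatabaseConfig, SupersetConfig

cfg = SupersetConfig(
    base_url="https://superset.example.com/",            # validator appends /api/v1
    auth={"username": "admin", "password": "secret"},     # assumed required keys
)
assert cfg.base_url.endswith("/api/v1")

# Mapping applied to exported YAMLs when migrating between environments
# (keys 'old'/'new' are enforced by the validator; the value shape is assumed).
db_map = DatabaseConfig(database_config={"old": "dwh_stage", "new": "dwh_prod"})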

View File

@@ -2,4 +2,4 @@
# @SEMANTICS: package, utils
# @PURPOSE: Utility package for superset_tool.
# @LAYER: Infra
# [/DEF:superset_tool.utils]
# [/DEF:superset_tool.utils:Module]

View File

@@ -85,7 +85,7 @@ class DatasetMapper:
self.logger.error("[get_postgres_comments][Failure] %s", e, exc_info=True)
raise
return comments
# [/DEF:DatasetMapper.get_postgres_comments]
# [/DEF:DatasetMapper.get_postgres_comments:Function]
# [DEF:DatasetMapper.load_excel_mappings:Function]
# @PURPOSE: Loads 'column_name' -> 'column_comment' mappings from an XLSX file.
@@ -104,7 +104,7 @@ class DatasetMapper:
except Exception as e:
self.logger.error("[load_excel_mappings][Failure] %s", e, exc_info=True)
raise
# [/DEF:DatasetMapper.load_excel_mappings]
# [/DEF:DatasetMapper.load_excel_mappings:Function]
# [DEF:DatasetMapper.run_mapping:Function]
# @PURPOSE: Main function that performs the mapping and updates a dataset's verbose_map in Superset.
@@ -223,7 +223,7 @@ class DatasetMapper:
except (AssertionError, FileNotFoundError, Exception) as e:
self.logger.error("[run_mapping][Failure] %s", e, exc_info=True)
return
# [/DEF:DatasetMapper.run_mapping]
# [/DEF:DatasetMapper]
# [/DEF:DatasetMapper.run_mapping:Function]
# [/DEF:DatasetMapper:Class]
# [/DEF:superset_tool.utils.dataset_mapper]
# [/DEF:superset_tool.utils.dataset_mapper:Module]
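
`DatasetMapper` pulls column comments either from Postgres or from an XLSX file and writes them into a dataset's verbose_map. A hypothetical driver follows; the constructor and the method arguments are assumptions, only the method names come from the hunks above.

from superset_tool.utils.dataset_mapper import DatasetMapper

# 'client' and 'logger' are assumed to be already-constructed
# SupersetClient and SupersetLogger instances.
mapper = DatasetMapper(client=client, logger=logger)        # assumed signature
mapper.load_excel_mappings("column_comments.xlsx")          # assumed argument
mapper.run_mapping(dataset_id=42)                           # assumed argument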

View File

@@ -64,7 +64,7 @@ def create_temp_file(content: Optional[bytes] = None, suffix: str = ".zip", mode
logger.debug("[create_temp_file][Cleanup] Removed temporary file: %s", resource_path)
except OSError as e:
logger.error("[create_temp_file][Failure] Error during cleanup of %s: %s", resource_path, e)
# [/DEF:create_temp_file]
# [/DEF:create_temp_file:Function]
# [DEF:remove_empty_directories:Function]
# @PURPOSE: Recursively removes all empty subdirectories, starting from the given path.
@@ -88,7 +88,7 @@ def remove_empty_directories(root_dir: str, logger: Optional[SupersetLogger] = N
logger.error("[remove_empty_directories][Failure] Failed to remove %s: %s", current_dir, e)
logger.info("[remove_empty_directories][Exit] Removed %d empty directories.", removed_count)
return removed_count
# [/DEF:remove_empty_directories]
# [/DEF:remove_empty_directories:Function]
# [DEF:read_dashboard_from_disk:Function]
# @PURPOSE: Reads a file's binary content from disk.
@@ -105,7 +105,7 @@ def read_dashboard_from_disk(file_path: str, logger: Optional[SupersetLogger] =
if not content:
logger.warning("[read_dashboard_from_disk][Warning] File is empty: %s", file_path)
return content, path.name
# [/DEF:read_dashboard_from_disk]
# [/DEF:read_dashboard_from_disk:Function]
# [DEF:calculate_crc32:Function]
# @PURPOSE: Computes the CRC32 checksum of a file.
@@ -116,7 +116,7 @@ def calculate_crc32(file_path: Path) -> str:
with open(file_path, 'rb') as f:
crc32_value = zlib.crc32(f.read())
return f"{crc32_value:08x}"
# [/DEF:calculate_crc32]
# [/DEF:calculate_crc32:Function]
# [DEF:RetentionPolicy:DataClass]
# @PURPOSE: Defines the retention policy for archives (daily, weekly, monthly).
@@ -125,7 +125,7 @@ class RetentionPolicy:
daily: int = 7
weekly: int = 4
monthly: int = 12
# [/DEF:RetentionPolicy]
# [/DEF:RetentionPolicy:DataClass]
# [DEF:archive_exports:Function]
# @PURPOSE: Manages the archive of exported files, applying the retention policy and deduplication.
@@ -207,7 +207,7 @@ def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool
logger.info("[archive_exports][State] Removed by retention policy: %s", file_path.name)
except OSError as e:
logger.error("[archive_exports][Failure] Failed to remove %s: %s", file_path, e)
# [/DEF:archive_exports]
# [/DEF:archive_exports:Function]
# [DEF:apply_retention_policy:Function]
# @PURPOSE: (Helper) Applies the retention policy to a list of files, returning those to keep.
@@ -240,7 +240,7 @@ def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: Re
files_to_keep.update(monthly_files[:policy.monthly])
logger.debug("[apply_retention_policy][State] Keeping %d files according to retention policy", len(files_to_keep))
return files_to_keep
# [/DEF:apply_retention_policy]
# [/DEF:apply_retention_policy:Function]
# [DEF:save_and_unpack_dashboard:Function]
# @PURPOSE: Saves the binary ZIP archive content to disk and optionally unpacks it.
@@ -270,7 +270,7 @@ def save_and_unpack_dashboard(zip_content: bytes, output_dir: Union[str, Path],
except zipfile.BadZipFile as e:
logger.error("[save_and_unpack_dashboard][Failure] Invalid ZIP archive: %s", e)
raise InvalidZipFormatError(f"Invalid ZIP file: {e}") from e
# [/DEF:save_and_unpack_dashboard]
# [/DEF:save_and_unpack_dashboard:Function]
# [DEF:update_yamls:Function]
# @PURPOSE: Updates configurations in YAML files, replacing values or applying a regex.
@@ -291,7 +291,7 @@ def update_yamls(db_configs: Optional[List[Dict[str, Any]]] = None, path: str =
for file_path in dir_path.rglob("*.yaml"):
_update_yaml_file(file_path, configs, regexp_pattern, replace_string, logger)
# [/DEF:update_yamls]
# [/DEF:update_yamls:Function]
# [DEF:_update_yaml_file:Function]
# @PURPOSE: (Helper) Updates a single YAML file.
@@ -352,7 +352,7 @@ def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_
f.write(modified_content)
except Exception as e:
logger.error("[_update_yaml_file][Failure] Error performing raw replacement in %s: %s", file_path, e)
# [/DEF:_update_yaml_file]
# [/DEF:_update_yaml_file:Function]
# [DEF:create_dashboard_export:Function]
# @PURPOSE: Creates a ZIP archive from the given source paths.
@@ -379,7 +379,7 @@ def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union
except (IOError, zipfile.BadZipFile, AssertionError) as e:
logger.error("[create_dashboard_export][Failure] Error: %s", e, exc_info=True)
return False
# [/DEF:create_dashboard_export]
# [/DEF:create_dashboard_export:Function]
# [DEF:sanitize_filename:Function]
# @PURPOSE: Strips characters that are invalid in file names from a string.
@@ -387,7 +387,7 @@ def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union
# @RETURN: str - The sanitized string.
def sanitize_filename(filename: str) -> str:
return re.sub(r'[\\/*?:"<>|]', "_", filename).strip()
# [/DEF:sanitize_filename]
# [/DEF:sanitize_filename:Function]
# [DEF:get_filename_from_headers:Function]
# @PURPOSE: Extracts the filename from the 'Content-Disposition' HTTP header.
@@ -398,7 +398,7 @@ def get_filename_from_headers(headers: dict) -> Optional[str]:
if match := re.search(r'filename="?([^"]+)"?', content_disposition):
return match.group(1).strip()
return None
# [/DEF:get_filename_from_headers]
# [/DEF:get_filename_from_headers:Function]
# [DEF:consolidate_archive_folders:Function]
# @PURPOSE: Consolidates archive directories based on a shared slug in their names.
@@ -453,6 +453,6 @@ def consolidate_archive_folders(root_directory: Path, logger: Optional[SupersetL
logger.info("[consolidate_archive_folders][State] Removed source directory: %s", source_dir)
except Exception as e:
logger.error("[consolidate_archive_folders][Failure] Failed to remove source directory %s: %s", source_dir, e)
# [/DEF:consolidate_archive_folders]
# [/DEF:consolidate_archive_folders:Function]
# [/DEF:superset_tool.utils.fileio]
# [/DEF:superset_tool.utils.fileio:Module]
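
The file helpers above cover export archiving, retention, and filename handling. A short sketch using only the signatures visible in the hunks; the `Content-Disposition` header key spelling is an assumption.

from pathlib import Path
from superset_tool.utils.fileio import (
    RetentionPolicy,
    calculate_crc32,
    get_filename_from_headers,
    sanitize_filename,
)

headers = {"Content-Disposition": 'attachment; filename="sales: Q4.zip"'}
name = get_filename_from_headers(headers) or "dashboard_export.zip"
safe_name = sanitize_filename(name)              # 'sales_ Q4.zip'
policy = RetentionPolicy(daily=7, weekly=4, monthly=12)

exported = Path("exports") / safe_name
if exported.exists():
    print(calculate_crc32(exported))             # 8-char hex checksum used for deduplication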

View File

@@ -105,6 +105,6 @@ def setup_clients(logger: SupersetLogger, custom_envs: Optional[List[Any]] = Non
except Exception as e:
logger.critical("[setup_clients][Failure] Critical error during client initialization: %s", e, exc_info=True)
raise
# [/DEF:setup_clients]
# [/DEF:setup_clients:Function]
# [/DEF:superset_tool.utils.init_clients]
# [/DEF:superset_tool.utils.init_clients:Module]

View File

@@ -53,7 +53,7 @@ class SupersetLogger:
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(formatter)
self.logger.addHandler(console_handler)
# [/DEF:SupersetLogger.__init__]
# [/DEF:SupersetLogger.__init__:Function]
# [DEF:SupersetLogger._log:Function]
# @PURPOSE: (Helper) Generic method that dispatches to the appropriate logging level.
@@ -64,44 +64,44 @@ class SupersetLogger:
# @PARAM: exc_info (bool) - Whether to include exception information.
def _log(self, level_method: Any, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
level_method(msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger._log]
# [/DEF:SupersetLogger._log:Function]
# [DEF:SupersetLogger.info:Function]
# @PURPOSE: Logs an INFO-level message.
def info(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.info, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.info]
# [/DEF:SupersetLogger.info:Function]
# [DEF:SupersetLogger.debug:Function]
# @PURPOSE: Logs a DEBUG-level message.
def debug(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.debug, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.debug]
# [/DEF:SupersetLogger.debug:Function]
# [DEF:SupersetLogger.warning:Function]
# @PURPOSE: Logs a WARNING-level message.
def warning(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.warning, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.warning]
# [/DEF:SupersetLogger.warning:Function]
# [DEF:SupersetLogger.error:Function]
# @PURPOSE: Logs an ERROR-level message.
def error(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.error, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.error]
# [/DEF:SupersetLogger.error:Function]
# [DEF:SupersetLogger.critical:Function]
# @PURPOSE: Logs a CRITICAL-level message.
def critical(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.critical, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.critical]
# [/DEF:SupersetLogger.critical:Function]
# [DEF:SupersetLogger.exception:Function]
# @PURPOSE: Logs an ERROR-level message along with the current exception's stack trace.
def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
self.logger.exception(msg, *args, **kwargs)
# [/DEF:SupersetLogger.exception]
# [/DEF:SupersetLogger.exception:Function]
# [/DEF:SupersetLogger]
# [/DEF:SupersetLogger:Class]
# [/DEF:superset_tool.utils.logger]
# [/DEF:superset_tool.utils.logger:Module]
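
`SupersetLogger` is a thin wrapper over the stdlib logger with one method per level plus `exception()`. A usage sketch; the constructor arguments are an assumption, since the hunk only shows the console handler being attached.

from superset_tool.utils.logger import SupersetLogger

logger = SupersetLogger()                        # assumed no-arg construction
logger.info("[export_job][Enter] Exporting %d dashboards.", 3)
try:
    raise ValueError("boom")
except ValueError:
    logger.exception("[export_job][Failure] Export failed.")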

View File

@@ -42,7 +42,7 @@ class APIClient:
self._tokens: Dict[str, str] = {}
self._authenticated = False
self.logger.info("[APIClient.__init__][Exit] APIClient initialized.")
# [/DEF:APIClient.__init__]
# [/DEF:APIClient.__init__:Function]
# [DEF:APIClient._init_session:Function]
# @PURPOSE: Creates and configures a `requests.Session` with retry logic.
@@ -58,7 +58,7 @@ class APIClient:
self.logger.warning("[_init_session][State] SSL verification disabled.")
session.verify = self.request_settings["verify_ssl"]
return session
# [/DEF:APIClient._init_session]
# [/DEF:APIClient._init_session:Function]
# [DEF:APIClient.authenticate:Function]
# @PURPOSE: Authenticates with the Superset API and obtains the access and CSRF tokens.
@@ -85,7 +85,7 @@ class APIClient:
raise AuthenticationError(f"Authentication failed: {e}") from e
except (requests.exceptions.RequestException, KeyError) as e:
raise NetworkError(f"Network or parsing error during authentication: {e}") from e
# [/DEF:APIClient.authenticate]
# [/DEF:APIClient.authenticate:Function]
@property
def headers(self) -> Dict[str, str]:
@@ -98,7 +98,7 @@ class APIClient:
"Referer": self.base_url,
"Content-Type": "application/json"
}
# [/DEF:APIClient.headers]
# [/DEF:APIClient.headers:Function]
# [DEF:APIClient.request:Function]
# @PURPOSE: Performs a generic HTTP request to the API.
@@ -121,7 +121,7 @@ class APIClient:
self._handle_http_error(e, endpoint)
except requests.exceptions.RequestException as e:
self._handle_network_error(e, full_url)
# [/DEF:APIClient.request]
# [/DEF:APIClient.request:Function]
# [DEF:APIClient._handle_http_error:Function]
# @PURPOSE: (Helper) Converts HTTP errors into custom exceptions.
@@ -133,7 +133,7 @@ class APIClient:
if status_code == 403: raise PermissionDeniedError() from e
if status_code == 401: raise AuthenticationError() from e
raise SupersetAPIError(f"API Error {status_code}: {e.response.text}") from e
# [/DEF:APIClient._handle_http_error]
# [/DEF:APIClient._handle_http_error:Function]
# [DEF:APIClient._handle_network_error:Function]
# @PURPOSE: (Helper) Converts network errors into `NetworkError`.
@@ -144,7 +144,7 @@ class APIClient:
elif isinstance(e, requests.exceptions.ConnectionError): msg = "Connection error"
else: msg = f"Unknown network error: {e}"
raise NetworkError(msg, url=url) from e
# [/DEF:APIClient._handle_network_error]
# [/DEF:APIClient._handle_network_error:Function]
# [DEF:APIClient.upload_file:Function]
# @PURPOSE: Uploads a file to the server via multipart/form-data.
@@ -170,7 +170,7 @@ class APIClient:
raise TypeError(f"Unsupported file_obj type: {type(file_obj)}")
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
# [/DEF:APIClient.upload_file]
# [/DEF:APIClient.upload_file:Function]
# [DEF:APIClient._perform_upload:Function]
# @PURPOSE: (Helper) Performs a POST request with a file.
@@ -196,7 +196,7 @@ class APIClient:
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
except requests.exceptions.RequestException as e:
raise NetworkError(f"Network error during upload: {e}", url=url) from e
# [/DEF:APIClient._perform_upload]
# [/DEF:APIClient._perform_upload:Function]
# [DEF:APIClient.fetch_paginated_count:Function]
# @PURPOSE: Retrieves the total number of items for pagination.
@@ -207,7 +207,7 @@ class APIClient:
def fetch_paginated_count(self, endpoint: str, query_params: Dict, count_field: str = "count") -> int:
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query_params)}))
return response_json.get(count_field, 0)
# [/DEF:APIClient.fetch_paginated_count]
# [/DEF:APIClient.fetch_paginated_count:Function]
# [DEF:APIClient.fetch_paginated_data:Function]
# @PURPOSE: Automatically collects data from all pages of a paginated endpoint.
@@ -225,8 +225,8 @@ class APIClient:
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query)}))
results.extend(response_json.get(results_field, []))
return results
# [/DEF:APIClient.fetch_paginated_data]
# [/DEF:APIClient.fetch_paginated_data:Function]
# [/DEF:APIClient]
# [/DEF:APIClient:Class]
# [/DEF:superset_tool.utils.network]
# [/DEF:superset_tool.utils.network:Module]
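
`APIClient` concentrates the transport concerns: session setup with retries, token handling, error mapping, uploads, and the two pagination helpers used by `SupersetClient`. A sketch of the pagination path; the constructor and the `pagination_options` keys are assumptions.

from superset_tool.utils.network import APIClient

# 'config' and 'logger' as in the SupersetConfig / SupersetLogger sketches above.
api = APIClient(config, logger)                  # assumed constructor
api.authenticate()                               # obtains access and CSRF tokens

total = api.fetch_paginated_count("/dashboard/", query_params={"page": 0, "page_size": 1})
rows = api.fetch_paginated_data(
    endpoint="/dashboard/",
    pagination_options={"page_size": 100},       # assumed option keys
)
print(f"fetched {len(rows)} of {total} dashboards")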

View File

@@ -26,7 +26,7 @@ def menu(title: str, prompt: str, choices: List[str], **kwargs) -> Tuple[int, Op
return (0, choices[sel - 1]) if 0 < sel <= len(choices) else (1, None)
except (ValueError, IndexError):
return 1, None
# [/DEF:menu]
# [/DEF:menu:Function]
# [DEF:checklist:Function]
# @PURPOSE: Displays a list with multiple-choice selection.
@@ -46,7 +46,7 @@ def checklist(title: str, prompt: str, options: List[Tuple[str, str]], **kwargs)
return 0, selected_values
except (ValueError, IndexError):
return 1, []
# [/DEF:checklist]
# [/DEF:checklist:Function]
# [DEF:yesno:Function]
# @PURPOSE: Asks a yes/no question.
@@ -56,7 +56,7 @@ def checklist(title: str, prompt: str, options: List[Tuple[str, str]], **kwargs)
def yesno(title: str, question: str, **kwargs) -> bool:
ans = input(f"\n=== {title} ===\n{question} (y/n): ").strip().lower()
return ans in ("y", "yes", "да", "д")
# [/DEF:yesno]
# [/DEF:yesno:Function]
# [DEF:msgbox:Function]
# @PURPOSE: Displays an informational message.
@@ -64,7 +64,7 @@ def yesno(title: str, question: str, **kwargs) -> bool:
# @PARAM: msg (str) - The message text.
def msgbox(title: str, msg: str, **kwargs) -> None:
print(f"\n=== {title} ===\n{msg}\n")
# [/DEF:msgbox]
# [/DEF:msgbox:Function]
# [DEF:inputbox:Function]
# @PURPOSE: Prompts the user for text input.
@@ -75,7 +75,7 @@ def inputbox(title: str, prompt: str, **kwargs) -> Tuple[int, Optional[str]]:
print(f"\n=== {title} ===")
val = input(f"{prompt}\n")
return (0, val) if val else (1, None)
# [/DEF:inputbox]
# [/DEF:inputbox:Function]
# [DEF:_ConsoleGauge:Class]
# @PURPOSE: Context manager that emulates a `whiptail` gauge in the console.
@@ -91,7 +91,7 @@ class _ConsoleGauge:
sys.stdout.write(f"\r{txt} "); sys.stdout.flush()
def set_percent(self, percent: int) -> None:
sys.stdout.write(f"{percent}%"); sys.stdout.flush()
# [/DEF:_ConsoleGauge]
# [/DEF:_ConsoleGauge:Class]
# [DEF:gauge:Function]
# @PURPOSE: Creates and returns a `_ConsoleGauge` instance.
@@ -99,6 +99,6 @@ class _ConsoleGauge:
# @RETURN: _ConsoleGauge - The context manager instance.
def gauge(title: str, **kwargs) -> _ConsoleGauge:
return _ConsoleGauge(title, **kwargs)
# [/DEF:gauge]
# [/DEF:gauge:Function]
# [/DEF:superset_tool.utils.whiptail_fallback]
# [/DEF:superset_tool.utils.whiptail_fallback:Module]
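
The fallback module mirrors the `whiptail` dialog API in a plain console so the interactive scripts keep working without the binary. A sketch built on the signatures shown above; using the gauge as a `with` block follows from its stated purpose as a context manager.

from superset_tool.utils.whiptail_fallback import gauge, inputbox, menu, yesno

code, choice = menu("Superset tool", "Pick an action:", ["Export", "Import"])
if code == 0 and yesno("Confirm", f"Run '{choice}'?"):
    _, env = inputbox("Environment", "Target environment name:")
    with gauge(f"{choice} -> {env}") as progress:
        progress.set_percent(100)                # _ConsoleGauge writes "100%" to stdout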