semantic markup update

2026-01-18 21:29:54 +03:00
parent 11c59fb420
commit 76baeb1038
85 changed files with 7020 additions and 5953 deletions
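Throughout this commit, each method body is wrapped in a belief_scope(...) context. The context manager itself is not part of this diff, so the sketch below is only an assumption of its shape: a named scope that logs entry, failure, and exit around the wrapped block.

import logging
from contextlib import contextmanager
from typing import Iterator, Optional

@contextmanager
def belief_scope(name: str, logger: Optional[logging.Logger] = None) -> Iterator[None]:
    # Hypothetical sketch; the real belief_scope lives elsewhere in the repository.
    log = logger or logging.getLogger("belief_scope")
    log.debug("[%s][Enter]", name)
    try:
        yield
    except Exception:
        # Record the failure, then re-raise so callers still see the original error.
        log.error("[%s][Failure]", name, exc_info=True)
        raise
    else:
        log.debug("[%s][Exit]", name)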

View File

@@ -36,9 +36,10 @@ class SupersetClient:
# @PARAM: config (SupersetConfig) - Конфигурация подключения.
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
def __init__(self, config: SupersetConfig, logger: Optional[SupersetLogger] = None):
self.logger = logger or SupersetLogger(name="SupersetClient")
self.logger.info("[SupersetClient.__init__][Enter] Initializing SupersetClient.")
self._validate_config(config)
with belief_scope("__init__"):
self.logger = logger or SupersetLogger(name="SupersetClient")
self.logger.info("[SupersetClient.__init__][Enter] Initializing SupersetClient.")
self._validate_config(config)
self.config = config
self.network = APIClient(
config=config.dict(),
@@ -57,19 +58,21 @@ class SupersetClient:
# @THROW: TypeError - Если `config` не является экземпляром `SupersetConfig`.
# @PARAM: config (SupersetConfig) - Объект для проверки.
def _validate_config(self, config: SupersetConfig) -> None:
self.logger.debug("[_validate_config][Enter] Validating SupersetConfig.")
assert isinstance(config, SupersetConfig), "Конфигурация должна быть экземпляром SupersetConfig"
self.logger.debug("[_validate_config][Exit] Config is valid.")
with belief_scope("_validate_config"):
self.logger.debug("[_validate_config][Enter] Validating SupersetConfig.")
assert isinstance(config, SupersetConfig), "Конфигурация должна быть экземпляром SupersetConfig"
self.logger.debug("[_validate_config][Exit] Config is valid.")
# [/DEF:_validate_config:Function]
@property
# [DEF:headers:Function]
# @PURPOSE: Возвращает базовые HTTP-заголовки, используемые сетевым клиентом.
# @PRE: self.network должен быть инициализирован.
# @POST: Возвращаемый словарь содержит актуальные заголовки, включая токен авторизации.
def headers(self) -> dict:
# [DEF:headers:Function]
# @PURPOSE: Возвращает базовые HTTP-заголовки, используемые сетевым клиентом.
# @PRE: self.network должен быть инициализирован.
# @POST: Возвращаемый словарь содержит актуальные заголовки, включая токен авторизации.
return self.network.headers
# [/DEF:headers:Function]
with belief_scope("headers"):
return self.network.headers
# [/DEF:headers:Function]
# [DEF:get_dashboards:Function]
# @PURPOSE: Получает полный список дашбордов, автоматически обрабатывая пагинацию.
@@ -81,10 +84,19 @@ class SupersetClient:
# @PARAM: query (Optional[Dict]) - Дополнительные параметры запроса для API.
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список дашбордов).
def get_dashboards(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
assert self.network, "[get_dashboards][PRE] Network client must be initialized."
self.logger.info("[get_dashboards][Enter] Fetching dashboards.")
validated_query = self._validate_query_params(query or {})
if 'columns' not in validated_query:
with belief_scope("get_dashboards"):
assert self.network, "[get_dashboards][PRE] Network client must be initialized."
self.logger.info("[get_dashboards][Enter] Fetching dashboards.")
validated_query = self._validate_query_params(query or {})
if 'columns' not in validated_query:
validated_query['columns'] = ["slug", "id", "changed_on_utc", "dashboard_title", "published"]
total_count = self._fetch_total_object_count(endpoint="/dashboard/")
paginated_data = self._fetch_all_pages(
endpoint="/dashboard/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
self.logger.info("[get_dashboards][Exit] Found %d dashboards.", total_count)
return total_count, paginated_data
validated_query['columns'] = ["slug", "id", "changed_on_utc", "dashboard_title", "published"]
total_count = self._fetch_total_object_count(endpoint="/dashboard/")
paginated_data = self._fetch_all_pages(
@@ -104,20 +116,21 @@ class SupersetClient:
# @PARAM: dashboard_id (int) - ID дашборда для экспорта.
# @RETURN: Tuple[bytes, str] - Бинарное содержимое ZIP-архива и имя файла.
def export_dashboard(self, dashboard_id: int) -> Tuple[bytes, str]:
assert isinstance(dashboard_id, int) and dashboard_id > 0, "[export_dashboard][PRE] dashboard_id must be a positive integer."
self.logger.info("[export_dashboard][Enter] Exporting dashboard %s.", dashboard_id)
response = self.network.request(
method="GET",
endpoint="/dashboard/export/",
params={"q": json.dumps([dashboard_id])},
stream=True,
raw_response=True,
)
response = cast(Response, response)
self._validate_export_response(response, dashboard_id)
filename = self._resolve_export_filename(response, dashboard_id)
self.logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename)
return response.content, filename
with belief_scope("export_dashboard"):
assert isinstance(dashboard_id, int) and dashboard_id > 0, "[export_dashboard][PRE] dashboard_id must be a positive integer."
self.logger.info("[export_dashboard][Enter] Exporting dashboard %s.", dashboard_id)
response = self.network.request(
method="GET",
endpoint="/dashboard/export/",
params={"q": json.dumps([dashboard_id])},
stream=True,
raw_response=True,
)
response = cast(Response, response)
self._validate_export_response(response, dashboard_id)
filename = self._resolve_export_filename(response, dashboard_id)
self.logger.info("[export_dashboard][Exit] Exported dashboard %s to %s.", dashboard_id, filename)
return response.content, filename
# [/DEF:export_dashboard:Function]
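export_dashboard returns the raw ZIP bytes together with a filename taken from the response headers (or generated as a fallback), so persisting an export is a two-step call. A usage sketch, assuming client is an already authenticated SupersetClient:

from pathlib import Path

# Assumes `client` is an authenticated SupersetClient instance.
content, filename = client.export_dashboard(dashboard_id=42)
out_dir = Path("exports")
out_dir.mkdir(parents=True, exist_ok=True)
(out_dir / filename).write_bytes(content)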
# [DEF:import_dashboard:Function]
@@ -134,24 +147,25 @@ class SupersetClient:
# @PARAM: dash_slug (Optional[str]) - Slug дашборда для поиска ID, если ID не предоставлен.
# @RETURN: Dict - Ответ API в случае успеха.
def import_dashboard(self, file_name: Union[str, Path], dash_id: Optional[int] = None, dash_slug: Optional[str] = None) -> Dict:
assert file_name, "[import_dashboard][PRE] file_name must be provided."
file_path = str(file_name)
self._validate_import_file(file_path)
try:
return self._do_import(file_path)
except Exception as exc:
self.logger.error("[import_dashboard][Failure] First import attempt failed: %s", exc, exc_info=True)
if not self.delete_before_reimport:
raise
with belief_scope("import_dashboard"):
assert file_name, "[import_dashboard][PRE] file_name must be provided."
file_path = str(file_name)
self._validate_import_file(file_path)
try:
return self._do_import(file_path)
except Exception as exc:
self.logger.error("[import_dashboard][Failure] First import attempt failed: %s", exc, exc_info=True)
if not self.delete_before_reimport:
raise
target_id = self._resolve_target_id_for_delete(dash_id, dash_slug)
if target_id is None:
self.logger.error("[import_dashboard][Failure] No ID available for delete-retry.")
raise
target_id = self._resolve_target_id_for_delete(dash_id, dash_slug)
if target_id is None:
self.logger.error("[import_dashboard][Failure] No ID available for delete-retry.")
raise
self.delete_dashboard(target_id)
self.logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id)
return self._do_import(file_path)
self.delete_dashboard(target_id)
self.logger.info("[import_dashboard][State] Deleted dashboard ID %s, retrying import.", target_id)
return self._do_import(file_path)
# [/DEF:import_dashboard:Function]
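import_dashboard tries a plain import first and only falls back to delete-and-retry when delete_before_reimport is set, resolving the dashboard to delete by ID or, failing that, by slug. A usage sketch; the flag is referenced but not defined in this diff, so treating it as a plain boolean attribute is an assumption:

# Assumes `client` is an authenticated SupersetClient.
client.delete_before_reimport = True  # assumed to be a simple boolean attribute
result = client.import_dashboard(
    file_name="exports/dashboard_export_42.zip",
    dash_slug="sales-overview",  # used to resolve the ID if the first import fails
)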
# [DEF:_resolve_target_id_for_delete:Function]
@@ -163,10 +177,21 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса при поиске по slug.
# @RETURN: Optional[int] - Найденный ID или None.
def _resolve_target_id_for_delete(self, dash_id: Optional[int], dash_slug: Optional[str]) -> Optional[int]:
assert dash_id is not None or dash_slug is not None, "[_resolve_target_id_for_delete][PRE] At least one of ID or slug must be provided."
if dash_id is not None:
return dash_id
if dash_slug is not None:
with belief_scope("_resolve_target_id_for_delete"):
assert dash_id is not None or dash_slug is not None, "[_resolve_target_id_for_delete][PRE] At least one of ID or slug must be provided."
if dash_id is not None:
return dash_id
if dash_slug is not None:
self.logger.debug("[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", dash_slug)
try:
_, candidates = self.get_dashboards(query={"filters": [{"col": "slug", "op": "eq", "value": dash_slug}]})
if candidates:
target_id = candidates[0]["id"]
self.logger.debug("[_resolve_target_id_for_delete][Success] Resolved slug to ID %s.", target_id)
return target_id
except Exception as e:
self.logger.warning("[_resolve_target_id_for_delete][Warning] Could not resolve slug '%s' to ID: %s", dash_slug, e)
return None
self.logger.debug("[_resolve_target_id_for_delete][State] Resolving ID by slug '%s'.", dash_slug)
try:
_, candidates = self.get_dashboards(query={"filters": [{"col": "slug", "op": "eq", "value": dash_slug}]})
@@ -187,19 +212,20 @@ class SupersetClient:
# @PARAM: file_name (Union[str, Path]) - Путь к файлу.
# @RETURN: Dict - Ответ API.
def _do_import(self, file_name: Union[str, Path]) -> Dict:
self.logger.debug(f"[_do_import][State] Uploading file: {file_name}")
file_path = Path(file_name)
if file_path.exists():
self.logger.debug(f"[_do_import][State] File size: {file_path.stat().st_size} bytes")
else:
self.logger.error(f"[_do_import][Failure] File does not exist: {file_name}")
raise FileNotFoundError(f"File does not exist: {file_name}")
return self.network.upload_file(
endpoint="/dashboard/import/",
file_info={"file_obj": file_path, "file_name": file_path.name, "form_field": "formData"},
extra_data={"overwrite": "true"},
timeout=self.config.timeout * 2,
)
with belief_scope("_do_import"):
self.logger.debug(f"[_do_import][State] Uploading file: {file_name}")
file_path = Path(file_name)
if file_path.exists():
self.logger.debug(f"[_do_import][State] File size: {file_path.stat().st_size} bytes")
else:
self.logger.error(f"[_do_import][Failure] File does not exist: {file_name}")
raise FileNotFoundError(f"File does not exist: {file_name}")
return self.network.upload_file(
endpoint="/dashboard/import/",
file_info={"file_obj": file_path, "file_name": file_path.name, "form_field": "formData"},
extra_data={"overwrite": "true"},
timeout=self.config.timeout * 2,
)
# [/DEF:_do_import:Function]
# [DEF:delete_dashboard:Function]
@@ -210,14 +236,15 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса.
# @PARAM: dashboard_id (Union[int, str]) - ID или slug дашборда.
def delete_dashboard(self, dashboard_id: Union[int, str]) -> None:
assert dashboard_id, "[delete_dashboard][PRE] dashboard_id must be provided."
self.logger.info("[delete_dashboard][Enter] Deleting dashboard %s.", dashboard_id)
response = self.network.request(method="DELETE", endpoint=f"/dashboard/{dashboard_id}")
response = cast(Dict, response)
if response.get("result", True) is not False:
self.logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id)
else:
self.logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response)
with belief_scope("delete_dashboard"):
assert dashboard_id, "[delete_dashboard][PRE] dashboard_id must be provided."
self.logger.info("[delete_dashboard][Enter] Deleting dashboard %s.", dashboard_id)
response = self.network.request(method="DELETE", endpoint=f"/dashboard/{dashboard_id}")
response = cast(Dict, response)
if response.get("result", True) is not False:
self.logger.info("[delete_dashboard][Success] Dashboard %s deleted.", dashboard_id)
else:
self.logger.warning("[delete_dashboard][Warning] Unexpected response while deleting %s: %s", dashboard_id, response)
# [/DEF:delete_dashboard:Function]
# [DEF:_extract_dashboard_id_from_zip:Function]
@@ -228,19 +255,20 @@ class SupersetClient:
# @THROW: ImportError - Если не установлен `yaml`.
# @RETURN: Optional[int] - ID дашборда или None.
def _extract_dashboard_id_from_zip(self, file_name: Union[str, Path]) -> Optional[int]:
assert zipfile.is_zipfile(file_name), "[_extract_dashboard_id_from_zip][PRE] file_name must be a valid zip file."
try:
import yaml
with zipfile.ZipFile(file_name, "r") as zf:
for name in zf.namelist():
if name.endswith("metadata.yaml"):
with zf.open(name) as meta_file:
meta = yaml.safe_load(meta_file)
dash_id = meta.get("dashboard_uuid") or meta.get("dashboard_id")
if dash_id: return int(dash_id)
except Exception as exc:
self.logger.error("[_extract_dashboard_id_from_zip][Failure] %s", exc, exc_info=True)
return None
with belief_scope("_extract_dashboard_id_from_zip"):
assert zipfile.is_zipfile(file_name), "[_extract_dashboard_id_from_zip][PRE] file_name must be a valid zip file."
try:
import yaml
with zipfile.ZipFile(file_name, "r") as zf:
for name in zf.namelist():
if name.endswith("metadata.yaml"):
with zf.open(name) as meta_file:
meta = yaml.safe_load(meta_file)
dash_id = meta.get("dashboard_uuid") or meta.get("dashboard_id")
if dash_id: return int(dash_id)
except Exception as exc:
self.logger.error("[_extract_dashboard_id_from_zip][Failure] %s", exc, exc_info=True)
return None
# [/DEF:_extract_dashboard_id_from_zip:Function]
# [DEF:_extract_dashboard_slug_from_zip:Function]
@@ -251,19 +279,20 @@ class SupersetClient:
# @THROW: ImportError - Если не установлен `yaml`.
# @RETURN: Optional[str] - Slug дашборда или None.
def _extract_dashboard_slug_from_zip(self, file_name: Union[str, Path]) -> Optional[str]:
assert zipfile.is_zipfile(file_name), "[_extract_dashboard_slug_from_zip][PRE] file_name must be a valid zip file."
try:
import yaml
with zipfile.ZipFile(file_name, "r") as zf:
for name in zf.namelist():
if name.endswith("metadata.yaml"):
with zf.open(name) as meta_file:
meta = yaml.safe_load(meta_file)
if slug := meta.get("slug"):
return str(slug)
except Exception as exc:
self.logger.error("[_extract_dashboard_slug_from_zip][Failure] %s", exc, exc_info=True)
return None
with belief_scope("_extract_dashboard_slug_from_zip"):
assert zipfile.is_zipfile(file_name), "[_extract_dashboard_slug_from_zip][PRE] file_name must be a valid zip file."
try:
import yaml
with zipfile.ZipFile(file_name, "r") as zf:
for name in zf.namelist():
if name.endswith("metadata.yaml"):
with zf.open(name) as meta_file:
meta = yaml.safe_load(meta_file)
if slug := meta.get("slug"):
return str(slug)
except Exception as exc:
self.logger.error("[_extract_dashboard_slug_from_zip][Failure] %s", exc, exc_info=True)
return None
# [/DEF:_extract_dashboard_slug_from_zip:Function]
# [DEF:_validate_export_response:Function]
@@ -274,12 +303,13 @@ class SupersetClient:
# @PARAM: response (Response) - HTTP ответ.
# @PARAM: dashboard_id (int) - ID дашборда.
def _validate_export_response(self, response: Response, dashboard_id: int) -> None:
assert isinstance(response, Response), "[_validate_export_response][PRE] response must be a requests.Response object."
content_type = response.headers.get("Content-Type", "")
if "application/zip" not in content_type:
raise ExportError(f"Получен не ZIP-архив (Content-Type: {content_type})")
if not response.content:
raise ExportError("Получены пустые данные при экспорте")
with belief_scope("_validate_export_response"):
assert isinstance(response, Response), "[_validate_export_response][PRE] response must be a requests.Response object."
content_type = response.headers.get("Content-Type", "")
if "application/zip" not in content_type:
raise ExportError(f"Получен не ZIP-архив (Content-Type: {content_type})")
if not response.content:
raise ExportError("Получены пустые данные при экспорте")
# [/DEF:_validate_export_response:Function]
# [DEF:_resolve_export_filename:Function]
@@ -290,14 +320,15 @@ class SupersetClient:
# @PARAM: dashboard_id (int) - ID дашборда.
# @RETURN: str - Имя файла.
def _resolve_export_filename(self, response: Response, dashboard_id: int) -> str:
assert isinstance(response, Response), "[_resolve_export_filename][PRE] response must be a requests.Response object."
filename = get_filename_from_headers(dict(response.headers))
if not filename:
from datetime import datetime
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip"
self.logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename)
return filename
with belief_scope("_resolve_export_filename"):
assert isinstance(response, Response), "[_resolve_export_filename][PRE] response must be a requests.Response object."
filename = get_filename_from_headers(dict(response.headers))
if not filename:
from datetime import datetime
timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
filename = f"dashboard_export_{dashboard_id}_{timestamp}.zip"
self.logger.warning("[_resolve_export_filename][Warning] Generated filename: %s", filename)
return filename
# [/DEF:_resolve_export_filename:Function]
# [DEF:_validate_query_params:Function]
@@ -307,9 +338,10 @@ class SupersetClient:
# @POST: Возвращает словарь, содержащий базовые параметры пагинации, объединенные с `query`.
# @RETURN: Dict - Валидированные параметры.
def _validate_query_params(self, query: Optional[Dict]) -> Dict:
assert query is None or isinstance(query, dict), "[_validate_query_params][PRE] query must be a dictionary or None."
base_query = {"page": 0, "page_size": 1000}
return {**base_query, **(query or {})}
with belief_scope("_validate_query_params"):
assert query is None or isinstance(query, dict), "[_validate_query_params][PRE] query must be a dictionary or None."
base_query = {"page": 0, "page_size": 1000}
return {**base_query, **(query or {})}
# [/DEF:_validate_query_params:Function]
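_validate_query_params only injects default pagination; keys supplied by the caller always win because they are unpacked last. The same merge, shown standalone:

base_query = {"page": 0, "page_size": 1000}
query = {"page_size": 50, "filters": [{"col": "slug", "op": "eq", "value": "sales"}]}
merged = {**base_query, **query}
# merged == {"page": 0, "page_size": 50, "filters": [{"col": "slug", "op": "eq", "value": "sales"}]}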
# [DEF:_fetch_total_object_count:Function]
@@ -320,12 +352,13 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса.
# @RETURN: int - Количество объектов.
def _fetch_total_object_count(self, endpoint: str) -> int:
assert endpoint and isinstance(endpoint, str), "[_fetch_total_object_count][PRE] endpoint must be a non-empty string."
return self.network.fetch_paginated_count(
endpoint=endpoint,
query_params={"page": 0, "page_size": 1},
count_field="count",
)
with belief_scope("_fetch_total_object_count"):
assert endpoint and isinstance(endpoint, str), "[_fetch_total_object_count][PRE] endpoint must be a non-empty string."
return self.network.fetch_paginated_count(
endpoint=endpoint,
query_params={"page": 0, "page_size": 1},
count_field="count",
)
# [/DEF:_fetch_total_object_count:Function]
# [DEF:_fetch_all_pages:Function]
@@ -337,9 +370,10 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса.
# @RETURN: List[Dict] - Список всех объектов.
def _fetch_all_pages(self, endpoint: str, pagination_options: Dict) -> List[Dict]:
assert endpoint and isinstance(endpoint, str), "[_fetch_all_pages][PRE] endpoint must be a non-empty string."
assert isinstance(pagination_options, dict), "[_fetch_all_pages][PRE] pagination_options must be a dictionary."
return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options)
with belief_scope("_fetch_all_pages"):
assert endpoint and isinstance(endpoint, str), "[_fetch_all_pages][PRE] endpoint must be a non-empty string."
assert isinstance(pagination_options, dict), "[_fetch_all_pages][PRE] pagination_options must be a dictionary."
return self.network.fetch_paginated_data(endpoint=endpoint, pagination_options=pagination_options)
# [/DEF:_fetch_all_pages:Function]
# [DEF:_validate_import_file:Function]
@@ -350,12 +384,13 @@ class SupersetClient:
# @THROW: InvalidZipFormatError - Если файл не является ZIP или не содержит `metadata.yaml`.
# @PARAM: zip_path (Union[str, Path]) - Путь к файлу.
def _validate_import_file(self, zip_path: Union[str, Path]) -> None:
assert zip_path, "[_validate_import_file][PRE] zip_path must be provided."
path = Path(zip_path)
assert path.exists(), f"Файл {zip_path} не существует"
assert zipfile.is_zipfile(path), f"Файл {zip_path} не является ZIP-архивом"
with zipfile.ZipFile(path, "r") as zf:
assert any(n.endswith("metadata.yaml") for n in zf.namelist()), f"Архив {zip_path} не содержит 'metadata.yaml'"
with belief_scope("_validate_import_file"):
assert zip_path, "[_validate_import_file][PRE] zip_path must be provided."
path = Path(zip_path)
assert path.exists(), f"Файл {zip_path} не существует"
assert zipfile.is_zipfile(path), f"Файл {zip_path} не является ZIP-архивом"
with zipfile.ZipFile(path, "r") as zf:
assert any(n.endswith("metadata.yaml") for n in zf.namelist()), f"Архив {zip_path} не содержит 'metadata.yaml'"
# [/DEF:_validate_import_file:Function]
# [DEF:get_datasets:Function]
@@ -368,17 +403,18 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса.
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список датасетов).
def get_datasets(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
assert self.network, "[get_datasets][PRE] Network client must be initialized."
self.logger.info("[get_datasets][Enter] Fetching datasets.")
validated_query = self._validate_query_params(query)
with belief_scope("get_datasets"):
assert self.network, "[get_datasets][PRE] Network client must be initialized."
self.logger.info("[get_datasets][Enter] Fetching datasets.")
validated_query = self._validate_query_params(query)
total_count = self._fetch_total_object_count(endpoint="/dataset/")
paginated_data = self._fetch_all_pages(
endpoint="/dataset/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
self.logger.info("[get_datasets][Exit] Found %d datasets.", total_count)
return total_count, paginated_data
total_count = self._fetch_total_object_count(endpoint="/dataset/")
paginated_data = self._fetch_all_pages(
endpoint="/dataset/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
self.logger.info("[get_datasets][Exit] Found %d datasets.", total_count)
return total_count, paginated_data
# [/DEF:get_datasets:Function]
# [DEF:get_databases:Function]
@@ -391,18 +427,19 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса.
# @RETURN: Tuple[int, List[Dict]] - Кортеж (общее количество, список баз данных).
def get_databases(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
assert self.network, "[get_databases][PRE] Network client must be initialized."
self.logger.info("[get_databases][Enter] Fetching databases.")
validated_query = self._validate_query_params(query or {})
if 'columns' not in validated_query:
validated_query['columns'] = []
total_count = self._fetch_total_object_count(endpoint="/database/")
paginated_data = self._fetch_all_pages(
endpoint="/database/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
self.logger.info("[get_databases][Exit] Found %d databases.", total_count)
return total_count, paginated_data
with belief_scope("get_databases"):
assert self.network, "[get_databases][PRE] Network client must be initialized."
self.logger.info("[get_databases][Enter] Fetching databases.")
validated_query = self._validate_query_params(query or {})
if 'columns' not in validated_query:
validated_query['columns'] = []
total_count = self._fetch_total_object_count(endpoint="/database/")
paginated_data = self._fetch_all_pages(
endpoint="/database/",
pagination_options={"base_query": validated_query, "total_count": total_count, "results_field": "result"},
)
self.logger.info("[get_databases][Exit] Found %d databases.", total_count)
return total_count, paginated_data
# [/DEF:get_databases:Function]
# [DEF:get_dataset:Function]
@@ -414,12 +451,13 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса или если датасет не найден.
# @RETURN: Dict - Информация о датасете.
def get_dataset(self, dataset_id: int) -> Dict:
assert isinstance(dataset_id, int) and dataset_id > 0, "[get_dataset][PRE] dataset_id must be a positive integer."
self.logger.info("[get_dataset][Enter] Fetching dataset %s.", dataset_id)
response = self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}")
response = cast(Dict, response)
self.logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id)
return response
with belief_scope("get_dataset"):
assert isinstance(dataset_id, int) and dataset_id > 0, "[get_dataset][PRE] dataset_id must be a positive integer."
self.logger.info("[get_dataset][Enter] Fetching dataset %s.", dataset_id)
response = self.network.request(method="GET", endpoint=f"/dataset/{dataset_id}")
response = cast(Dict, response)
self.logger.info("[get_dataset][Exit] Got dataset %s.", dataset_id)
return response
# [/DEF:get_dataset:Function]
# [DEF:get_database:Function]
@@ -431,12 +469,13 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса или если база данных не найдена.
# @RETURN: Dict - Информация о базе данных.
def get_database(self, database_id: int) -> Dict:
assert isinstance(database_id, int) and database_id > 0, "[get_database][PRE] database_id must be a positive integer."
self.logger.info("[get_database][Enter] Fetching database %s.", database_id)
response = self.network.request(method="GET", endpoint=f"/database/{database_id}")
response = cast(Dict, response)
self.logger.info("[get_database][Exit] Got database %s.", database_id)
return response
with belief_scope("get_database"):
assert isinstance(database_id, int) and database_id > 0, "[get_database][PRE] database_id must be a positive integer."
self.logger.info("[get_database][Enter] Fetching database %s.", database_id)
response = self.network.request(method="GET", endpoint=f"/database/{database_id}")
response = cast(Dict, response)
self.logger.info("[get_database][Exit] Got database %s.", database_id)
return response
# [/DEF:get_database:Function]
# [DEF:update_dataset:Function]
@@ -449,18 +488,19 @@ class SupersetClient:
# @THROW: APIError - В случае ошибки сетевого запроса.
# @RETURN: Dict - Ответ API.
def update_dataset(self, dataset_id: int, data: Dict) -> Dict:
assert isinstance(dataset_id, int) and dataset_id > 0, "[update_dataset][PRE] dataset_id must be a positive integer."
assert isinstance(data, dict) and data, "[update_dataset][PRE] data must be a non-empty dictionary."
self.logger.info("[update_dataset][Enter] Updating dataset %s.", dataset_id)
response = self.network.request(
method="PUT",
endpoint=f"/dataset/{dataset_id}",
data=json.dumps(data),
headers={'Content-Type': 'application/json'}
)
response = cast(Dict, response)
self.logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id)
return response
with belief_scope("update_dataset"):
assert isinstance(dataset_id, int) and dataset_id > 0, "[update_dataset][PRE] dataset_id must be a positive integer."
assert isinstance(data, dict) and data, "[update_dataset][PRE] data must be a non-empty dictionary."
self.logger.info("[update_dataset][Enter] Updating dataset %s.", dataset_id)
response = self.network.request(
method="PUT",
endpoint=f"/dataset/{dataset_id}",
data=json.dumps(data),
headers={'Content-Type': 'application/json'}
)
response = cast(Dict, response)
self.logger.info("[update_dataset][Exit] Updated dataset %s.", dataset_id)
return response
# [/DEF:update_dataset:Function]
# [/DEF:SupersetClient:Class]
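update_dataset serialises the payload itself and sends it with an explicit JSON content type, so callers pass a plain dict. A usage sketch, assuming an authenticated client:

# Assumes `client` is an authenticated SupersetClient.
client.update_dataset(
    dataset_id=123,
    data={"description": "Orders fact table", "cache_timeout": 3600},
)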

View File

@@ -146,6 +146,7 @@ class NetworkError(SupersetToolError):
# @POST: Error is initialized with network failure context.
def __init__(self, message: str = "Network connection failed", **context: Any):
super().__init__(f"[NETWORK_FAILURE] {message}", context={"type": "network", **context})
# [/DEF:__init__:Function]
# [/DEF:NetworkError:Class]
# [DEF:FileOperationError:Class]

View File

@@ -7,7 +7,7 @@
# @RELATION: DEPENDS_ON -> pandas
# @RELATION: DEPENDS_ON -> psycopg2
# @PUBLIC_API: DatasetMapper
# [SECTION: IMPORTS]
import pandas as pd # type: ignore
import psycopg2 # type: ignore
@@ -16,26 +16,32 @@ from superset_tool.utils.init_clients import setup_clients
from superset_tool.utils.logger import SupersetLogger
from typing import Dict, List, Optional, Any
# [/SECTION]
# [DEF:DatasetMapper:Class]
# @PURPOSE: Класс для меппинга и обновления verbose_map в датасетах Superset.
class DatasetMapper:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the mapper.
# @PRE: logger должен быть экземпляром SupersetLogger.
# @POST: Объект DatasetMapper инициализирован.
def __init__(self, logger: SupersetLogger):
self.logger = logger
# [DEF:DatasetMapper.get_postgres_comments:Function]
# [/DEF:__init__:Function]
# [DEF:get_postgres_comments:Function]
# @PURPOSE: Извлекает комментарии к колонкам из системного каталога PostgreSQL.
# @PRE: `db_config` должен содержать валидные креды для подключения к PostgreSQL.
# @PRE: `table_name` и `table_schema` должны быть строками.
# @POST: Возвращается словарь с меппингом `column_name` -> `column_comment`.
# @PRE: db_config должен содержать валидные параметры подключения (host, port, user, password, dbname).
# @PRE: table_name и table_schema должны быть строками.
# @POST: Возвращается словарь, где ключи - имена колонок, значения - комментарии из БД.
# @THROW: Exception - При ошибках подключения или выполнения запроса к БД.
# @PARAM: db_config (Dict) - Конфигурация для подключения к БД.
# @PARAM: table_name (str) - Имя таблицы.
# @PARAM: table_schema (str) - Схема таблицы.
# @RETURN: Dict[str, str] - Словарь с комментариями к колонкам.
def get_postgres_comments(self, db_config: Dict, table_name: str, table_schema: str) -> Dict[str, str]:
self.logger.info("[get_postgres_comments][Enter] Fetching comments from PostgreSQL for %s.%s.", table_schema, table_name)
query = f"""
with self.logger.belief_scope("Fetch comments from PostgreSQL"):
self.logger.info("[get_postgres_comments][Enter] Fetching comments from PostgreSQL for %s.%s.", table_schema, table_name)
query = f"""
SELECT
cols.column_name,
CASE
@@ -73,41 +79,45 @@ class DatasetMapper:
information_schema.columns cols
WHERE cols.table_catalog = '{db_config.get('dbname')}' AND cols.table_name = '{table_name}' AND cols.table_schema = '{table_schema}';
"""
comments = {}
try:
with psycopg2.connect(**db_config) as conn, conn.cursor() as cursor:
cursor.execute(query)
for row in cursor.fetchall():
if row[1]:
comments[row[0]] = row[1]
self.logger.info("[get_postgres_comments][Success] Fetched %d comments.", len(comments))
except Exception as e:
self.logger.error("[get_postgres_comments][Failure] %s", e, exc_info=True)
raise
return comments
# [/DEF:DatasetMapper.get_postgres_comments:Function]
# [DEF:DatasetMapper.load_excel_mappings:Function]
comments = {}
try:
with psycopg2.connect(**db_config) as conn, conn.cursor() as cursor:
cursor.execute(query)
for row in cursor.fetchall():
if row[1]:
comments[row[0]] = row[1]
self.logger.info("[get_postgres_comments][Success] Fetched %d comments.", len(comments))
except Exception as e:
self.logger.error("[get_postgres_comments][Failure] %s", e, exc_info=True)
raise
return comments
# [/DEF:get_postgres_comments:Function]
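get_postgres_comments interpolates dbname, table_name, and table_schema directly into the SQL string. Below is a hedged alternative sketch that reads the same column comments from pg_catalog but passes the identifiers as bind parameters; this is not the module's implementation, only an illustration of the parameterised form:

import psycopg2  # type: ignore

def fetch_column_comments(db_config: dict, table_name: str, table_schema: str) -> dict:
    # Sketch only: same result shape as get_postgres_comments (column name -> comment),
    # but with bind parameters instead of f-string interpolation.
    query = """
        SELECT a.attname, d.description
        FROM pg_catalog.pg_attribute a
        JOIN pg_catalog.pg_class c ON c.oid = a.attrelid
        JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
        LEFT JOIN pg_catalog.pg_description d
               ON d.objoid = c.oid AND d.objsubid = a.attnum
        WHERE c.relname = %s AND n.nspname = %s
          AND a.attnum > 0 AND NOT a.attisdropped
    """
    comments = {}
    with psycopg2.connect(**db_config) as conn, conn.cursor() as cursor:
        cursor.execute(query, (table_name, table_schema))
        for column_name, comment in cursor.fetchall():
            if comment:
                comments[column_name] = comment
    return comments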
# [DEF:load_excel_mappings:Function]
# @PURPOSE: Загружает меппинги 'column_name' -> 'column_comment' из XLSX файла.
# @PRE: `file_path` должен быть валидным путем к XLSX файлу с колонками 'column_name' и 'column_comment'.
# @POST: Возвращается словарь с меппингами.
# @PRE: file_path должен указывать на существующий XLSX файл.
# @POST: Возвращается словарь с меппингами из файла.
# @THROW: Exception - При ошибках чтения файла или парсинга.
# @PARAM: file_path (str) - Путь к XLSX файлу.
# @RETURN: Dict[str, str] - Словарь с меппингами.
def load_excel_mappings(self, file_path: str) -> Dict[str, str]:
self.logger.info("[load_excel_mappings][Enter] Loading mappings from %s.", file_path)
try:
df = pd.read_excel(file_path)
mappings = df.set_index('column_name')['verbose_name'].to_dict()
self.logger.info("[load_excel_mappings][Success] Loaded %d mappings.", len(mappings))
return mappings
except Exception as e:
self.logger.error("[load_excel_mappings][Failure] %s", e, exc_info=True)
raise
# [/DEF:DatasetMapper.load_excel_mappings:Function]
# [DEF:DatasetMapper.run_mapping:Function]
with self.logger.belief_scope("Load mappings from Excel"):
self.logger.info("[load_excel_mappings][Enter] Loading mappings from %s.", file_path)
try:
df = pd.read_excel(file_path)
mappings = df.set_index('column_name')['verbose_name'].to_dict()
self.logger.info("[load_excel_mappings][Success] Loaded %d mappings.", len(mappings))
return mappings
except Exception as e:
self.logger.error("[load_excel_mappings][Failure] %s", e, exc_info=True)
raise
# [/DEF:load_excel_mappings:Function]
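load_excel_mappings reads the sheet with pandas and keys the result by 'column_name', taking the new captions from a 'verbose_name' column. The same transformation on an in-memory frame, for illustration:

import pandas as pd  # type: ignore

df = pd.DataFrame({
    "column_name": ["order_id", "created_at"],
    "verbose_name": ["Order ID", "Created at"],
})
mappings = df.set_index("column_name")["verbose_name"].to_dict()
# mappings == {"order_id": "Order ID", "created_at": "Created at"}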
# [DEF:run_mapping:Function]
# @PURPOSE: Основная функция для выполнения меппинга и обновления verbose_map датасета в Superset.
# @PRE: superset_client должен быть авторизован.
# @PRE: dataset_id должен быть существующим ID в Superset.
# @POST: Если найдены изменения, датасет в Superset обновлен через API.
# @RELATION: CALLS -> self.get_postgres_comments
# @RELATION: CALLS -> self.load_excel_mappings
# @RELATION: CALLS -> superset_client.get_dataset
@@ -120,110 +130,111 @@ class DatasetMapper:
# @PARAM: table_name (Optional[str]) - Имя таблицы в PostgreSQL.
# @PARAM: table_schema (Optional[str]) - Схема таблицы в PostgreSQL.
def run_mapping(self, superset_client: SupersetClient, dataset_id: int, source: str, postgres_config: Optional[Dict] = None, excel_path: Optional[str] = None, table_name: Optional[str] = None, table_schema: Optional[str] = None):
self.logger.info("[run_mapping][Enter] Starting dataset mapping for ID %d from source '%s'.", dataset_id, source)
mappings: Dict[str, str] = {}
try:
if source in ['postgres', 'both']:
assert postgres_config and table_name and table_schema, "Postgres config is required."
mappings.update(self.get_postgres_comments(postgres_config, table_name, table_schema))
if source in ['excel', 'both']:
assert excel_path, "Excel path is required."
mappings.update(self.load_excel_mappings(excel_path))
if source not in ['postgres', 'excel', 'both']:
self.logger.error("[run_mapping][Failure] Invalid source: %s.", source)
return
dataset_response = superset_client.get_dataset(dataset_id)
dataset_data = dataset_response['result']
with self.logger.belief_scope(f"Run dataset mapping for ID {dataset_id}"):
self.logger.info("[run_mapping][Enter] Starting dataset mapping for ID %d from source '%s'.", dataset_id, source)
mappings: Dict[str, str] = {}
original_columns = dataset_data.get('columns', [])
updated_columns = []
changes_made = False
for column in original_columns:
col_name = column.get('column_name')
try:
if source in ['postgres', 'both']:
assert postgres_config and table_name and table_schema, "Postgres config is required."
mappings.update(self.get_postgres_comments(postgres_config, table_name, table_schema))
if source in ['excel', 'both']:
assert excel_path, "Excel path is required."
mappings.update(self.load_excel_mappings(excel_path))
if source not in ['postgres', 'excel', 'both']:
self.logger.error("[run_mapping][Failure] Invalid source: %s.", source)
return
dataset_response = superset_client.get_dataset(dataset_id)
dataset_data = dataset_response['result']
new_column = {
"column_name": col_name,
"id": column.get("id"),
"advanced_data_type": column.get("advanced_data_type"),
"description": column.get("description"),
"expression": column.get("expression"),
"extra": column.get("extra"),
"filterable": column.get("filterable"),
"groupby": column.get("groupby"),
"is_active": column.get("is_active"),
"is_dttm": column.get("is_dttm"),
"python_date_format": column.get("python_date_format"),
"type": column.get("type"),
"uuid": column.get("uuid"),
"verbose_name": column.get("verbose_name"),
}
new_column = {k: v for k, v in new_column.items() if v is not None}
if col_name in mappings:
mapping_value = mappings[col_name]
if isinstance(mapping_value, str) and new_column.get('verbose_name') != mapping_value:
new_column['verbose_name'] = mapping_value
changes_made = True
updated_columns.append(new_column)
updated_metrics = []
for metric in dataset_data.get("metrics", []):
new_metric = {
"id": metric.get("id"),
"metric_name": metric.get("metric_name"),
"expression": metric.get("expression"),
"verbose_name": metric.get("verbose_name"),
"description": metric.get("description"),
"d3format": metric.get("d3format"),
"currency": metric.get("currency"),
"extra": metric.get("extra"),
"warning_text": metric.get("warning_text"),
"metric_type": metric.get("metric_type"),
"uuid": metric.get("uuid"),
}
updated_metrics.append({k: v for k, v in new_metric.items() if v is not None})
if changes_made:
payload_for_update = {
"database_id": dataset_data.get("database", {}).get("id"),
"table_name": dataset_data.get("table_name"),
"schema": dataset_data.get("schema"),
"columns": updated_columns,
"owners": [owner["id"] for owner in dataset_data.get("owners", [])],
"metrics": updated_metrics,
"extra": dataset_data.get("extra"),
"description": dataset_data.get("description"),
"sql": dataset_data.get("sql"),
"cache_timeout": dataset_data.get("cache_timeout"),
"catalog": dataset_data.get("catalog"),
"default_endpoint": dataset_data.get("default_endpoint"),
"external_url": dataset_data.get("external_url"),
"fetch_values_predicate": dataset_data.get("fetch_values_predicate"),
"filter_select_enabled": dataset_data.get("filter_select_enabled"),
"is_managed_externally": dataset_data.get("is_managed_externally"),
"is_sqllab_view": dataset_data.get("is_sqllab_view"),
"main_dttm_col": dataset_data.get("main_dttm_col"),
"normalize_columns": dataset_data.get("normalize_columns"),
"offset": dataset_data.get("offset"),
"template_params": dataset_data.get("template_params"),
}
payload_for_update = {k: v for k, v in payload_for_update.items() if v is not None}
superset_client.update_dataset(dataset_id, payload_for_update)
self.logger.info("[run_mapping][Success] Dataset %d columns' verbose_name updated.", dataset_id)
else:
self.logger.info("[run_mapping][State] No changes in columns' verbose_name, skipping update.")
except (AssertionError, FileNotFoundError, Exception) as e:
self.logger.error("[run_mapping][Failure] %s", e, exc_info=True)
return
# [/DEF:DatasetMapper.run_mapping:Function]
original_columns = dataset_data.get('columns', [])
updated_columns = []
changes_made = False
for column in original_columns:
col_name = column.get('column_name')
new_column = {
"column_name": col_name,
"id": column.get("id"),
"advanced_data_type": column.get("advanced_data_type"),
"description": column.get("description"),
"expression": column.get("expression"),
"extra": column.get("extra"),
"filterable": column.get("filterable"),
"groupby": column.get("groupby"),
"is_active": column.get("is_active"),
"is_dttm": column.get("is_dttm"),
"python_date_format": column.get("python_date_format"),
"type": column.get("type"),
"uuid": column.get("uuid"),
"verbose_name": column.get("verbose_name"),
}
new_column = {k: v for k, v in new_column.items() if v is not None}
if col_name in mappings:
mapping_value = mappings[col_name]
if isinstance(mapping_value, str) and new_column.get('verbose_name') != mapping_value:
new_column['verbose_name'] = mapping_value
changes_made = True
updated_columns.append(new_column)
updated_metrics = []
for metric in dataset_data.get("metrics", []):
new_metric = {
"id": metric.get("id"),
"metric_name": metric.get("metric_name"),
"expression": metric.get("expression"),
"verbose_name": metric.get("verbose_name"),
"description": metric.get("description"),
"d3format": metric.get("d3format"),
"currency": metric.get("currency"),
"extra": metric.get("extra"),
"warning_text": metric.get("warning_text"),
"metric_type": metric.get("metric_type"),
"uuid": metric.get("uuid"),
}
updated_metrics.append({k: v for k, v in new_metric.items() if v is not None})
if changes_made:
payload_for_update = {
"database_id": dataset_data.get("database", {}).get("id"),
"table_name": dataset_data.get("table_name"),
"schema": dataset_data.get("schema"),
"columns": updated_columns,
"owners": [owner["id"] for owner in dataset_data.get("owners", [])],
"metrics": updated_metrics,
"extra": dataset_data.get("extra"),
"description": dataset_data.get("description"),
"sql": dataset_data.get("sql"),
"cache_timeout": dataset_data.get("cache_timeout"),
"catalog": dataset_data.get("catalog"),
"default_endpoint": dataset_data.get("default_endpoint"),
"external_url": dataset_data.get("external_url"),
"fetch_values_predicate": dataset_data.get("fetch_values_predicate"),
"filter_select_enabled": dataset_data.get("filter_select_enabled"),
"is_managed_externally": dataset_data.get("is_managed_externally"),
"is_sqllab_view": dataset_data.get("is_sqllab_view"),
"main_dttm_col": dataset_data.get("main_dttm_col"),
"normalize_columns": dataset_data.get("normalize_columns"),
"offset": dataset_data.get("offset"),
"template_params": dataset_data.get("template_params"),
}
payload_for_update = {k: v for k, v in payload_for_update.items() if v is not None}
superset_client.update_dataset(dataset_id, payload_for_update)
self.logger.info("[run_mapping][Success] Dataset %d columns' verbose_name updated.", dataset_id)
else:
self.logger.info("[run_mapping][State] No changes in columns' verbose_name, skipping update.")
except (AssertionError, FileNotFoundError, Exception) as e:
self.logger.error("[run_mapping][Failure] %s", e, exc_info=True)
return
# [/DEF:run_mapping:Function]
# [/DEF:DatasetMapper:Class]
# [/DEF:superset_tool.utils.dataset_mapper:Module]

View File

@@ -28,6 +28,8 @@ from superset_tool.utils.logger import SupersetLogger
# [DEF:create_temp_file:Function]
# @PURPOSE: Контекстный менеджер для создания временного файла или директории с гарантированным удалением.
# @PRE: suffix должен быть строкой, определяющей тип ресурса.
# @POST: Временный ресурс создан и путь к нему возвращен; ресурс удален после выхода из контекста.
# @PARAM: content (Optional[bytes]) - Бинарное содержимое для записи во временный файл.
# @PARAM: suffix (str) - Суффикс ресурса. Если `.dir`, создается директория.
# @PARAM: mode (str) - Режим записи в файл (e.g., 'wb').
@@ -37,87 +39,99 @@ from superset_tool.utils.logger import SupersetLogger
@contextmanager
def create_temp_file(content: Optional[bytes] = None, suffix: str = ".zip", mode: str = 'wb', dry_run = False, logger: Optional[SupersetLogger] = None) -> Generator[Path, None, None]:
logger = logger or SupersetLogger(name="fileio")
resource_path = None
is_dir = suffix.startswith('.dir')
try:
if is_dir:
with tempfile.TemporaryDirectory(suffix=suffix) as temp_dir:
resource_path = Path(temp_dir)
logger.debug("[create_temp_file][State] Created temporary directory: %s", resource_path)
with logger.belief_scope("Create temporary resource"):
resource_path = None
is_dir = suffix.startswith('.dir')
try:
if is_dir:
with tempfile.TemporaryDirectory(suffix=suffix) as temp_dir:
resource_path = Path(temp_dir)
logger.debug("[create_temp_file][State] Created temporary directory: %s", resource_path)
yield resource_path
else:
fd, temp_path_str = tempfile.mkstemp(suffix=suffix)
resource_path = Path(temp_path_str)
os.close(fd)
if content:
resource_path.write_bytes(content)
logger.debug("[create_temp_file][State] Created temporary file: %s", resource_path)
yield resource_path
else:
fd, temp_path_str = tempfile.mkstemp(suffix=suffix)
resource_path = Path(temp_path_str)
os.close(fd)
if content:
resource_path.write_bytes(content)
logger.debug("[create_temp_file][State] Created temporary file: %s", resource_path)
yield resource_path
finally:
if resource_path and resource_path.exists() and not dry_run:
try:
if resource_path.is_dir():
shutil.rmtree(resource_path)
logger.debug("[create_temp_file][Cleanup] Removed temporary directory: %s", resource_path)
else:
resource_path.unlink()
logger.debug("[create_temp_file][Cleanup] Removed temporary file: %s", resource_path)
except OSError as e:
logger.error("[create_temp_file][Failure] Error during cleanup of %s: %s", resource_path, e)
finally:
if resource_path and resource_path.exists() and not dry_run:
try:
if resource_path.is_dir():
shutil.rmtree(resource_path)
logger.debug("[create_temp_file][Cleanup] Removed temporary directory: %s", resource_path)
else:
resource_path.unlink()
logger.debug("[create_temp_file][Cleanup] Removed temporary file: %s", resource_path)
except OSError as e:
logger.error("[create_temp_file][Failure] Error during cleanup of %s: %s", resource_path, e)
# [/DEF:create_temp_file:Function]
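create_temp_file yields either a temporary file (optionally pre-filled with content) or, when the suffix starts with '.dir', a temporary directory, and removes the resource on exit unless dry_run is set. A usage sketch:

# Usage sketch; create_temp_file is the context manager defined above.
with create_temp_file(content=b"example bytes", suffix=".zip") as tmp_zip:
    print(tmp_zip, tmp_zip.stat().st_size)

with create_temp_file(suffix=".dir") as tmp_dir:
    (tmp_dir / "metadata.yaml").write_text("version: 1.0.0\n")
# both paths are removed once the blocks exit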
# [DEF:remove_empty_directories:Function]
# @PURPOSE: Рекурсивно удаляет все пустые поддиректории, начиная с указанного пути.
# @PRE: root_dir должен быть путем к существующей директории.
# @POST: Все пустые поддиректории удалены, возвращено их количество.
# @PARAM: root_dir (str) - Путь к корневой директории для очистки.
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
# @RETURN: int - Количество удаленных директорий.
def remove_empty_directories(root_dir: str, logger: Optional[SupersetLogger] = None) -> int:
logger = logger or SupersetLogger(name="fileio")
logger.info("[remove_empty_directories][Enter] Starting cleanup of empty directories in %s", root_dir)
removed_count = 0
if not os.path.isdir(root_dir):
logger.error("[remove_empty_directories][Failure] Directory not found: %s", root_dir)
return 0
for current_dir, _, _ in os.walk(root_dir, topdown=False):
if not os.listdir(current_dir):
try:
os.rmdir(current_dir)
removed_count += 1
logger.info("[remove_empty_directories][State] Removed empty directory: %s", current_dir)
except OSError as e:
logger.error("[remove_empty_directories][Failure] Failed to remove %s: %s", current_dir, e)
logger.info("[remove_empty_directories][Exit] Removed %d empty directories.", removed_count)
return removed_count
with logger.belief_scope(f"Remove empty directories in {root_dir}"):
logger.info("[remove_empty_directories][Enter] Starting cleanup of empty directories in %s", root_dir)
removed_count = 0
if not os.path.isdir(root_dir):
logger.error("[remove_empty_directories][Failure] Directory not found: %s", root_dir)
return 0
for current_dir, _, _ in os.walk(root_dir, topdown=False):
if not os.listdir(current_dir):
try:
os.rmdir(current_dir)
removed_count += 1
logger.info("[remove_empty_directories][State] Removed empty directory: %s", current_dir)
except OSError as e:
logger.error("[remove_empty_directories][Failure] Failed to remove %s: %s", current_dir, e)
logger.info("[remove_empty_directories][Exit] Removed %d empty directories.", removed_count)
return removed_count
# [/DEF:remove_empty_directories:Function]
# [DEF:read_dashboard_from_disk:Function]
# @PURPOSE: Читает бинарное содержимое файла с диска.
# @PRE: file_path должен указывать на существующий файл.
# @POST: Возвращает байты содержимого и имя файла.
# @PARAM: file_path (str) - Путь к файлу.
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
# @RETURN: Tuple[bytes, str] - Кортеж (содержимое, имя файла).
# @THROW: FileNotFoundError - Если файл не найден.
def read_dashboard_from_disk(file_path: str, logger: Optional[SupersetLogger] = None) -> Tuple[bytes, str]:
logger = logger or SupersetLogger(name="fileio")
path = Path(file_path)
assert path.is_file(), f"Файл дашборда не найден: {file_path}"
logger.info("[read_dashboard_from_disk][Enter] Reading file: %s", file_path)
content = path.read_bytes()
if not content:
logger.warning("[read_dashboard_from_disk][Warning] File is empty: %s", file_path)
return content, path.name
with logger.belief_scope(f"Read dashboard from {file_path}"):
path = Path(file_path)
assert path.is_file(), f"Файл дашборда не найден: {file_path}"
logger.info("[read_dashboard_from_disk][Enter] Reading file: %s", file_path)
content = path.read_bytes()
if not content:
logger.warning("[read_dashboard_from_disk][Warning] File is empty: %s", file_path)
return content, path.name
# [/DEF:read_dashboard_from_disk:Function]
# [DEF:calculate_crc32:Function]
# @PURPOSE: Вычисляет контрольную сумму CRC32 для файла.
# @PRE: file_path должен быть объектом Path к существующему файлу.
# @POST: Возвращает 8-значную hex-строку CRC32.
# @PARAM: file_path (Path) - Путь к файлу.
# @RETURN: str - 8-значное шестнадцатеричное представление CRC32.
# @THROW: IOError - При ошибках чтения файла.
def calculate_crc32(file_path: Path) -> str:
with open(file_path, 'rb') as f:
crc32_value = zlib.crc32(f.read())
return f"{crc32_value:08x}"
logger = SupersetLogger(name="fileio")
with logger.belief_scope(f"Calculate CRC32 for {file_path}"):
with open(file_path, 'rb') as f:
crc32_value = zlib.crc32(f.read())
return f"{crc32_value:08x}"
# [/DEF:calculate_crc32:Function]
# [SECTION: DATA_CLASSES]
# [DEF:RetentionPolicy:DataClass]
# @PURPOSE: Определяет политику хранения для архивов (ежедневные, еженедельные, ежемесячные).
@dataclass
@@ -126,9 +140,12 @@ class RetentionPolicy:
weekly: int = 4
monthly: int = 12
# [/DEF:RetentionPolicy:DataClass]
# [/SECTION]
# [DEF:archive_exports:Function]
# @PURPOSE: Управляет архивом экспортированных файлов, применяя политику хранения и дедупликацию.
# @PRE: output_dir должен быть путем к существующей директории.
# @POST: Старые или дублирующиеся архивы удалены согласно политике.
# @RELATION: CALLS -> apply_retention_policy
# @RELATION: CALLS -> calculate_crc32
# @PARAM: output_dir (str) - Директория с архивами.
@@ -137,113 +154,119 @@ class RetentionPolicy:
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
def archive_exports(output_dir: str, policy: RetentionPolicy, deduplicate: bool = False, logger: Optional[SupersetLogger] = None) -> None:
logger = logger or SupersetLogger(name="fileio")
output_path = Path(output_dir)
if not output_path.is_dir():
logger.warning("[archive_exports][Skip] Archive directory not found: %s", output_dir)
return
with logger.belief_scope(f"Archive exports in {output_dir}"):
output_path = Path(output_dir)
if not output_path.is_dir():
logger.warning("[archive_exports][Skip] Archive directory not found: %s", output_dir)
return
logger.info("[archive_exports][Enter] Managing archive in %s", output_dir)
# 1. Collect all zip files
zip_files = list(output_path.glob("*.zip"))
if not zip_files:
logger.info("[archive_exports][State] No zip files found in %s", output_dir)
return
logger.info("[archive_exports][Enter] Managing archive in %s", output_dir)
# 1. Collect all zip files
zip_files = list(output_path.glob("*.zip"))
if not zip_files:
logger.info("[archive_exports][State] No zip files found in %s", output_dir)
return
# 2. Deduplication
if deduplicate:
logger.info("[archive_exports][State] Starting deduplication...")
checksums = {}
files_to_remove = []
# Sort by modification time (newest first) to keep the latest version
zip_files.sort(key=lambda f: f.stat().st_mtime, reverse=True)
# 2. Deduplication
if deduplicate:
logger.info("[archive_exports][State] Starting deduplication...")
checksums = {}
files_to_remove = []
# Sort by modification time (newest first) to keep the latest version
zip_files.sort(key=lambda f: f.stat().st_mtime, reverse=True)
for file_path in zip_files:
try:
crc = calculate_crc32(file_path)
if crc in checksums:
files_to_remove.append(file_path)
logger.debug("[archive_exports][State] Duplicate found: %s (same as %s)", file_path.name, checksums[crc].name)
else:
checksums[crc] = file_path
except Exception as e:
logger.error("[archive_exports][Failure] Failed to calculate CRC32 for %s: %s", file_path, e)
for f in files_to_remove:
try:
f.unlink()
zip_files.remove(f)
logger.info("[archive_exports][State] Removed duplicate: %s", f.name)
except OSError as e:
logger.error("[archive_exports][Failure] Failed to remove duplicate %s: %s", f, e)
# 3. Retention Policy
files_with_dates = []
for file_path in zip_files:
try:
crc = calculate_crc32(file_path)
if crc in checksums:
files_to_remove.append(file_path)
logger.debug("[archive_exports][State] Duplicate found: %s (same as %s)", file_path.name, checksums[crc].name)
else:
checksums[crc] = file_path
except Exception as e:
logger.error("[archive_exports][Failure] Failed to calculate CRC32 for %s: %s", file_path, e)
for f in files_to_remove:
try:
f.unlink()
zip_files.remove(f)
logger.info("[archive_exports][State] Removed duplicate: %s", f.name)
except OSError as e:
logger.error("[archive_exports][Failure] Failed to remove duplicate %s: %s", f, e)
# Try to extract date from filename
# Pattern: ..._YYYYMMDD_HHMMSS.zip or ..._YYYYMMDD.zip
match = re.search(r'_(\d{8})_', file_path.name)
file_date = None
if match:
try:
date_str = match.group(1)
file_date = datetime.strptime(date_str, "%Y%m%d").date()
except ValueError:
pass
if not file_date:
# Fallback to modification time
file_date = datetime.fromtimestamp(file_path.stat().st_mtime).date()
files_with_dates.append((file_path, file_date))
# 3. Retention Policy
files_with_dates = []
for file_path in zip_files:
# Try to extract date from filename
# Pattern: ..._YYYYMMDD_HHMMSS.zip or ..._YYYYMMDD.zip
match = re.search(r'_(\d{8})_', file_path.name)
file_date = None
if match:
try:
date_str = match.group(1)
file_date = datetime.strptime(date_str, "%Y%m%d").date()
except ValueError:
pass
files_to_keep = apply_retention_policy(files_with_dates, policy, logger)
if not file_date:
# Fallback to modification time
file_date = datetime.fromtimestamp(file_path.stat().st_mtime).date()
files_with_dates.append((file_path, file_date))
files_to_keep = apply_retention_policy(files_with_dates, policy, logger)
for file_path, _ in files_with_dates:
if file_path not in files_to_keep:
try:
file_path.unlink()
logger.info("[archive_exports][State] Removed by retention policy: %s", file_path.name)
except OSError as e:
logger.error("[archive_exports][Failure] Failed to remove %s: %s", file_path, e)
for file_path, _ in files_with_dates:
if file_path not in files_to_keep:
try:
file_path.unlink()
logger.info("[archive_exports][State] Removed by retention policy: %s", file_path.name)
except OSError as e:
logger.error("[archive_exports][Failure] Failed to remove %s: %s", file_path, e)
# [/DEF:archive_exports:Function]
# [DEF:apply_retention_policy:Function]
# @PURPOSE: (Helper) Применяет политику хранения к списку файлов, возвращая те, что нужно сохранить.
# @PRE: files_with_dates is a list of (Path, date) tuples.
# @POST: Returns a set of files to keep.
# @PARAM: files_with_dates (List[Tuple[Path, date]]) - Список файлов с датами.
# @PARAM: policy (RetentionPolicy) - Политика хранения.
# @PARAM: logger (SupersetLogger) - Логгер.
# @RETURN: set - Множество путей к файлам, которые должны быть сохранены.
def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: RetentionPolicy, logger: SupersetLogger) -> set:
# Сортируем по дате (от новой к старой)
sorted_files = sorted(files_with_dates, key=lambda x: x[1], reverse=True)
# Словарь для хранения файлов по категориям
daily_files = []
weekly_files = []
monthly_files = []
today = date.today()
for file_path, file_date in sorted_files:
# Ежедневные
if (today - file_date).days < policy.daily:
daily_files.append(file_path)
# Еженедельные
elif (today - file_date).days < policy.weekly * 7:
weekly_files.append(file_path)
# Ежемесячные
elif (today - file_date).days < policy.monthly * 30:
monthly_files.append(file_path)
# Возвращаем множество файлов, которые нужно сохранить
files_to_keep = set()
files_to_keep.update(daily_files)
files_to_keep.update(weekly_files[:policy.weekly])
files_to_keep.update(monthly_files[:policy.monthly])
logger.debug("[apply_retention_policy][State] Keeping %d files according to retention policy", len(files_to_keep))
return files_to_keep
def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: RetentionPolicy, logger: SupersetLogger) -> set:
with logger.belief_scope("Apply retention policy"):
# Сортируем по дате (от новой к старой)
sorted_files = sorted(files_with_dates, key=lambda x: x[1], reverse=True)
# Словарь для хранения файлов по категориям
daily_files = []
weekly_files = []
monthly_files = []
today = date.today()
for file_path, file_date in sorted_files:
# Ежедневные
if (today - file_date).days < policy.daily:
daily_files.append(file_path)
# Еженедельные
elif (today - file_date).days < policy.weekly * 7:
weekly_files.append(file_path)
# Ежемесячные
elif (today - file_date).days < policy.monthly * 30:
monthly_files.append(file_path)
# Возвращаем множество файлов, которые нужно сохранить
files_to_keep = set()
files_to_keep.update(daily_files)
files_to_keep.update(weekly_files[:policy.weekly])
files_to_keep.update(monthly_files[:policy.monthly])
logger.debug("[apply_retention_policy][State] Keeping %d files according to retention policy", len(files_to_keep))
return files_to_keep
# [/DEF:apply_retention_policy:Function]
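apply_retention_policy buckets files by age: everything younger than policy.daily days is kept, then at most policy.weekly files younger than policy.weekly * 7 days, then at most policy.monthly files younger than policy.monthly * 30 days; anything older falls through and is dropped. A worked sketch with explicit policy values (SupersetLogger assumed importable, as elsewhere in this module):

from datetime import date, timedelta
from pathlib import Path

policy = RetentionPolicy(daily=7, weekly=4, monthly=12)
today = date.today()
files_with_dates = [
    (Path("export_a.zip"), today - timedelta(days=2)),    # daily window
    (Path("export_b.zip"), today - timedelta(days=10)),   # weekly bucket
    (Path("export_c.zip"), today - timedelta(days=45)),   # monthly bucket
    (Path("export_d.zip"), today - timedelta(days=400)),  # outside every window
]
keep = apply_retention_policy(files_with_dates, policy, SupersetLogger(name="fileio"))
# keep contains export_a, export_b and export_c; export_d is dropped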
# [DEF:save_and_unpack_dashboard:Function]
# @PURPOSE: Сохраняет бинарное содержимое ZIP-архива на диск и опционально распаковывает его.
# @PRE: zip_content должен быть байтами валидного ZIP-архива.
# @POST: ZIP-файл сохранен, и если unpack=True, он распакован в output_dir.
# @PARAM: zip_content (bytes) - Содержимое ZIP-архива.
# @PARAM: output_dir (Union[str, Path]) - Директория для сохранения.
# @PARAM: unpack (bool) - Флаг, нужно ли распаковывать архив.
@@ -253,27 +276,30 @@ def apply_retention_policy(files_with_dates: List[Tuple[Path, date]], policy: Re
# @THROW: InvalidZipFormatError - При ошибке формата ZIP.
def save_and_unpack_dashboard(zip_content: bytes, output_dir: Union[str, Path], unpack: bool = False, original_filename: Optional[str] = None, logger: Optional[SupersetLogger] = None) -> Tuple[Path, Optional[Path]]:
logger = logger or SupersetLogger(name="fileio")
logger.info("[save_and_unpack_dashboard][Enter] Processing dashboard. Unpack: %s", unpack)
try:
output_path = Path(output_dir)
output_path.mkdir(parents=True, exist_ok=True)
zip_name = sanitize_filename(original_filename) if original_filename else f"dashboard_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip"
zip_path = output_path / zip_name
zip_path.write_bytes(zip_content)
logger.info("[save_and_unpack_dashboard][State] Dashboard saved to: %s", zip_path)
if unpack:
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
zip_ref.extractall(output_path)
logger.info("[save_and_unpack_dashboard][State] Dashboard unpacked to: %s", output_path)
return zip_path, output_path
return zip_path, None
except zipfile.BadZipFile as e:
logger.error("[save_and_unpack_dashboard][Failure] Invalid ZIP archive: %s", e)
raise InvalidZipFormatError(f"Invalid ZIP file: {e}") from e
with logger.belief_scope("Save and unpack dashboard"):
logger.info("[save_and_unpack_dashboard][Enter] Processing dashboard. Unpack: %s", unpack)
try:
output_path = Path(output_dir)
output_path.mkdir(parents=True, exist_ok=True)
zip_name = sanitize_filename(original_filename) if original_filename else f"dashboard_export_{datetime.now().strftime('%Y%m%d_%H%M%S')}.zip"
zip_path = output_path / zip_name
zip_path.write_bytes(zip_content)
logger.info("[save_and_unpack_dashboard][State] Dashboard saved to: %s", zip_path)
if unpack:
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
zip_ref.extractall(output_path)
logger.info("[save_and_unpack_dashboard][State] Dashboard unpacked to: %s", output_path)
return zip_path, output_path
return zip_path, None
except zipfile.BadZipFile as e:
logger.error("[save_and_unpack_dashboard][Failure] Invalid ZIP archive: %s", e)
raise InvalidZipFormatError(f"Invalid ZIP file: {e}") from e
# [/DEF:save_and_unpack_dashboard:Function]
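# --- Illustrative usage (editorial sketch) ---
# The ZIP bytes would normally come from the export API; here a local file stands in.
from pathlib import Path
from superset_tool.utils.fileio import save_and_unpack_dashboard

zip_bytes = Path("dashboard_export.zip").read_bytes()
saved_zip, unpacked_dir = save_and_unpack_dashboard(zip_bytes, output_dir="exports", unpack=True)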
# [DEF:update_yamls:Function]
# @PURPOSE: Обновляет конфигурации в YAML-файлах, заменяя значения или применяя regex.
# @PRE: path должен быть существующей директорией.
# @POST: Все YAML файлы в директории обновлены согласно переданным параметрам.
# @RELATION: CALLS -> _update_yaml_file
# @THROW: FileNotFoundError - Если `path` не существует.
# @PARAM: db_configs (Optional[List[Dict]]) - Список конфигураций для замены.
@@ -283,79 +309,90 @@ def save_and_unpack_dashboard(zip_content: bytes, output_dir: Union[str, Path],
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
def update_yamls(db_configs: Optional[List[Dict[str, Any]]] = None, path: str = "dashboards", regexp_pattern: Optional[LiteralString] = None, replace_string: Optional[LiteralString] = None, logger: Optional[SupersetLogger] = None) -> None:
logger = logger or SupersetLogger(name="fileio")
logger.info("[update_yamls][Enter] Starting YAML configuration update.")
dir_path = Path(path)
assert dir_path.is_dir(), f"Путь {path} не существует или не является директорией"
configs: List[Dict[str, Any]] = db_configs or []
for file_path in dir_path.rglob("*.yaml"):
_update_yaml_file(file_path, configs, regexp_pattern, replace_string, logger)
with logger.belief_scope("Update YAML configurations"):
logger.info("[update_yamls][Enter] Starting YAML configuration update.")
dir_path = Path(path)
assert dir_path.is_dir(), f"Путь {path} не существует или не является директорией"
configs: List[Dict[str, Any]] = db_configs or []
for file_path in dir_path.rglob("*.yaml"):
_update_yaml_file(file_path, configs, regexp_pattern, replace_string, logger)
# [/DEF:update_yamls:Function]
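# --- Illustrative usage (editorial sketch; all values are made up) ---
# db_configs follows the {'old': {...}, 'new': {...}} shape handled by _update_yaml_file below.
from superset_tool.utils.fileio import update_yamls

update_yamls(
    db_configs=[{
        "old": {"sqlalchemy_uri": "postgresql://prod-host/superset"},
        "new": {"sqlalchemy_uri": "postgresql://stage-host/superset"},
    }],
    path="dashboards",
)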
# [DEF:_update_yaml_file:Function]
# @PURPOSE: (Helper) Обновляет один YAML файл.
# @PRE: file_path должен быть объектом Path к существующему YAML файлу.
# @POST: Файл обновлен согласно переданным конфигурациям или регулярному выражению.
# @PARAM: file_path (Path) - Путь к файлу.
# @PARAM: db_configs (List[Dict]) - Конфигурации.
# @PARAM: regexp_pattern (Optional[str]) - Паттерн.
# @PARAM: replace_string (Optional[str]) - Замена.
# @PARAM: logger (SupersetLogger) - Логгер.
def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_pattern: Optional[str], replace_string: Optional[str], logger: SupersetLogger) -> None:
    with logger.belief_scope(f"Update YAML file: {file_path}"):
        # Читаем содержимое файла
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()
        except Exception as e:
            logger.error("[_update_yaml_file][Failure] Failed to read %s: %s", file_path, e)
            return
        # Если задан pattern и replace_string, применяем замену по регулярному выражению
        if regexp_pattern and replace_string:
            try:
                new_content = re.sub(regexp_pattern, replace_string, content)
                if new_content != content:
                    with open(file_path, 'w', encoding='utf-8') as f:
                        f.write(new_content)
                    logger.info("[_update_yaml_file][State] Updated %s using regex pattern", file_path)
            except Exception as e:
                logger.error("[_update_yaml_file][Failure] Error applying regex to %s: %s", file_path, e)
        # Если заданы конфигурации, заменяем значения (поддержка old/new)
        if db_configs:
            try:
                # Прямой текстовый заменитель для старых/новых значений, чтобы сохранить структуру файла
                modified_content = content
                for cfg in db_configs:
                    # Ожидаем структуру: {'old': {...}, 'new': {...}}
                    old_cfg = cfg.get('old', {})
                    new_cfg = cfg.get('new', {})
                    for key, old_val in old_cfg.items():
                        if key in new_cfg:
                            new_val = new_cfg[key]
                            # Заменяем только точные совпадения старого значения в тексте YAML, используя ключ для контекста
                            if isinstance(old_val, str):
                                # Ищем паттерн: key: "value" или key: value
                                key_pattern = re.escape(key)
                                val_pattern = re.escape(old_val)
                                # Группы: 1=ключ+разделитель, 2=открывающая кавычка (опц), 3=значение, 4=закрывающая кавычка (опц)
                                pattern = rf'({key_pattern}\s*:\s*)(["\']?)({val_pattern})(["\']?)'
                                # [DEF:replacer:Function]
                                # @PURPOSE: Функция замены, сохраняющая кавычки если они были.
                                # @PRE: match должен быть объектом совпадения регулярного выражения.
                                # @POST: Возвращает строку с новым значением, сохраняя префикс и кавычки.
                                def replacer(match):
                                    with logger.belief_scope("replacer"):
                                        prefix = match.group(1)
                                        quote_open = match.group(2)
                                        quote_close = match.group(4)
                                        return f"{prefix}{quote_open}{new_val}{quote_close}"
                                # [/DEF:replacer:Function]
                                modified_content = re.sub(pattern, replacer, modified_content)
                                logger.info("[_update_yaml_file][State] Replaced '%s' with '%s' for key %s in %s", old_val, new_val, key, file_path)
                # Записываем обратно изменённый контент без парсинга YAML, сохраняем оригинальное форматирование
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(modified_content)
            except Exception as e:
                logger.error("[_update_yaml_file][Failure] Error performing raw replacement in %s: %s", file_path, e)
# [/DEF:_update_yaml_file:Function]
# [DEF:create_dashboard_export:Function]
# @PURPOSE: Создает ZIP-архив из указанных исходных путей.
# @PRE: source_paths должен содержать существующие пути.
# @POST: ZIP-архив создан по пути zip_path.
# @PARAM: zip_path (Union[str, Path]) - Путь для сохранения ZIP архива.
# @PARAM: source_paths (List[Union[str, Path]]) - Список исходных путей для архивации.
# @PARAM: exclude_extensions (Optional[List[str]]) - Список расширений для исключения.
@@ -363,96 +400,108 @@ def _update_yaml_file(file_path: Path, db_configs: List[Dict[str, Any]], regexp_
# @RETURN: bool - `True` при успехе, `False` при ошибке.
def create_dashboard_export(zip_path: Union[str, Path], source_paths: List[Union[str, Path]], exclude_extensions: Optional[List[str]] = None, logger: Optional[SupersetLogger] = None) -> bool:
logger = logger or SupersetLogger(name="fileio")
logger.info("[create_dashboard_export][Enter] Packing dashboard: %s -> %s", source_paths, zip_path)
try:
exclude_ext = [ext.lower() for ext in exclude_extensions or []]
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
for src_path_str in source_paths:
src_path = Path(src_path_str)
assert src_path.exists(), f"Путь не найден: {src_path}"
for item in src_path.rglob('*'):
if item.is_file() and item.suffix.lower() not in exclude_ext:
arcname = item.relative_to(src_path.parent)
zipf.write(item, arcname)
logger.info("[create_dashboard_export][Exit] Archive created: %s", zip_path)
return True
except (IOError, zipfile.BadZipFile, AssertionError) as e:
logger.error("[create_dashboard_export][Failure] Error: %s", e, exc_info=True)
return False
with logger.belief_scope(f"Create dashboard export: {zip_path}"):
logger.info("[create_dashboard_export][Enter] Packing dashboard: %s -> %s", source_paths, zip_path)
try:
exclude_ext = [ext.lower() for ext in exclude_extensions or []]
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
for src_path_str in source_paths:
src_path = Path(src_path_str)
assert src_path.exists(), f"Путь не найден: {src_path}"
for item in src_path.rglob('*'):
if item.is_file() and item.suffix.lower() not in exclude_ext:
arcname = item.relative_to(src_path.parent)
zipf.write(item, arcname)
logger.info("[create_dashboard_export][Exit] Archive created: %s", zip_path)
return True
except (IOError, zipfile.BadZipFile, AssertionError) as e:
logger.error("[create_dashboard_export][Failure] Error: %s", e, exc_info=True)
return False
# [/DEF:create_dashboard_export:Function]
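# --- Illustrative usage (editorial sketch; paths are made up) ---
from superset_tool.utils.fileio import create_dashboard_export

exported_ok = create_dashboard_export(
    zip_path="exports/dashboard_repacked.zip",
    source_paths=["exports/my_dashboard"],
    exclude_extensions=[".log", ".tmp"],
)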
# [DEF:sanitize_filename:Function]
# @PURPOSE: Очищает строку от символов, недопустимых в именах файлов.
# @PRE: filename должен быть строкой.
# @POST: Возвращает строку без спецсимволов.
# @PARAM: filename (str) - Исходное имя файла.
# @RETURN: str - Очищенная строка.
def sanitize_filename(filename: str) -> str:
return re.sub(r'[\\/*?:"<>|]', "_", filename).strip()
logger = SupersetLogger(name="fileio")
with logger.belief_scope(f"Sanitize filename: {filename}"):
return re.sub(r'[\\/*?:"<>|]', "_", filename).strip()
# [/DEF:sanitize_filename:Function]
# [DEF:get_filename_from_headers:Function]
# @PURPOSE: Извлекает имя файла из HTTP заголовка 'Content-Disposition'.
# @PRE: headers должен быть словарем заголовков.
# @POST: Возвращает имя файла или None, если заголовок отсутствует.
# @PARAM: headers (dict) - Словарь HTTP заголовков.
# @RETURN: Optional[str] - Имя файла или `None`.
# @RETURN: Optional[str] - Имя файла or `None`.
def get_filename_from_headers(headers: dict) -> Optional[str]:
content_disposition = headers.get("Content-Disposition", "")
if match := re.search(r'filename="?([^"]+)"?', content_disposition):
return match.group(1).strip()
return None
logger = SupersetLogger(name="fileio")
with logger.belief_scope("Get filename from headers"):
content_disposition = headers.get("Content-Disposition", "")
if match := re.search(r'filename="?([^"]+)"?', content_disposition):
return match.group(1).strip()
return None
# [/DEF:get_filename_from_headers:Function]
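# --- Illustrative usage (editorial sketch) ---
# Typical flow: take the server-suggested name from the export response, then sanitize it.
from superset_tool.utils.fileio import get_filename_from_headers, sanitize_filename

response_headers = {"Content-Disposition": 'attachment; filename="sales: 2024 export.zip"'}
suggested = get_filename_from_headers(response_headers)             # -> 'sales: 2024 export.zip'
safe_name = sanitize_filename(suggested or "dashboard_export.zip")  # -> 'sales_ 2024 export.zip'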
# [DEF:consolidate_archive_folders:Function]
# @PURPOSE: Консолидирует директории архивов на основе общего слага в имени.
# @PRE: root_directory должен быть объектом Path к существующей директории.
# @POST: Директории с одинаковым префиксом объединены в одну.
# @THROW: TypeError, ValueError - Если `root_directory` невалиден.
# @PARAM: root_directory (Path) - Корневая директория для консолидации.
# @PARAM: logger (Optional[SupersetLogger]) - Экземпляр логгера.
def consolidate_archive_folders(root_directory: Path, logger: Optional[SupersetLogger] = None) -> None:
    logger = logger or SupersetLogger(name="fileio")
    with logger.belief_scope(f"Consolidate archives in {root_directory}"):
        assert isinstance(root_directory, Path), "root_directory must be a Path object."
        assert root_directory.is_dir(), "root_directory must be an existing directory."
        logger.info("[consolidate_archive_folders][Enter] Consolidating archives in %s", root_directory)
        # Собираем все директории с архивами
        archive_dirs = []
        for item in root_directory.iterdir():
            if item.is_dir():
                # Проверяем, есть ли в директории ZIP-архивы
                if any(item.glob("*.zip")):
                    archive_dirs.append(item)
        # Группируем по слагу (части имени до первого '_')
        slug_groups = {}
        for dir_path in archive_dirs:
            dir_name = dir_path.name
            slug = dir_name.split('_')[0] if '_' in dir_name else dir_name
            if slug not in slug_groups:
                slug_groups[slug] = []
            slug_groups[slug].append(dir_path)
        # Для каждой группы консолидируем
        for slug, dirs in slug_groups.items():
            if len(dirs) <= 1:
                continue
            # Создаем целевую директорию
            target_dir = root_directory / slug
            target_dir.mkdir(exist_ok=True)
            logger.info("[consolidate_archive_folders][State] Consolidating %d directories under %s", len(dirs), target_dir)
            # Перемещаем содержимое
            for source_dir in dirs:
                if source_dir == target_dir:
                    continue
                for item in source_dir.iterdir():
                    dest_item = target_dir / item.name
                    try:
                        if item.is_dir():
                            shutil.move(str(item), str(dest_item))
                        else:
                            shutil.move(str(item), str(dest_item))
                    except Exception as e:
                        logger.error("[consolidate_archive_folders][Failure] Failed to move %s to %s: %s", item, dest_item, e)
                # Удаляем исходную директорию
                try:
                    source_dir.rmdir()
                    logger.info("[consolidate_archive_folders][State] Removed source directory: %s", source_dir)
                except Exception as e:
                    logger.error("[consolidate_archive_folders][Failure] Failed to remove source directory %s: %s", source_dir, e)
# [/DEF:consolidate_archive_folders:Function]
# [/DEF:superset_tool.utils.fileio:Module]

View File

@@ -13,14 +13,31 @@ import logging
import sys
from datetime import datetime
from pathlib import Path
from typing import Optional, Any, Mapping
from typing import Optional, Any, Mapping, Generator
from contextlib import contextmanager
# [/SECTION]
# [DEF:belief_scope:Function]
# @PURPOSE: Context manager for belief state logging to maintain execution coherence.
# @PRE: scope_id must be a string.
# @POST: Entry and exit actions are logged.
# @PARAM: scope_id (str) - Identifier for the logical scope.
@contextmanager
def belief_scope(scope_id: str) -> Generator[None, None, None]:
"""Context manager for belief state logging."""
logger = logging.getLogger("superset_tool")
logger.debug(f"[BELIEF_ENTRY] {scope_id}")
try:
yield
finally:
logger.debug(f"[BELIEF_EXIT] {scope_id}")
# [/DEF:belief_scope:Function]
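# --- Illustrative usage (editorial sketch) ---
# The module-level context manager just brackets a logical step with DEBUG entry/exit records.
from superset_tool.utils.logger import belief_scope

def refresh_metadata() -> None:  # hypothetical caller, not defined in this diff
    with belief_scope("refresh_metadata"):
        ...  # work for this step; [BELIEF_ENTRY]/[BELIEF_EXIT] are logged around it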
# [DEF:SupersetLogger:Class]
# @PURPOSE: Обёртка над `logging.Logger`, которая упрощает конфигурацию и использование логгеров.
# @RELATION: WRAPS -> logging.Logger
class SupersetLogger:
# [DEF:SupersetLogger.__init__:Function]
# [DEF:__init__:Function]
# @PURPOSE: Конфигурирует и инициализирует логгер, добавляя обработчики для файла и/или консоли.
# @PRE: Если log_dir указан, путь должен быть валидным (или создаваемым).
# @POST: `self.logger` готов к использованию с настроенными обработчиками.
@@ -29,78 +46,110 @@ class SupersetLogger:
# @PARAM: level (int) - Уровень логирования (e.g., `logging.INFO`).
# @PARAM: console (bool) - Флаг для включения вывода в консоль.
def __init__(self, name: str = "superset_tool", log_dir: Optional[Path] = None, level: int = logging.INFO, console: bool = True, logger: Optional[logging.Logger] = None) -> None:
if logger:
self.logger = logger
return
with belief_scope("__init__"):
if logger:
self.logger = logger
return
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.logger.propagate = False
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.logger.propagate = False
formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
if self.logger.hasHandlers():
self.logger.handlers.clear()
if self.logger.hasHandlers():
self.logger.handlers.clear()
if log_dir:
log_dir.mkdir(parents=True, exist_ok=True)
timestamp = datetime.now().strftime("%Y%m%d")
file_handler = logging.FileHandler(log_dir / f"{name}_{timestamp}.log", encoding="utf-8")
file_handler.setFormatter(formatter)
self.logger.addHandler(file_handler)
if log_dir:
log_dir.mkdir(parents=True, exist_ok=True)
timestamp = datetime.now().strftime("%Y%m%d")
file_handler = logging.FileHandler(log_dir / f"{name}_{timestamp}.log", encoding="utf-8")
file_handler.setFormatter(formatter)
self.logger.addHandler(file_handler)
if console:
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(formatter)
self.logger.addHandler(console_handler)
# [/DEF:SupersetLogger.__init__:Function]
if console:
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(formatter)
self.logger.addHandler(console_handler)
# [/DEF:__init__:Function]
# [DEF:SupersetLogger._log:Function]
# [DEF:_log:Function]
# @PURPOSE: (Helper) Универсальный метод для вызова соответствующего уровня логирования.
# @PRE: level_method должен быть вызываемым методом логгера. msg must be a string.
# @POST: Сообщение записано в лог.
# @PARAM: level_method (Any) - Метод логгера (info, debug, etc).
# @PARAM: msg (str) - Сообщение.
# @PARAM: args (Any) - Аргументы форматирования.
# @PARAM: extra (Optional[Mapping[str, Any]]) - Дополнительные данные.
# @PARAM: exc_info (bool) - Добавлять ли информацию об исключении.
def _log(self, level_method: Any, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
level_method(msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger._log:Function]
with belief_scope("_log"):
level_method(msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:_log:Function]
# [DEF:SupersetLogger.info:Function]
# [DEF:info:Function]
# @PURPOSE: Записывает сообщение уровня INFO.
# @PRE: msg должен быть строкой.
# @POST: Сообщение уровня INFO записано.
def info(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.info, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.info:Function]
with belief_scope("info"):
self._log(self.logger.info, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:info:Function]
# [DEF:SupersetLogger.debug:Function]
# [DEF:debug:Function]
# @PURPOSE: Записывает сообщение уровня DEBUG.
# @PRE: msg должен быть строкой.
# @POST: Сообщение уровня DEBUG записано.
def debug(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.debug, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.debug:Function]
with belief_scope("debug"):
self._log(self.logger.debug, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:debug:Function]
# [DEF:SupersetLogger.warning:Function]
# [DEF:warning:Function]
# @PURPOSE: Записывает сообщение уровня WARNING.
# @PRE: msg должен быть строкой.
# @POST: Сообщение уровня WARNING записано.
def warning(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.warning, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.warning:Function]
with belief_scope("warning"):
self._log(self.logger.warning, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:warning:Function]
# [DEF:SupersetLogger.error:Function]
# [DEF:error:Function]
# @PURPOSE: Записывает сообщение уровня ERROR.
# @PRE: msg должен быть строкой.
# @POST: Сообщение уровня ERROR записано.
def error(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.error, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.error:Function]
with belief_scope("error"):
self._log(self.logger.error, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:error:Function]
# [DEF:SupersetLogger.critical:Function]
# [DEF:critical:Function]
# @PURPOSE: Записывает сообщение уровня CRITICAL.
# @PRE: msg должен быть строкой.
# @POST: Сообщение уровня CRITICAL записано.
def critical(self, msg: str, *args: Any, extra: Optional[Mapping[str, Any]] = None, exc_info: bool = False) -> None:
self._log(self.logger.critical, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:SupersetLogger.critical:Function]
with belief_scope("critical"):
self._log(self.logger.critical, msg, *args, extra=extra, exc_info=exc_info)
# [/DEF:critical:Function]
# [DEF:SupersetLogger.exception:Function]
# [DEF:exception:Function]
# @PURPOSE: Записывает сообщение уровня ERROR вместе с трассировкой стека текущего исключения.
# @PRE: msg должен быть строкой.
# @POST: Сообщение об ошибке с traceback записано.
def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
self.logger.exception(msg, *args, **kwargs)
# [/DEF:SupersetLogger.exception:Function]
with belief_scope("exception"):
self.logger.exception(msg, *args, **kwargs)
# [/DEF:exception:Function]
# [DEF:belief_scope:Method]
# @PURPOSE: Instance method wrapper for belief_scope context manager.
# @PRE: scope_id must be a string.
# @POST: Enters the belief scope.
@contextmanager
def belief_scope(self, scope_id: str) -> Generator[None, None, None]:
with belief_scope(scope_id):
yield
# [/DEF:belief_scope:Method]
# [/DEF:SupersetLogger:Class]
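# --- Illustrative usage (editorial sketch) ---
from pathlib import Path
from superset_tool.utils.logger import SupersetLogger

log = SupersetLogger(name="superset_tool", log_dir=Path("logs"), console=True)
with log.belief_scope("export run"):
    log.info("Export started for %s", "example_dashboard")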

View File

@@ -16,6 +16,7 @@ from pathlib import Path
import requests
from requests.adapters import HTTPAdapter
import urllib3
from superset_tool.utils.logger import belief_scope
from urllib3.util.retry import Retry
from superset_tool.exceptions import AuthenticationError, NetworkError, DashboardNotFoundError, SupersetAPIError, PermissionDeniedError
from superset_tool.utils.logger import SupersetLogger
@@ -26,15 +27,18 @@ from superset_tool.utils.logger import SupersetLogger
class APIClient:
DEFAULT_TIMEOUT = 30
# [DEF:APIClient.__init__:Function]
# [DEF:__init__:Function]
# @PURPOSE: Инициализирует API клиент с конфигурацией, сессией и логгером.
# @PARAM: config (Dict[str, Any]) - Конфигурация.
# @PARAM: verify_ssl (bool) - Проверять ли SSL.
# @PARAM: timeout (int) - Таймаут запросов.
# @PARAM: logger (Optional[SupersetLogger]) - Логгер.
# @PRE: config must contain 'base_url' and 'auth'.
# @POST: APIClient instance is initialized with a session.
def __init__(self, config: Dict[str, Any], verify_ssl: bool = True, timeout: int = DEFAULT_TIMEOUT, logger: Optional[SupersetLogger] = None):
self.logger = logger or SupersetLogger(name="APIClient")
self.logger.info("[APIClient.__init__][Entry] Initializing APIClient.")
with belief_scope("__init__"):
self.logger = logger or SupersetLogger(name="APIClient")
self.logger.info("[APIClient.__init__][Entry] Initializing APIClient.")
self.base_url: str = config.get("base_url", "")
self.auth = config.get("auth")
self.request_settings = {"verify_ssl": verify_ssl, "timeout": timeout}
@@ -42,13 +46,16 @@ class APIClient:
self._tokens: Dict[str, str] = {}
self._authenticated = False
self.logger.info("[APIClient.__init__][Exit] APIClient initialized.")
# [/DEF:APIClient.__init__:Function]
# [/DEF:__init__:Function]
# [DEF:APIClient._init_session:Function]
# [DEF:_init_session:Function]
# @PURPOSE: Создает и настраивает `requests.Session` с retry-логикой.
# @PRE: self.request_settings must be initialized.
# @POST: Returns a configured requests.Session instance.
# @RETURN: requests.Session - Настроенная сессия.
def _init_session(self) -> requests.Session:
session = requests.Session()
with belief_scope("_init_session"):
session = requests.Session()
retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
adapter = HTTPAdapter(max_retries=retries)
session.mount('http://', adapter)
@@ -58,58 +65,66 @@ class APIClient:
self.logger.warning("[_init_session][State] SSL verification disabled.")
session.verify = self.request_settings["verify_ssl"]
return session
# [/DEF:APIClient._init_session:Function]
# [/DEF:_init_session:Function]
# [DEF:APIClient.authenticate:Function]
# [DEF:authenticate:Function]
# @PURPOSE: Выполняет аутентификацию в Superset API и получает access и CSRF токены.
# @PRE: self.auth and self.base_url must be valid.
# @POST: `self._tokens` заполнен, `self._authenticated` установлен в `True`.
# @RETURN: Dict[str, str] - Словарь с токенами.
# @THROW: AuthenticationError, NetworkError - при ошибках.
def authenticate(self) -> Dict[str, str]:
self.logger.info("[authenticate][Enter] Authenticating to %s", self.base_url)
try:
login_url = f"{self.base_url}/security/login"
response = self.session.post(login_url, json=self.auth, timeout=self.request_settings["timeout"])
response.raise_for_status()
access_token = response.json()["access_token"]
csrf_url = f"{self.base_url}/security/csrf_token/"
csrf_response = self.session.get(csrf_url, headers={"Authorization": f"Bearer {access_token}"}, timeout=self.request_settings["timeout"])
csrf_response.raise_for_status()
self._tokens = {"access_token": access_token, "csrf_token": csrf_response.json()["result"]}
self._authenticated = True
self.logger.info("[authenticate][Exit] Authenticated successfully.")
return self._tokens
except requests.exceptions.HTTPError as e:
raise AuthenticationError(f"Authentication failed: {e}") from e
except (requests.exceptions.RequestException, KeyError) as e:
raise NetworkError(f"Network or parsing error during authentication: {e}") from e
# [/DEF:APIClient.authenticate:Function]
with belief_scope("authenticate"):
self.logger.info("[authenticate][Enter] Authenticating to %s", self.base_url)
try:
login_url = f"{self.base_url}/security/login"
response = self.session.post(login_url, json=self.auth, timeout=self.request_settings["timeout"])
response.raise_for_status()
access_token = response.json()["access_token"]
csrf_url = f"{self.base_url}/security/csrf_token/"
csrf_response = self.session.get(csrf_url, headers={"Authorization": f"Bearer {access_token}"}, timeout=self.request_settings["timeout"])
csrf_response.raise_for_status()
self._tokens = {"access_token": access_token, "csrf_token": csrf_response.json()["result"]}
self._authenticated = True
self.logger.info("[authenticate][Exit] Authenticated successfully.")
return self._tokens
except requests.exceptions.HTTPError as e:
raise AuthenticationError(f"Authentication failed: {e}") from e
except (requests.exceptions.RequestException, KeyError) as e:
raise NetworkError(f"Network or parsing error during authentication: {e}") from e
# [/DEF:authenticate:Function]
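# --- Illustrative usage (editorial sketch) ---
# The import path and the exact auth payload are assumptions: the config only has to
# provide 'base_url' and 'auth', and the payload mirrors Superset's /security/login body.
from superset_tool.utils.network import APIClient  # assumed module path

client = APIClient(
    config={
        "base_url": "https://superset.example.com/api/v1",
        "auth": {"username": "admin", "password": "secret", "provider": "db", "refresh": True},
    },
    verify_ssl=True,
)
tokens = client.authenticate()  # fills access_token and csrf_token, used by the headers property below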
@property
# [DEF:headers:Function]
# @PURPOSE: Возвращает HTTP-заголовки для аутентифицированных запросов.
# @PRE: APIClient is initialized and authenticated or can be authenticated.
# @POST: Returns headers including auth tokens.
def headers(self) -> Dict[str, str]:
# [DEF:APIClient.headers:Function]
# @PURPOSE: Возвращает HTTP-заголовки для аутентифицированных запросов.
if not self._authenticated: self.authenticate()
return {
with belief_scope("headers"):
if not self._authenticated: self.authenticate()
return {
"Authorization": f"Bearer {self._tokens['access_token']}",
"X-CSRFToken": self._tokens.get("csrf_token", ""),
"Referer": self.base_url,
"Content-Type": "application/json"
}
# [/DEF:APIClient.headers:Function]
# [/DEF:headers:Function]
# [DEF:APIClient.request:Function]
# [DEF:request:Function]
# @PURPOSE: Выполняет универсальный HTTP-запрос к API.
# @RETURN: `requests.Response` если `raw_response=True`, иначе `dict`.
# @THROW: SupersetAPIError, NetworkError и их подклассы.
# @PARAM: method (str) - HTTP метод.
# @PARAM: endpoint (str) - API эндпоинт.
# @PARAM: headers (Optional[Dict]) - Дополнительные заголовки.
# @PARAM: raw_response (bool) - Возвращать ли сырой ответ.
# @PRE: method and endpoint must be strings.
# @POST: Returns response content or raw Response object.
# @RETURN: `requests.Response` если `raw_response=True`, иначе `dict`.
# @THROW: SupersetAPIError, NetworkError и их подклассы.
def request(self, method: str, endpoint: str, headers: Optional[Dict] = None, raw_response: bool = False, **kwargs) -> Union[requests.Response, Dict[str, Any]]:
full_url = f"{self.base_url}{endpoint}"
with belief_scope("request"):
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy()
if headers: _headers.update(headers)
@@ -121,41 +136,50 @@ class APIClient:
self._handle_http_error(e, endpoint)
except requests.exceptions.RequestException as e:
self._handle_network_error(e, full_url)
# [/DEF:APIClient.request:Function]
# [/DEF:request:Function]
# [DEF:APIClient._handle_http_error:Function]
# [DEF:_handle_http_error:Function]
# @PURPOSE: (Helper) Преобразует HTTP ошибки в кастомные исключения.
# @PARAM: e (requests.exceptions.HTTPError) - Ошибка.
# @PARAM: endpoint (str) - Эндпоинт.
# @PRE: e must be a valid HTTPError with a response.
# @POST: Raises a specific SupersetAPIError or subclass.
def _handle_http_error(self, e: requests.exceptions.HTTPError, endpoint: str):
status_code = e.response.status_code
with belief_scope("_handle_http_error"):
status_code = e.response.status_code
if status_code == 404: raise DashboardNotFoundError(endpoint) from e
if status_code == 403: raise PermissionDeniedError() from e
if status_code == 401: raise AuthenticationError() from e
raise SupersetAPIError(f"API Error {status_code}: {e.response.text}") from e
# [/DEF:APIClient._handle_http_error:Function]
# [/DEF:_handle_http_error:Function]
# [DEF:APIClient._handle_network_error:Function]
# [DEF:_handle_network_error:Function]
# @PURPOSE: (Helper) Преобразует сетевые ошибки в `NetworkError`.
# @PARAM: e (requests.exceptions.RequestException) - Ошибка.
# @PARAM: url (str) - URL.
# @PRE: e must be a RequestException.
# @POST: Raises a NetworkError.
def _handle_network_error(self, e: requests.exceptions.RequestException, url: str):
if isinstance(e, requests.exceptions.Timeout): msg = "Request timeout"
elif isinstance(e, requests.exceptions.ConnectionError): msg = "Connection error"
else: msg = f"Unknown network error: {e}"
raise NetworkError(msg, url=url) from e
# [/DEF:APIClient._handle_network_error:Function]
with belief_scope("_handle_network_error"):
if isinstance(e, requests.exceptions.Timeout): msg = "Request timeout"
elif isinstance(e, requests.exceptions.ConnectionError): msg = "Connection error"
else: msg = f"Unknown network error: {e}"
raise NetworkError(msg, url=url) from e
# [/DEF:_handle_network_error:Function]
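# --- Illustrative error handling (editorial sketch) ---
# request() funnels HTTP and transport failures through the two handlers above,
# so callers can branch on the custom exception types.
from superset_tool.exceptions import DashboardNotFoundError, NetworkError, SupersetAPIError

try:
    dashboard = client.request("GET", "/dashboard/123")  # `client` as in the earlier sketch
except DashboardNotFoundError:
    dashboard = None
except (NetworkError, SupersetAPIError) as exc:
    raise RuntimeError(f"Superset call failed: {exc}") from exc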
# [DEF:APIClient.upload_file:Function]
# [DEF:upload_file:Function]
# @PURPOSE: Загружает файл на сервер через multipart/form-data.
# @RETURN: Ответ API в виде словаря.
# @THROW: SupersetAPIError, NetworkError, TypeError.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: file_info (Dict[str, Any]) - Информация о файле.
# @PARAM: extra_data (Optional[Dict]) - Дополнительные данные.
# @PARAM: timeout (Optional[int]) - Таймаут.
# @PRE: file_info must contain 'file_obj' and 'file_name'.
# @POST: File is uploaded and response returned.
# @RETURN: Ответ API в виде словаря.
# @THROW: SupersetAPIError, NetworkError, TypeError.
def upload_file(self, endpoint: str, file_info: Dict[str, Any], extra_data: Optional[Dict] = None, timeout: Optional[int] = None) -> Dict:
full_url = f"{self.base_url}{endpoint}"
with belief_scope("upload_file"):
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy(); _headers.pop('Content-Type', None)
file_obj, file_name, form_field = file_info.get("file_obj"), file_info.get("file_name"), file_info.get("form_field", "file")
@@ -170,52 +194,61 @@ class APIClient:
raise TypeError(f"Unsupported file_obj type: {type(file_obj)}")
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
# [/DEF:APIClient.upload_file:Function]
# [/DEF:upload_file:Function]
# [DEF:APIClient._perform_upload:Function]
# [DEF:_perform_upload:Function]
# @PURPOSE: (Helper) Выполняет POST запрос с файлом.
# @PARAM: url (str) - URL.
# @PARAM: files (Dict) - Файлы.
# @PARAM: data (Optional[Dict]) - Данные.
# @PARAM: headers (Dict) - Заголовки.
# @PARAM: timeout (Optional[int]) - Таймаут.
# @PRE: url, files, and headers must be provided.
# @POST: POST request is performed and JSON response returned.
# @RETURN: Dict - Ответ.
def _perform_upload(self, url: str, files: Dict, data: Optional[Dict], headers: Dict, timeout: Optional[int]) -> Dict:
try:
response = self.session.post(url, files=files, data=data or {}, headers=headers, timeout=timeout or self.request_settings["timeout"])
response.raise_for_status()
# Добавляем логирование для отладки
if response.status_code == 200:
try:
return response.json()
except Exception as json_e:
self.logger.debug(f"[_perform_upload][Debug] Response is not valid JSON: {response.text[:200]}...")
raise SupersetAPIError(f"API error during upload: Response is not valid JSON: {json_e}") from json_e
return response.json()
except requests.exceptions.HTTPError as e:
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
except requests.exceptions.RequestException as e:
raise NetworkError(f"Network error during upload: {e}", url=url) from e
# [/DEF:APIClient._perform_upload:Function]
with belief_scope("_perform_upload"):
try:
response = self.session.post(url, files=files, data=data or {}, headers=headers, timeout=timeout or self.request_settings["timeout"])
response.raise_for_status()
# Добавляем логирование для отладки
if response.status_code == 200:
try:
return response.json()
except Exception as json_e:
self.logger.debug(f"[_perform_upload][Debug] Response is not valid JSON: {response.text[:200]}...")
raise SupersetAPIError(f"API error during upload: Response is not valid JSON: {json_e}") from json_e
return response.json()
except requests.exceptions.HTTPError as e:
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
except requests.exceptions.RequestException as e:
raise NetworkError(f"Network error during upload: {e}", url=url) from e
# [/DEF:_perform_upload:Function]
# [DEF:APIClient.fetch_paginated_count:Function]
# [DEF:fetch_paginated_count:Function]
# @PURPOSE: Получает общее количество элементов для пагинации.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: query_params (Dict) - Параметры запроса.
# @PARAM: count_field (str) - Поле с количеством.
# @PRE: query_params must be a dictionary.
# @POST: Returns total count of items.
# @RETURN: int - Количество.
def fetch_paginated_count(self, endpoint: str, query_params: Dict, count_field: str = "count") -> int:
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query_params)}))
with belief_scope("fetch_paginated_count"):
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query_params)}))
return response_json.get(count_field, 0)
# [/DEF:APIClient.fetch_paginated_count:Function]
# [/DEF:fetch_paginated_count:Function]
# [DEF:APIClient.fetch_paginated_data:Function]
# [DEF:fetch_paginated_data:Function]
# @PURPOSE: Автоматически собирает данные со всех страниц пагинированного эндпоинта.
# @PARAM: endpoint (str) - Эндпоинт.
# @PARAM: pagination_options (Dict[str, Any]) - Опции пагинации.
# @PRE: pagination_options must contain 'base_query', 'total_count', 'results_field'.
# @POST: Returns all items across all pages.
# @RETURN: List[Any] - Список данных.
def fetch_paginated_data(self, endpoint: str, pagination_options: Dict[str, Any]) -> List[Any]:
base_query, total_count = pagination_options["base_query"], pagination_options["total_count"]
with belief_scope("fetch_paginated_data"):
base_query, total_count = pagination_options["base_query"], pagination_options["total_count"]
results_field, page_size = pagination_options["results_field"], base_query.get('page_size')
assert page_size and page_size > 0, "'page_size' must be a positive number."
@@ -225,7 +258,7 @@ class APIClient:
response_json = cast(Dict[str, Any], self.request("GET", endpoint, params={"q": json.dumps(query)}))
results.extend(response_json.get(results_field, []))
return results
# [/DEF:APIClient.fetch_paginated_data:Function]
# [/DEF:fetch_paginated_data:Function]
# [/DEF:APIClient:Class]
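# --- Illustrative usage (editorial sketch) ---
# Count first, then page through; `client` is an authenticated APIClient as in the sketch above.
query = {"page": 0, "page_size": 100}
total = client.fetch_paginated_count("/dashboard/", query_params=query)
dashboards = client.fetch_paginated_data(
    "/dashboard/",
    pagination_options={"base_query": query, "total_count": total, "results_field": "result"},
)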

View File

@@ -8,6 +8,7 @@
# [SECTION: IMPORTS]
import sys
from typing import List, Tuple, Optional, Any
from .logger import belief_scope
# [/SECTION]
# [DEF:menu:Function]
@@ -15,17 +16,20 @@ from typing import List, Tuple, Optional, Any
# @PARAM: title (str) - Заголовок меню.
# @PARAM: prompt (str) - Приглашение к вводу.
# @PARAM: choices (List[str]) - Список вариантов для выбора.
# @PRE: choices must be a non-empty list of strings.
# @POST: Returns a tuple with return code and selected choice.
# @RETURN: Tuple[int, Optional[str]] - Кортеж (код возврата, выбранный элемент). rc=0 - успех.
def menu(title: str, prompt: str, choices: List[str], **kwargs) -> Tuple[int, Optional[str]]:
print(f"\n=== {title} ===\n{prompt}")
for idx, item in enumerate(choices, 1):
print(f"{idx}) {item}")
try:
raw = input("\nВведите номер (0 отмена): ").strip()
sel = int(raw)
return (0, choices[sel - 1]) if 0 < sel <= len(choices) else (1, None)
except (ValueError, IndexError):
return 1, None
with belief_scope("menu"):
print(f"\n=== {title} ===\n{prompt}")
for idx, item in enumerate(choices, 1):
print(f"{idx}) {item}")
try:
raw = input("\nВведите номер (0 отмена): ").strip()
sel = int(raw)
return (0, choices[sel - 1]) if 0 < sel <= len(choices) else (1, None)
except (ValueError, IndexError):
return 1, None
# [/DEF:menu:Function]
# [DEF:checklist:Function]
@@ -33,72 +37,121 @@ def menu(title: str, prompt: str, choices: List[str], **kwargs) -> Tuple[int, Op
# @PARAM: title (str) - Заголовок.
# @PARAM: prompt (str) - Приглашение к вводу.
# @PARAM: options (List[Tuple[str, str]]) - Список кортежей (значение, метка).
# @PRE: options must be a list of (value, label) tuples.
# @POST: Returns a list of selected values.
# @RETURN: Tuple[int, List[str]] - Кортеж (код возврата, список выбранных значений).
def checklist(title: str, prompt: str, options: List[Tuple[str, str]], **kwargs) -> Tuple[int, List[str]]:
print(f"\n=== {title} ===\n{prompt}")
for idx, (val, label) in enumerate(options, 1):
print(f"{idx}) [{val}] {label}")
raw = input("\nВведите номера через запятую (пустой ввод → отказ): ").strip()
if not raw: return 1, []
try:
indices = {int(x.strip()) for x in raw.split(",") if x.strip()}
selected_values = [options[i - 1][0] for i in indices if 0 < i <= len(options)]
return 0, selected_values
except (ValueError, IndexError):
return 1, []
with belief_scope("checklist"):
print(f"\n=== {title} ===\n{prompt}")
for idx, (val, label) in enumerate(options, 1):
print(f"{idx}) [{val}] {label}")
raw = input("\nВведите номера через запятую (пустой ввод → отказ): ").strip()
if not raw: return 1, []
try:
indices = {int(x.strip()) for x in raw.split(",") if x.strip()}
selected_values = [options[i - 1][0] for i in indices if 0 < i <= len(options)]
return 0, selected_values
except (ValueError, IndexError):
return 1, []
# [/DEF:checklist:Function]
# [DEF:yesno:Function]
# @PURPOSE: Задает вопрос с ответом да/нет.
# @PARAM: title (str) - Заголовок.
# @PARAM: question (str) - Вопрос для пользователя.
# @PRE: question must be a string.
# @POST: Returns boolean based on user input.
# @RETURN: bool - `True`, если пользователь ответил "да".
def yesno(title: str, question: str, **kwargs) -> bool:
ans = input(f"\n=== {title} ===\n{question} (y/n): ").strip().lower()
return ans in ("y", "yes", "да", "д")
with belief_scope("yesno"):
ans = input(f"\n=== {title} ===\n{question} (y/n): ").strip().lower()
return ans in ("y", "yes", "да", "д")
# [/DEF:yesno:Function]
# [DEF:msgbox:Function]
# @PURPOSE: Отображает информационное сообщение.
# @PARAM: title (str) - Заголовок.
# @PARAM: msg (str) - Текст сообщения.
# @PRE: msg must be a string.
# @POST: Message is printed to console.
def msgbox(title: str, msg: str, **kwargs) -> None:
print(f"\n=== {title} ===\n{msg}\n")
with belief_scope("msgbox"):
print(f"\n=== {title} ===\n{msg}\n")
# [/DEF:msgbox:Function]
# [DEF:inputbox:Function]
# @PURPOSE: Запрашивает у пользователя текстовый ввод.
# @PARAM: title (str) - Заголовок.
# @PARAM: prompt (str) - Приглашение к вводу.
# @PRE: prompt must be a string.
# @POST: Returns user input string.
# @RETURN: Tuple[int, Optional[str]] - Кортеж (код возврата, введенная строка).
def inputbox(title: str, prompt: str, **kwargs) -> Tuple[int, Optional[str]]:
print(f"\n=== {title} ===")
val = input(f"{prompt}\n")
return (0, val) if val else (1, None)
with belief_scope("inputbox"):
print(f"\n=== {title} ===")
val = input(f"{prompt}\n")
return (0, val) if val else (1, None)
# [/DEF:inputbox:Function]
# [DEF:_ConsoleGauge:Class]
# @PURPOSE: Контекстный менеджер для имитации `whiptail gauge` в консоли.
class _ConsoleGauge:
# [DEF:__init__:Function]
# @PURPOSE: Initializes the gauge.
# @PRE: title must be a string.
# @POST: Instance initialized.
def __init__(self, title: str, **kwargs):
self.title = title
with belief_scope("__init__"):
self.title = title
# [/DEF:__init__:Function]
# [DEF:__enter__:Function]
# @PURPOSE: Enters the context.
# @PRE: Instance initialized.
# @POST: Header printed, returns self.
def __enter__(self):
print(f"\n=== {self.title} ===")
return self
with belief_scope("__enter__"):
print(f"\n=== {self.title} ===")
return self
# [/DEF:__enter__:Function]
# [DEF:__exit__:Function]
# @PURPOSE: Exits the context.
# @PRE: Context entered.
# @POST: Newline printed.
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stdout.write("\n"); sys.stdout.flush()
with belief_scope("__exit__"):
sys.stdout.write("\n"); sys.stdout.flush()
# [/DEF:__exit__:Function]
# [DEF:set_text:Function]
# @PURPOSE: Sets the gauge text.
# @PRE: txt must be a string.
# @POST: Text written to stdout.
def set_text(self, txt: str) -> None:
sys.stdout.write(f"\r{txt} "); sys.stdout.flush()
with belief_scope("set_text"):
sys.stdout.write(f"\r{txt} "); sys.stdout.flush()
# [/DEF:set_text:Function]
# [DEF:set_percent:Function]
# @PURPOSE: Sets the gauge percentage.
# @PRE: percent must be an integer.
# @POST: Percentage written to stdout.
def set_percent(self, percent: int) -> None:
sys.stdout.write(f"{percent}%"); sys.stdout.flush()
with belief_scope("set_percent"):
sys.stdout.write(f"{percent}%"); sys.stdout.flush()
# [/DEF:set_percent:Function]
# [/DEF:_ConsoleGauge:Class]
# [DEF:gauge:Function]
# @PURPOSE: Создает и возвращает экземпляр `_ConsoleGauge`.
# @PRE: title must be a string.
# @POST: Returns an instance of _ConsoleGauge.
# @PARAM: title (str) - Заголовок для индикатора прогресса.
# @RETURN: _ConsoleGauge - Экземпляр контекстного менеджера.
def gauge(title: str, **kwargs) -> _ConsoleGauge:
return _ConsoleGauge(title, **kwargs)
with belief_scope("gauge"):
return _ConsoleGauge(title, **kwargs)
# [/DEF:gauge:Function]
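# --- Illustrative usage (editorial sketch) ---
from superset_tool.utils.whiptail_fallback import menu, yesno, gauge

rc, choice = menu("Экспорт", "Выберите дашборд:", ["sales", "marketing"])
if rc == 0 and yesno("Подтверждение", f"Экспортировать {choice}?"):
    with gauge("Экспорт дашборда") as progress:
        progress.set_text(f"Экспорт {choice} ")
        progress.set_percent(100)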
# [/DEF:superset_tool.utils.whiptail_fallback:Module]