Commit creation and transfer to a new environment now work
Submodule backend/backend/git_repos/12 added at d592fa7ed5
@@ -43,4 +43,5 @@ uvicorn==0.38.0
 websockets==15.0.1
 pandas
 psycopg2-binary
 openpyxl
+GitPython==3.1.44
@@ -1 +1 @@
-from . import plugins, tasks, settings, connections
+from . import plugins, tasks, settings, connections, environments, mappings, migration, git
@@ -61,7 +61,7 @@ async def get_environments(config_manager=Depends(get_config_manager)):
                 backup_schedule=ScheduleSchema(
                     enabled=e.backup_schedule.enabled,
                     cron_expression=e.backup_schedule.cron_expression
-                ) if e.backup_schedule else None
+                ) if getattr(e, 'backup_schedule', None) else None
             ) for e in envs
         ]
 # [/DEF:get_environments:Function]
backend/src/api/routes/git.py (new file, 303 lines)

# [DEF:backend.src.api.routes.git:Module]
#
# @SEMANTICS: git, routes, api, fastapi, repository, deployment
# @PURPOSE: Provides FastAPI endpoints for Git integration operations.
# @LAYER: API
# @RELATION: USES -> src.services.git_service.GitService
# @RELATION: USES -> src.api.routes.git_schemas
# @RELATION: USES -> src.models.git
#
# @INVARIANT: All Git operations must be routed through GitService.

from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional
import typing
from src.dependencies import get_config_manager
from src.core.database import get_db
from src.models.git import GitServerConfig, GitStatus, DeploymentEnvironment, GitRepository
from src.api.routes.git_schemas import (
    GitServerConfigSchema, GitServerConfigCreate,
    GitRepositorySchema, BranchSchema, BranchCreate,
    BranchCheckout, CommitSchema, CommitCreate,
    DeploymentEnvironmentSchema, DeployRequest, RepoInitRequest
)
from src.services.git_service import GitService
from src.core.logger import logger, belief_scope

router = APIRouter(prefix="/api/git", tags=["git"])
git_service = GitService()

# [DEF:get_git_configs:Function]
# @PURPOSE: List all configured Git servers.
# @RETURN: List[GitServerConfigSchema]
@router.get("/config", response_model=List[GitServerConfigSchema])
async def get_git_configs(db: Session = Depends(get_db)):
    with belief_scope("get_git_configs"):
        return db.query(GitServerConfig).all()
# [/DEF:get_git_configs:Function]

# [DEF:create_git_config:Function]
# @PURPOSE: Register a new Git server configuration.
# @PARAM: config (GitServerConfigCreate)
# @RETURN: GitServerConfigSchema
@router.post("/config", response_model=GitServerConfigSchema)
async def create_git_config(config: GitServerConfigCreate, db: Session = Depends(get_db)):
    with belief_scope("create_git_config"):
        db_config = GitServerConfig(**config.dict())
        db.add(db_config)
        db.commit()
        db.refresh(db_config)
        return db_config
# [/DEF:create_git_config:Function]

# [DEF:delete_git_config:Function]
# @PURPOSE: Remove a Git server configuration.
# @PARAM: config_id (str)
@router.delete("/config/{config_id}")
async def delete_git_config(config_id: str, db: Session = Depends(get_db)):
    with belief_scope("delete_git_config"):
        db_config = db.query(GitServerConfig).filter(GitServerConfig.id == config_id).first()
        if not db_config:
            raise HTTPException(status_code=404, detail="Configuration not found")

        db.delete(db_config)
        db.commit()
        return {"status": "success", "message": "Configuration deleted"}
# [/DEF:delete_git_config:Function]

# [DEF:test_git_config:Function]
# @PURPOSE: Validate connection to a Git server using provided credentials.
# @PARAM: config (GitServerConfigCreate)
@router.post("/config/test")
async def test_git_config(config: GitServerConfigCreate):
    with belief_scope("test_git_config"):
        success = await git_service.test_connection(config.provider, config.url, config.pat)
        if success:
            return {"status": "success", "message": "Connection successful"}
        else:
            raise HTTPException(status_code=400, detail="Connection failed")
# [/DEF:test_git_config:Function]

# [DEF:init_repository:Function]
# @PURPOSE: Link a dashboard to a Git repository and perform initial clone/init.
# @PARAM: dashboard_id (int)
# @PARAM: init_data (RepoInitRequest)
@router.post("/repositories/{dashboard_id}/init")
async def init_repository(dashboard_id: int, init_data: RepoInitRequest, db: Session = Depends(get_db)):
    with belief_scope("init_repository"):
        # 1. Get config
        config = db.query(GitServerConfig).filter(GitServerConfig.id == init_data.config_id).first()
        if not config:
            raise HTTPException(status_code=404, detail="Git configuration not found")

        try:
            # 2. Perform Git clone/init
            logger.info(f"[init_repository][Action] Initializing repo for dashboard {dashboard_id}")
            git_service.init_repo(dashboard_id, init_data.remote_url, config.pat)

            # 3. Save to DB
            repo_path = git_service._get_repo_path(dashboard_id)
            db_repo = db.query(GitRepository).filter(GitRepository.dashboard_id == dashboard_id).first()
            if not db_repo:
                db_repo = GitRepository(
                    dashboard_id=dashboard_id,
                    config_id=config.id,
                    remote_url=init_data.remote_url,
                    local_path=repo_path
                )
                db.add(db_repo)
            else:
                db_repo.config_id = config.id
                db_repo.remote_url = init_data.remote_url
                db_repo.local_path = repo_path

            db.commit()
            logger.info(f"[init_repository][Coherence:OK] Repository initialized for dashboard {dashboard_id}")
            return {"status": "success", "message": "Repository initialized"}
        except Exception as e:
            db.rollback()
            logger.error(f"[init_repository][Coherence:Failed] Failed to init repository: {e}")
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:init_repository:Function]

# [DEF:get_branches:Function]
# @PURPOSE: List all branches for a dashboard's repository.
# @PARAM: dashboard_id (int)
# @RETURN: List[BranchSchema]
@router.get("/repositories/{dashboard_id}/branches", response_model=List[BranchSchema])
async def get_branches(dashboard_id: int):
    with belief_scope("get_branches"):
        try:
            return git_service.list_branches(dashboard_id)
        except Exception as e:
            raise HTTPException(status_code=404, detail=str(e))
# [/DEF:get_branches:Function]

# [DEF:create_branch:Function]
# @PURPOSE: Create a new branch in the dashboard's repository.
# @PARAM: dashboard_id (int)
# @PARAM: branch_data (BranchCreate)
@router.post("/repositories/{dashboard_id}/branches")
async def create_branch(dashboard_id: int, branch_data: BranchCreate):
    with belief_scope("create_branch"):
        try:
            git_service.create_branch(dashboard_id, branch_data.name, branch_data.from_branch)
            return {"status": "success"}
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:create_branch:Function]

# [DEF:checkout_branch:Function]
# @PURPOSE: Switch the dashboard's repository to a specific branch.
# @PARAM: dashboard_id (int)
# @PARAM: checkout_data (BranchCheckout)
@router.post("/repositories/{dashboard_id}/checkout")
async def checkout_branch(dashboard_id: int, checkout_data: BranchCheckout):
    with belief_scope("checkout_branch"):
        try:
            git_service.checkout_branch(dashboard_id, checkout_data.name)
            return {"status": "success"}
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:checkout_branch:Function]

# [DEF:commit_changes:Function]
# @PURPOSE: Stage and commit changes in the dashboard's repository.
# @PARAM: dashboard_id (int)
# @PARAM: commit_data (CommitCreate)
@router.post("/repositories/{dashboard_id}/commit")
async def commit_changes(dashboard_id: int, commit_data: CommitCreate):
    with belief_scope("commit_changes"):
        try:
            git_service.commit_changes(dashboard_id, commit_data.message, commit_data.files)
            return {"status": "success"}
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:commit_changes:Function]

# [DEF:push_changes:Function]
# @PURPOSE: Push local commits to the remote repository.
# @PARAM: dashboard_id (int)
@router.post("/repositories/{dashboard_id}/push")
async def push_changes(dashboard_id: int):
    with belief_scope("push_changes"):
        try:
            git_service.push_changes(dashboard_id)
            return {"status": "success"}
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:push_changes:Function]

# [DEF:pull_changes:Function]
# @PURPOSE: Pull changes from the remote repository.
# @PARAM: dashboard_id (int)
@router.post("/repositories/{dashboard_id}/pull")
async def pull_changes(dashboard_id: int):
    with belief_scope("pull_changes"):
        try:
            git_service.pull_changes(dashboard_id)
            return {"status": "success"}
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:pull_changes:Function]

# [DEF:sync_dashboard:Function]
# @PURPOSE: Sync dashboard state from Superset to Git using the GitPlugin.
# @PARAM: dashboard_id (int)
# @PARAM: source_env_id (Optional[str])
@router.post("/repositories/{dashboard_id}/sync")
async def sync_dashboard(dashboard_id: int, source_env_id: typing.Optional[str] = None):
    with belief_scope("sync_dashboard"):
        try:
            from src.plugins.git_plugin import GitPlugin
            plugin = GitPlugin()
            return await plugin.execute({
                "operation": "sync",
                "dashboard_id": dashboard_id,
                "source_env_id": source_env_id
            })
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:sync_dashboard:Function]

# [DEF:get_environments:Function]
# @PURPOSE: List all deployment environments.
# @RETURN: List[DeploymentEnvironmentSchema]
@router.get("/environments", response_model=List[DeploymentEnvironmentSchema])
async def get_environments(config_manager=Depends(get_config_manager)):
    with belief_scope("get_environments"):
        envs = config_manager.get_environments()
        return [
            DeploymentEnvironmentSchema(
                id=e.id,
                name=e.name,
                superset_url=e.url,
                is_active=True
            ) for e in envs
        ]
# [/DEF:get_environments:Function]

# [DEF:deploy_dashboard:Function]
# @PURPOSE: Deploy dashboard from Git to a target environment.
# @PARAM: dashboard_id (int)
# @PARAM: deploy_data (DeployRequest)
@router.post("/repositories/{dashboard_id}/deploy")
async def deploy_dashboard(dashboard_id: int, deploy_data: DeployRequest):
    with belief_scope("deploy_dashboard"):
        try:
            from src.plugins.git_plugin import GitPlugin
            plugin = GitPlugin()
            return await plugin.execute({
                "operation": "deploy",
                "dashboard_id": dashboard_id,
                "environment_id": deploy_data.environment_id
            })
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:deploy_dashboard:Function]

# [DEF:get_history:Function]
# @PURPOSE: View commit history for a dashboard's repository.
# @PARAM: dashboard_id (int)
# @PARAM: limit (int)
# @RETURN: List[CommitSchema]
@router.get("/repositories/{dashboard_id}/history", response_model=List[CommitSchema])
async def get_history(dashboard_id: int, limit: int = 50):
    with belief_scope("get_history"):
        try:
            return git_service.get_commit_history(dashboard_id, limit)
        except Exception as e:
            raise HTTPException(status_code=404, detail=str(e))
# [/DEF:get_history:Function]

# [DEF:get_repository_status:Function]
# @PURPOSE: Get current Git status for a dashboard repository.
# @PARAM: dashboard_id (int)
# @RETURN: dict
@router.get("/repositories/{dashboard_id}/status")
async def get_repository_status(dashboard_id: int):
    with belief_scope("get_repository_status"):
        try:
            return git_service.get_status(dashboard_id)
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:get_repository_status:Function]

# [DEF:get_repository_diff:Function]
# @PURPOSE: Get Git diff for a dashboard repository.
# @PARAM: dashboard_id (int)
# @PARAM: file_path (Optional[str])
# @PARAM: staged (bool)
# @RETURN: str
@router.get("/repositories/{dashboard_id}/diff")
async def get_repository_diff(dashboard_id: int, file_path: Optional[str] = None, staged: bool = False):
    with belief_scope("get_repository_diff"):
        try:
            diff_text = git_service.get_diff(dashboard_id, file_path, staged)
            return diff_text
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
# [/DEF:get_repository_diff:Function]

# [/DEF:backend.src.api.routes.git:Module]
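A minimal sketch of how a client would drive these routes end to end, assuming the backend is reachable at http://localhost:8000; the host, the dashboard id 12, and the server/repository URLs are illustrative placeholders, not values taken from this commit.

# Hedged usage sketch for the new /api/git endpoints (placeholder host, ids and URLs).
import httpx

BASE = "http://localhost:8000/api/git"

with httpx.Client() as client:
    # Register a Git server (payload mirrors GitServerConfigCreate)
    cfg = client.post(f"{BASE}/config", json={
        "name": "internal-gitea",
        "provider": "GITEA",
        "url": "https://git.example.local",
        "pat": "<personal-access-token>",
    }).json()

    # Link dashboard 12 to a repository and clone it locally
    client.post(f"{BASE}/repositories/12/init", json={
        "config_id": cfg["id"],
        "remote_url": "https://git.example.local/org/dashboard-12.git",
    })

    # Sync from Superset, review the staged diff, then commit and push
    client.post(f"{BASE}/repositories/12/sync")
    print(client.get(f"{BASE}/repositories/12/diff", params={"staged": True}).text)
    client.post(f"{BASE}/repositories/12/commit", json={"message": "Sync dashboard 12", "files": []})
    client.post(f"{BASE}/repositories/12/push")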
backend/src/api/routes/git_schemas.py (new file, 130 lines)

# [DEF:backend.src.api.routes.git_schemas:Module]
#
# @SEMANTICS: git, schemas, pydantic, api, contracts
# @PURPOSE: Defines Pydantic models for the Git integration API layer.
# @LAYER: API
# @RELATION: DEPENDS_ON -> backend.src.models.git
#
# @INVARIANT: All schemas must be compatible with the FastAPI router.

from pydantic import BaseModel, Field
from typing import List, Optional
from datetime import datetime
from uuid import UUID
from src.models.git import GitProvider, GitStatus, SyncStatus

# [DEF:GitServerConfigBase:Class]
class GitServerConfigBase(BaseModel):
    name: str = Field(..., description="Display name for the Git server")
    provider: GitProvider = Field(..., description="Git provider (GITHUB, GITLAB, GITEA)")
    url: str = Field(..., description="Server base URL")
    pat: str = Field(..., description="Personal Access Token")
    default_repository: Optional[str] = Field(None, description="Default repository path (org/repo)")
# [/DEF:GitServerConfigBase:Class]

# [DEF:GitServerConfigCreate:Class]
class GitServerConfigCreate(GitServerConfigBase):
    """Schema for creating a new Git server configuration."""
    pass
# [/DEF:GitServerConfigCreate:Class]

# [DEF:GitServerConfigSchema:Class]
class GitServerConfigSchema(GitServerConfigBase):
    """Schema for representing a Git server configuration with metadata."""
    id: str
    status: GitStatus
    last_validated: datetime

    class Config:
        from_attributes = True
# [/DEF:GitServerConfigSchema:Class]

# [DEF:GitRepositorySchema:Class]
class GitRepositorySchema(BaseModel):
    """Schema for tracking a local Git repository linked to a dashboard."""
    id: str
    dashboard_id: int
    config_id: str
    remote_url: str
    local_path: str
    current_branch: str
    sync_status: SyncStatus

    class Config:
        from_attributes = True
# [/DEF:GitRepositorySchema:Class]

# [DEF:BranchSchema:Class]
class BranchSchema(BaseModel):
    """Schema for representing a Git branch."""
    name: str
    commit_hash: str
    is_remote: bool
    last_updated: datetime
# [/DEF:BranchSchema:Class]

# [DEF:CommitSchema:Class]
class CommitSchema(BaseModel):
    """Schema for representing a Git commit."""
    hash: str
    author: str
    email: str
    timestamp: datetime
    message: str
    files_changed: List[str]
# [/DEF:CommitSchema:Class]

# [DEF:BranchCreate:Class]
class BranchCreate(BaseModel):
    """Schema for branch creation requests."""
    name: str
    from_branch: str
# [/DEF:BranchCreate:Class]

# [DEF:BranchCheckout:Class]
class BranchCheckout(BaseModel):
    """Schema for branch checkout requests."""
    name: str
# [/DEF:BranchCheckout:Class]

# [DEF:CommitCreate:Class]
class CommitCreate(BaseModel):
    """Schema for staging and committing changes."""
    message: str
    files: List[str]
# [/DEF:CommitCreate:Class]

# [DEF:ConflictResolution:Class]
class ConflictResolution(BaseModel):
    """Schema for resolving merge conflicts."""
    file_path: str
    resolution: str = Field(pattern="^(mine|theirs|manual)$")
    content: Optional[str] = None
# [/DEF:ConflictResolution:Class]

# [DEF:DeploymentEnvironmentSchema:Class]
class DeploymentEnvironmentSchema(BaseModel):
    """Schema for representing a target deployment environment."""
    id: str
    name: str
    superset_url: str
    is_active: bool

    class Config:
        from_attributes = True
# [/DEF:DeploymentEnvironmentSchema:Class]

# [DEF:DeployRequest:Class]
class DeployRequest(BaseModel):
    """Schema for deployment requests."""
    environment_id: str
# [/DEF:DeployRequest:Class]

# [DEF:RepoInitRequest:Class]
class RepoInitRequest(BaseModel):
    """Schema for repository initialization requests."""
    config_id: str
    remote_url: str
# [/DEF:RepoInitRequest:Class]

# [/DEF:backend.src.api.routes.git_schemas:Module]
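A short sketch of how these schemas validate request payloads before they reach GitService; the sample field values below are illustrative only.

# Hedged validation sketch for the schemas defined above (sample values are placeholders).
from datetime import datetime
from src.api.routes.git_schemas import CommitCreate, BranchSchema

# Raises pydantic.ValidationError if "message" or "files" is missing or has the wrong type
commit_req = CommitCreate(message="Update charts", files=["charts/sales.yaml"])

branch = BranchSchema(
    name="main",
    commit_hash="d592fa7",
    is_remote=False,
    last_updated=datetime.utcnow(),
)
print(commit_req.message, branch.name)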
@@ -18,7 +18,7 @@ import os
 
 from .dependencies import get_task_manager, get_scheduler_service
 from .core.logger import logger, belief_scope
-from .api.routes import plugins, tasks, settings, environments, mappings, migration, connections
+from .api.routes import plugins, tasks, settings, environments, mappings, migration, connections, git
 from .core.database import init_db
 
 # [DEF:App:Global]
@@ -88,6 +88,7 @@ app.include_router(connections.router, prefix="/api/settings/connections", tags=
 app.include_router(environments.router, prefix="/api/environments", tags=["Environments"])
 app.include_router(mappings.router)
 app.include_router(migration.router)
+app.include_router(git.router)
 
 # [DEF:websocket_endpoint:Function]
 # @PURPOSE: Provides a WebSocket endpoint for real-time log streaming of a task.
@@ -15,6 +15,7 @@ from ..models.mapping import Base
 # Import models to ensure they're registered with Base
 from ..models.task import TaskRecord
 from ..models.connection import ConnectionConfig
+from ..models.git import GitServerConfig, GitRepository, DeploymentEnvironment
 from .logger import belief_scope
 import os
 # [/SECTION]
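The import added here is what makes the new tables appear: SQLAlchemy only creates tables for models that have been imported (and thus registered on Base.metadata) before create_all runs. A simplified, hypothetical view of that mechanism, assuming init_db ultimately calls Base.metadata.create_all (the engine URL below is a placeholder):

# Hypothetical, simplified sketch of why the model import order matters for init_db.
from sqlalchemy import create_engine
from src.models.mapping import Base
# Importing the models registers git_server_configs, git_repositories and
# deployment_environments on Base.metadata before create_all() runs.
from src.models.git import GitServerConfig, GitRepository, DeploymentEnvironment

def init_db_sketch(url: str = "sqlite:///tasks.db"):
    engine = create_engine(url)
    Base.metadata.create_all(engine)  # now includes the three git tables
    return engine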
backend/src/models/git.py (new file, 73 lines)

"""
[DEF:GitModels:Module]
Git-specific SQLAlchemy models for configuration and repository tracking.
@RELATION: specs/011-git-integration-dashboard/data-model.md
"""

import enum
from datetime import datetime
from sqlalchemy import Column, String, Integer, DateTime, Enum, ForeignKey, Boolean
from sqlalchemy.dialects.postgresql import UUID
import uuid
from src.core.database import Base

class GitProvider(str, enum.Enum):
    GITHUB = "GITHUB"
    GITLAB = "GITLAB"
    GITEA = "GITEA"

class GitStatus(str, enum.Enum):
    CONNECTED = "CONNECTED"
    FAILED = "FAILED"
    UNKNOWN = "UNKNOWN"

class SyncStatus(str, enum.Enum):
    CLEAN = "CLEAN"
    DIRTY = "DIRTY"
    CONFLICT = "CONFLICT"

class GitServerConfig(Base):
    """
    [DEF:GitServerConfig:Class]
    Configuration for a Git server connection.
    """
    __tablename__ = "git_server_configs"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    name = Column(String(255), nullable=False)
    provider = Column(Enum(GitProvider), nullable=False)
    url = Column(String(255), nullable=False)
    pat = Column(String(255), nullable=False)  # PERSONAL ACCESS TOKEN
    default_repository = Column(String(255), nullable=True)
    status = Column(Enum(GitStatus), default=GitStatus.UNKNOWN)
    last_validated = Column(DateTime, default=datetime.utcnow)

class GitRepository(Base):
    """
    [DEF:GitRepository:Class]
    Tracking for a local Git repository linked to a dashboard.
    """
    __tablename__ = "git_repositories"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    dashboard_id = Column(Integer, nullable=False, unique=True)
    config_id = Column(String(36), ForeignKey("git_server_configs.id"), nullable=False)
    remote_url = Column(String(255), nullable=False)
    local_path = Column(String(255), nullable=False)
    current_branch = Column(String(255), default="main")
    sync_status = Column(Enum(SyncStatus), default=SyncStatus.CLEAN)

class DeploymentEnvironment(Base):
    """
    [DEF:DeploymentEnvironment:Class]
    Target Superset environments for dashboard deployment.
    """
    __tablename__ = "deployment_environments"

    id = Column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    name = Column(String(255), nullable=False)
    superset_url = Column(String(255), nullable=False)
    superset_token = Column(String(255), nullable=False)
    is_active = Column(Boolean, default=True)

# [/DEF:GitModels:Module]
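A minimal sketch of using these models through the project's SessionLocal factory, assuming the tables have already been created by init_db (registered via the database.py change above); the server name, URL and token values are placeholders.

# Hedged sketch: persisting a Git server configuration with the models above.
from src.core.database import SessionLocal
from src.models.git import GitServerConfig, GitProvider

db = SessionLocal()
try:
    cfg = GitServerConfig(
        name="internal-gitea",
        provider=GitProvider.GITEA,
        url="https://git.example.local",
        pat="<personal-access-token>",
    )
    db.add(cfg)
    db.commit()
    # id, status and last_validated fall back to their column defaults
    print(cfg.id, cfg.status, cfg.last_validated)
finally:
    db.close()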
backend/src/plugins/git_plugin.py (new file, 345 lines)

# [DEF:backend.src.plugins.git_plugin:Module]
#
# @SEMANTICS: git, plugin, dashboard, version_control, sync, deploy
# @PURPOSE: Provides a plugin for versioning and deploying Superset dashboards.
# @LAYER: Plugin
# @RELATION: INHERITS_FROM -> src.core.plugin_base.PluginBase
# @RELATION: USES -> src.services.git_service.GitService
# @RELATION: USES -> src.core.superset_client.SupersetClient
# @RELATION: USES -> src.core.config_manager.ConfigManager
#
# @INVARIANT: All Git operations must be performed through GitService.
# @CONSTRAINT: The plugin works only with unpacked Superset YAML exports.

# [SECTION: IMPORTS]
import os
import io
import shutil
import zipfile
from pathlib import Path
from typing import Dict, Any, Optional
from src.core.plugin_base import PluginBase
from src.services.git_service import GitService
from src.core.logger import logger, belief_scope
from src.core.config_manager import ConfigManager
from src.core.superset_client import SupersetClient
# [/SECTION]

# [DEF:GitPlugin:Class]
# @PURPOSE: Implementation of the Git Integration plugin for dashboard version management.
class GitPlugin(PluginBase):

    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the plugin and its dependencies.
    # @POST: git_service and config_manager are initialized.
    def __init__(self):
        with belief_scope("GitPlugin.__init__"):
            logger.info("[GitPlugin.__init__][Entry] Initializing GitPlugin.")
            self.git_service = GitService()

            # Robust config path resolution:
            # 1. Try absolute path from src/dependencies.py style if possible
            # 2. Try relative paths based on common execution patterns
            if os.path.exists("../config.json"):
                config_path = "../config.json"
            elif os.path.exists("config.json"):
                config_path = "config.json"
            else:
                # Fallback to the one initialized in dependencies if we can import it
                try:
                    from src.dependencies import config_manager
                    self.config_manager = config_manager
                    logger.info("[GitPlugin.__init__][Exit] GitPlugin initialized using shared config_manager.")
                    return
                except Exception:
                    config_path = "config.json"

            self.config_manager = ConfigManager(config_path)
            logger.info(f"[GitPlugin.__init__][Exit] GitPlugin initialized with {config_path}")
    # [/DEF:__init__:Function]

    @property
    def id(self) -> str:
        return "git-integration"

    @property
    def name(self) -> str:
        return "Git Integration"

    @property
    def description(self) -> str:
        return "Version control for Superset dashboards"

    @property
    def version(self) -> str:
        return "0.1.0"

    # [DEF:get_schema:Function]
    # @PURPOSE: Returns the JSON schema of parameters for plugin task execution.
    # @RETURN: Dict[str, Any] - Parameter schema.
    def get_schema(self) -> Dict[str, Any]:
        with belief_scope("GitPlugin.get_schema"):
            return {
                "type": "object",
                "properties": {
                    "operation": {"type": "string", "enum": ["sync", "deploy", "history"]},
                    "dashboard_id": {"type": "integer"},
                    "environment_id": {"type": "string"},
                    "source_env_id": {"type": "string"}
                },
                "required": ["operation", "dashboard_id"]
            }
    # [/DEF:get_schema:Function]

    # [DEF:initialize:Function]
    # @PURPOSE: Performs the initial plugin setup.
    # @POST: The plugin is ready to execute tasks.
    async def initialize(self):
        with belief_scope("GitPlugin.initialize"):
            logger.info("[GitPlugin.initialize][Action] Initializing Git Integration Plugin logic.")

    # [DEF:execute:Function]
    # @PURPOSE: Main method for executing plugin tasks.
    # @PRE: task_data contains 'operation' and 'dashboard_id'.
    # @POST: Returns the result of the operation.
    # @PARAM: task_data (Dict[str, Any]) - Task data.
    # @RETURN: Dict[str, Any] - Status and message.
    # @RELATION: CALLS -> self._handle_sync
    # @RELATION: CALLS -> self._handle_deploy
    async def execute(self, task_data: Dict[str, Any]) -> Dict[str, Any]:
        with belief_scope("GitPlugin.execute"):
            operation = task_data.get("operation")
            dashboard_id = task_data.get("dashboard_id")

            logger.info(f"[GitPlugin.execute][Entry] Executing operation: {operation} for dashboard {dashboard_id}")

            if operation == "sync":
                source_env_id = task_data.get("source_env_id")
                result = await self._handle_sync(dashboard_id, source_env_id)
            elif operation == "deploy":
                env_id = task_data.get("environment_id")
                result = await self._handle_deploy(dashboard_id, env_id)
            elif operation == "history":
                result = {"status": "success", "message": "History available via API"}
            else:
                logger.error(f"[GitPlugin.execute][Coherence:Failed] Unknown operation: {operation}")
                raise ValueError(f"Unknown operation: {operation}")

            logger.info(f"[GitPlugin.execute][Exit] Operation {operation} completed.")
            return result
    # [/DEF:execute:Function]

    # [DEF:_handle_sync:Function]
    # @PURPOSE: Exports a dashboard from Superset and unpacks it into the Git repository.
    # @PRE: A repository for the dashboard must exist.
    # @POST: Files in the repository are updated to the current state in Superset.
    # @PARAM: dashboard_id (int) - Dashboard ID.
    # @PARAM: source_env_id (Optional[str]) - Source environment ID.
    # @RETURN: Dict[str, str] - Sync result.
    # @SIDE_EFFECT: Modifies files in the repository's local working directory.
    # @RELATION: CALLS -> src.services.git_service.GitService.get_repo
    # @RELATION: CALLS -> src.core.superset_client.SupersetClient.export_dashboard
    async def _handle_sync(self, dashboard_id: int, source_env_id: Optional[str] = None) -> Dict[str, str]:
        with belief_scope("GitPlugin._handle_sync"):
            try:
                # 1. Get the repository
                repo = self.git_service.get_repo(dashboard_id)
                repo_path = Path(repo.working_dir)
                logger.info(f"[_handle_sync][Action] Target repo path: {repo_path}")

                # 2. Set up the Superset client
                env = self._get_env(source_env_id)
                client = SupersetClient(env)
                client.authenticate()

                # 3. Export the dashboard
                logger.info(f"[_handle_sync][Action] Exporting dashboard {dashboard_id} from {env.name}")
                zip_bytes, _ = client.export_dashboard(dashboard_id)

                # 4. Unpack with structure flattening
                logger.info(f"[_handle_sync][Action] Unpacking export to {repo_path}")

                # Folders/files we expect from a Superset export
                managed_dirs = ["dashboards", "charts", "datasets", "databases"]
                managed_files = ["metadata.yaml"]

                # Clean up old data before unpacking so no stale leftovers remain
                for d in managed_dirs:
                    d_path = repo_path / d
                    if d_path.exists() and d_path.is_dir():
                        shutil.rmtree(d_path)
                for f in managed_files:
                    f_path = repo_path / f
                    if f_path.exists():
                        f_path.unlink()

                with zipfile.ZipFile(io.BytesIO(zip_bytes)) as zf:
                    # Superset exports everything into a dashboard_export_<timestamp>/ subfolder;
                    # we need to detect that folder name
                    namelist = zf.namelist()
                    if not namelist:
                        raise ValueError("Export ZIP is empty")

                    root_folder = namelist[0].split('/')[0]
                    logger.info(f"[_handle_sync][Action] Detected root folder in ZIP: {root_folder}")

                    for member in zf.infolist():
                        if member.filename.startswith(root_folder + "/") and len(member.filename) > len(root_folder) + 1:
                            # Strip the folder prefix
                            relative_path = member.filename[len(root_folder)+1:]
                            target_path = repo_path / relative_path

                            if member.is_dir():
                                target_path.mkdir(parents=True, exist_ok=True)
                            else:
                                target_path.parent.mkdir(parents=True, exist_ok=True)
                                with zf.open(member) as source, open(target_path, "wb") as target:
                                    shutil.copyfileobj(source, target)

                # 5. Automatically stage changes (no commit, so the user can review the diff)
                try:
                    repo.git.add(A=True)
                    logger.info("[_handle_sync][Action] Changes staged in git")
                except Exception as ge:
                    logger.warning(f"[_handle_sync][Action] Failed to stage changes: {ge}")

                logger.info(f"[_handle_sync][Coherence:OK] Dashboard {dashboard_id} synced successfully.")
                return {"status": "success", "message": "Dashboard synced and flattened in local repository"}

            except Exception as e:
                logger.error(f"[_handle_sync][Coherence:Failed] Sync failed: {e}")
                raise
    # [/DEF:_handle_sync:Function]

    # [DEF:_handle_deploy:Function]
    # @PURPOSE: Packs the repository into a ZIP and imports it into the target Superset environment.
    # @PRE: environment_id must correspond to a configured environment.
    # @POST: The dashboard is imported into the target Superset.
    # @PARAM: dashboard_id (int) - Dashboard ID.
    # @PARAM: env_id (str) - Target environment ID.
    # @RETURN: Dict[str, Any] - Deployment result.
    # @SIDE_EFFECT: Creates and removes a temporary ZIP file.
    # @RELATION: CALLS -> src.core.superset_client.SupersetClient.import_dashboard
    async def _handle_deploy(self, dashboard_id: int, env_id: str) -> Dict[str, Any]:
        with belief_scope("GitPlugin._handle_deploy"):
            try:
                if not env_id:
                    raise ValueError("Target environment ID required for deployment")

                # 1. Get the repository
                repo = self.git_service.get_repo(dashboard_id)
                repo_path = Path(repo.working_dir)

                # 2. Pack into a ZIP
                logger.info(f"[_handle_deploy][Action] Packing repository {repo_path} for deployment.")
                zip_buffer = io.BytesIO()

                # Superset expects a root directory in the ZIP (e.g., dashboard_export_20240101T000000/)
                root_dir_name = f"dashboard_export_{dashboard_id}"

                with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zf:
                    for root, dirs, files in os.walk(repo_path):
                        if ".git" in dirs:
                            dirs.remove(".git")
                        for file in files:
                            if file == ".git" or file.endswith(".zip"): continue
                            file_path = Path(root) / file
                            # Prepend the root directory name to the archive path
                            arcname = Path(root_dir_name) / file_path.relative_to(repo_path)
                            zf.write(file_path, arcname)

                zip_buffer.seek(0)

                # 3. Set up the Superset client
                env = self.config_manager.get_environment(env_id)
                if not env:
                    raise ValueError(f"Environment {env_id} not found")

                client = SupersetClient(env)
                client.authenticate()

                # 4. Import
                temp_zip_path = repo_path / f"deploy_{dashboard_id}.zip"
                logger.info(f"[_handle_deploy][Action] Saving temporary zip to {temp_zip_path}")
                with open(temp_zip_path, "wb") as f:
                    f.write(zip_buffer.getvalue())

                try:
                    logger.info(f"[_handle_deploy][Action] Importing dashboard to {env.name}")
                    result = client.import_dashboard(temp_zip_path)
                    logger.info(f"[_handle_deploy][Coherence:OK] Deployment successful for dashboard {dashboard_id}.")
                    return {"status": "success", "message": f"Dashboard deployed to {env.name}", "details": result}
                finally:
                    if temp_zip_path.exists():
                        os.remove(temp_zip_path)

            except Exception as e:
                logger.error(f"[_handle_deploy][Coherence:Failed] Deployment failed: {e}")
                raise
    # [/DEF:_handle_deploy:Function]

    # [DEF:_get_env:Function]
    # @PURPOSE: Helper method for resolving an environment configuration.
    # @PARAM: env_id (Optional[str]) - Environment ID.
    # @RETURN: Environment - Environment configuration object.
    def _get_env(self, env_id: Optional[str] = None):
        with belief_scope("GitPlugin._get_env"):
            logger.info(f"[_get_env][Entry] Fetching environment for ID: {env_id}")

            # Priority 1: ConfigManager (config.json)
            if env_id:
                env = self.config_manager.get_environment(env_id)
                if env:
                    logger.info(f"[_get_env][Exit] Found environment by ID in ConfigManager: {env.name}")
                    return env

            # Priority 2: Database (DeploymentEnvironment)
            from src.core.database import SessionLocal
            from src.models.git import DeploymentEnvironment

            db = SessionLocal()
            try:
                if env_id:
                    db_env = db.query(DeploymentEnvironment).filter(DeploymentEnvironment.id == env_id).first()
                else:
                    # If no ID, try to find active or any environment in DB
                    db_env = db.query(DeploymentEnvironment).filter(DeploymentEnvironment.is_active == True).first()
                    if not db_env:
                        db_env = db.query(DeploymentEnvironment).first()

                if db_env:
                    logger.info(f"[_get_env][Exit] Found environment in DB: {db_env.name}")
                    from src.core.config_models import Environment
                    # Use token as password for SupersetClient
                    return Environment(
                        id=db_env.id,
                        name=db_env.name,
                        url=db_env.superset_url,
                        username="admin",
                        password=db_env.superset_token,
                        verify_ssl=True
                    )
            finally:
                db.close()

            # Priority 3: ConfigManager Default (if no env_id provided)
            envs = self.config_manager.get_environments()
            if envs:
                if env_id:
                    # If env_id was provided but not found in DB or specifically by ID in config,
                    # but we have other envs, maybe it's one of them?
                    env = next((e for e in envs if e.id == env_id), None)
                    if env:
                        logger.info(f"[_get_env][Exit] Found environment {env_id} in ConfigManager list")
                        return env

                if not env_id:
                    logger.info(f"[_get_env][Exit] Using first environment from ConfigManager: {envs[0].name}")
                    return envs[0]

            logger.error(f"[_get_env][Coherence:Failed] No environments configured (searched config.json and DB). env_id={env_id}")
            raise ValueError("No environments configured. Please add a Superset Environment in Settings.")
    # [/DEF:_get_env:Function]

# [/DEF:GitPlugin:Class]
# [/DEF:backend.src.plugins.git_plugin:Module]
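A minimal sketch of driving the plugin directly, mirroring what the sync and deploy routes above do; the dashboard id 12 and the environment id "prod" are placeholders, and a repository for the dashboard is assumed to have been initialized already.

# Hedged usage sketch for GitPlugin.execute (placeholder dashboard and environment ids).
import asyncio
from src.plugins.git_plugin import GitPlugin

async def main():
    plugin = GitPlugin()
    # Export dashboard 12 from the source Superset into its local repository
    sync_result = await plugin.execute({"operation": "sync", "dashboard_id": 12})
    print(sync_result)
    # Pack the repository and import it into a target environment
    deploy_result = await plugin.execute({
        "operation": "deploy",
        "dashboard_id": 12,
        "environment_id": "prod",
    })
    print(deploy_result)

asyncio.run(main())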
backend/src/services/git_service.py (new file, 380 lines)

# [DEF:backend.src.services.git_service:Module]
#
# @SEMANTICS: git, service, gitpython, repository, version_control
# @PURPOSE: Core Git logic using GitPython to manage dashboard repositories.
# @LAYER: Service
# @RELATION: INHERITS_FROM -> None
# @RELATION: USED_BY -> src.api.routes.git
# @RELATION: USED_BY -> src.plugins.git_plugin
#
# @INVARIANT: All Git operations must be performed on a valid local directory.

import os
import shutil
import httpx
from git import Repo, RemoteProgress
from fastapi import HTTPException
from typing import List, Optional
from datetime import datetime
from src.core.logger import logger, belief_scope
from src.models.git import GitProvider

# [DEF:GitService:Class]
# @PURPOSE: Wrapper for GitPython operations with semantic logging and error handling.
class GitService:
    """
    Wrapper for GitPython operations.
    """

    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the GitService with a base path for repositories.
    # @PARAM: base_path (str) - Root directory for all Git clones.
    def __init__(self, base_path: str = "backend/git_repos"):
        with belief_scope("GitService.__init__"):
            self.base_path = base_path
            if not os.path.exists(self.base_path):
                os.makedirs(self.base_path)
    # [/DEF:__init__:Function]

    # [DEF:_get_repo_path:Function]
    # @PURPOSE: Resolves the local filesystem path for a dashboard's repository.
    # @PARAM: dashboard_id (int)
    # @RETURN: str
    def _get_repo_path(self, dashboard_id: int) -> str:
        return os.path.join(self.base_path, str(dashboard_id))
    # [/DEF:_get_repo_path:Function]

    # [DEF:init_repo:Function]
    # @PURPOSE: Initialize or clone a repository for a dashboard.
    # @PARAM: dashboard_id (int)
    # @PARAM: remote_url (str)
    # @PARAM: pat (str) - Personal Access Token for authentication.
    # @RETURN: Repo - GitPython Repo object.
    def init_repo(self, dashboard_id: int, remote_url: str, pat: str) -> Repo:
        with belief_scope("GitService.init_repo"):
            repo_path = self._get_repo_path(dashboard_id)

            # Inject PAT into remote URL if needed
            if pat and "://" in remote_url:
                proto, rest = remote_url.split("://", 1)
                auth_url = f"{proto}://oauth2:{pat}@{rest}"
            else:
                auth_url = remote_url

            if os.path.exists(repo_path):
                logger.info(f"[init_repo][Action] Opening existing repo at {repo_path}")
                return Repo(repo_path)

            logger.info(f"[init_repo][Action] Cloning {remote_url} to {repo_path}")
            return Repo.clone_from(auth_url, repo_path)
    # [/DEF:init_repo:Function]

    # [DEF:get_repo:Function]
    # @PURPOSE: Get Repo object for a dashboard.
    # @PRE: Repository must exist on disk.
    # @RETURN: Repo
    def get_repo(self, dashboard_id: int) -> Repo:
        with belief_scope("GitService.get_repo"):
            repo_path = self._get_repo_path(dashboard_id)
            if not os.path.exists(repo_path):
                logger.error(f"[get_repo][Coherence:Failed] Repository for dashboard {dashboard_id} does not exist")
                raise HTTPException(status_code=404, detail=f"Repository for dashboard {dashboard_id} not found")
            try:
                return Repo(repo_path)
            except Exception as e:
                logger.error(f"[get_repo][Coherence:Failed] Failed to open repository at {repo_path}: {e}")
                raise HTTPException(status_code=500, detail="Failed to open local Git repository")
    # [/DEF:get_repo:Function]

    # [DEF:list_branches:Function]
    # @PURPOSE: List all branches for a dashboard's repository.
    # @RETURN: List[dict]
    def list_branches(self, dashboard_id: int) -> List[dict]:
        with belief_scope("GitService.list_branches"):
            repo = self.get_repo(dashboard_id)
            logger.info(f"[list_branches][Action] Listing branches for {dashboard_id}. Refs: {repo.refs}")
            branches = []

            # Add existing refs
            for ref in repo.refs:
                try:
                    # Strip prefixes for UI
                    name = ref.name.replace('refs/heads/', '').replace('refs/remotes/origin/', '')

                    # Avoid duplicates (e.g. local and remote with same name)
                    if any(b['name'] == name for b in branches):
                        continue

                    branches.append({
                        "name": name,
                        "commit_hash": ref.commit.hexsha if hasattr(ref, 'commit') else "0000000",
                        "is_remote": ref.is_remote() if hasattr(ref, 'is_remote') else False,
                        "last_updated": datetime.fromtimestamp(ref.commit.committed_date) if hasattr(ref, 'commit') else datetime.utcnow()
                    })
                except Exception as e:
                    logger.warning(f"[list_branches][Action] Skipping ref {ref}: {e}")

            # Ensure the current active branch is in the list even if it has no commits or refs
            try:
                active_name = repo.active_branch.name
                if not any(b['name'] == active_name for b in branches):
                    branches.append({
                        "name": active_name,
                        "commit_hash": "0000000",
                        "is_remote": False,
                        "last_updated": datetime.utcnow()
                    })
            except Exception as e:
                logger.warning(f"[list_branches][Action] Could not determine active branch: {e}")
                # If everything else failed and list is still empty, add default
                if not branches:
                    branches.append({
                        "name": "main",
                        "commit_hash": "0000000",
                        "is_remote": False,
                        "last_updated": datetime.utcnow()
                    })

            return branches
    # [/DEF:list_branches:Function]

    # [DEF:create_branch:Function]
    # @PURPOSE: Create a new branch from an existing one.
    # @PARAM: name (str) - New branch name.
    # @PARAM: from_branch (str) - Source branch.
    def create_branch(self, dashboard_id: int, name: str, from_branch: str = "main"):
        with belief_scope("GitService.create_branch"):
            repo = self.get_repo(dashboard_id)
            logger.info(f"[create_branch][Action] Creating branch {name} from {from_branch}")

            # Handle empty repository case (no commits)
            if not repo.heads and not repo.remotes:
                logger.warning("[create_branch][Action] Repository is empty. Creating initial commit to enable branching.")
                readme_path = os.path.join(repo.working_dir, "README.md")
                if not os.path.exists(readme_path):
                    with open(readme_path, "w") as f:
                        f.write(f"# Dashboard {dashboard_id}\nGit repository for Superset dashboard integration.")
                repo.index.add(["README.md"])
                repo.index.commit("Initial commit")

            # Verify source branch exists
            try:
                repo.commit(from_branch)
            except Exception:
                logger.warning(f"[create_branch][Action] Source branch {from_branch} not found, using HEAD")
                from_branch = repo.head

            try:
                new_branch = repo.create_head(name, from_branch)
                return new_branch
            except Exception as e:
                logger.error(f"[create_branch][Coherence:Failed] {e}")
                raise
    # [/DEF:create_branch:Function]

    # [DEF:checkout_branch:Function]
    # @PURPOSE: Switch to a specific branch.
    def checkout_branch(self, dashboard_id: int, name: str):
        with belief_scope("GitService.checkout_branch"):
            repo = self.get_repo(dashboard_id)
            logger.info(f"[checkout_branch][Action] Checking out branch {name}")
            repo.git.checkout(name)
    # [/DEF:checkout_branch:Function]

    # [DEF:commit_changes:Function]
    # @PURPOSE: Stage and commit changes.
    # @PARAM: message (str) - Commit message.
    # @PARAM: files (List[str]) - Optional list of specific files to stage.
    def commit_changes(self, dashboard_id: int, message: str, files: List[str] = None):
        with belief_scope("GitService.commit_changes"):
            repo = self.get_repo(dashboard_id)

            # Check if there are any changes to commit
            if not repo.is_dirty(untracked_files=True) and not files:
                logger.info(f"[commit_changes][Action] No changes to commit for dashboard {dashboard_id}")
                return

            if files:
                logger.info(f"[commit_changes][Action] Staging files: {files}")
                repo.index.add(files)
            else:
                logger.info("[commit_changes][Action] Staging all changes")
                repo.git.add(A=True)

            repo.index.commit(message)
            logger.info(f"[commit_changes][Coherence:OK] Committed changes with message: {message}")
    # [/DEF:commit_changes:Function]

    # [DEF:push_changes:Function]
    # @PURPOSE: Push local commits to remote.
    def push_changes(self, dashboard_id: int):
        with belief_scope("GitService.push_changes"):
            repo = self.get_repo(dashboard_id)

            # Ensure we have something to push
            if not repo.heads:
                logger.warning(f"[push_changes][Coherence:Failed] No local branches to push for dashboard {dashboard_id}")
                return

            try:
                origin = repo.remote(name='origin')
            except ValueError:
                logger.error(f"[push_changes][Coherence:Failed] Remote 'origin' not found for dashboard {dashboard_id}")
                raise HTTPException(status_code=400, detail="Remote 'origin' not configured")

            # Check if current branch has an upstream
            try:
                current_branch = repo.active_branch
                logger.info(f"[push_changes][Action] Pushing branch {current_branch.name} to origin")
                # Using a timeout for network operations
                push_info = origin.push(refspec=f'{current_branch.name}:{current_branch.name}')
                for info in push_info:
                    if info.flags & info.ERROR:
                        logger.error(f"[push_changes][Coherence:Failed] Error pushing ref {info.remote_ref_string}: {info.summary}")
                        raise Exception(f"Git push error for {info.remote_ref_string}: {info.summary}")
            except Exception as e:
                logger.error(f"[push_changes][Coherence:Failed] Failed to push changes: {e}")
                raise HTTPException(status_code=500, detail=f"Git push failed: {str(e)}")
    # [/DEF:push_changes:Function]

    # [DEF:pull_changes:Function]
    # @PURPOSE: Pull changes from remote.
    def pull_changes(self, dashboard_id: int):
        with belief_scope("GitService.pull_changes"):
            repo = self.get_repo(dashboard_id)
            try:
                origin = repo.remote(name='origin')
                logger.info("[pull_changes][Action] Pulling changes from origin")
                fetch_info = origin.pull()
                for info in fetch_info:
                    if info.flags & info.ERROR:
                        logger.error(f"[pull_changes][Coherence:Failed] Error pulling ref {info.ref}: {info.note}")
                        raise Exception(f"Git pull error for {info.ref}: {info.note}")
            except ValueError:
                logger.error(f"[pull_changes][Coherence:Failed] Remote 'origin' not found for dashboard {dashboard_id}")
                raise HTTPException(status_code=400, detail="Remote 'origin' not configured")
            except Exception as e:
                logger.error(f"[pull_changes][Coherence:Failed] Failed to pull changes: {e}")
                raise HTTPException(status_code=500, detail=f"Git pull failed: {str(e)}")
    # [/DEF:pull_changes:Function]

    # [DEF:get_status:Function]
    # @PURPOSE: Get current repository status (dirty files, untracked, etc.)
    # @RETURN: dict
    def get_status(self, dashboard_id: int) -> dict:
        with belief_scope("GitService.get_status"):
            repo = self.get_repo(dashboard_id)

            # Handle empty repository (no commits)
            has_commits = False
            try:
                repo.head.commit
                has_commits = True
            except Exception:
                has_commits = False

            return {
                "is_dirty": repo.is_dirty(untracked_files=True),
                "untracked_files": repo.untracked_files,
                "modified_files": [item.a_path for item in repo.index.diff(None)],
                "staged_files": [item.a_path for item in repo.index.diff("HEAD")] if has_commits else [],
                "current_branch": repo.active_branch.name
            }
    # [/DEF:get_status:Function]

    # [DEF:get_diff:Function]
    # @PURPOSE: Generate diff for a file or the whole repository.
    # @PARAM: file_path (str) - Optional specific file.
    # @PARAM: staged (bool) - Whether to show staged changes.
    # @RETURN: str
    def get_diff(self, dashboard_id: int, file_path: str = None, staged: bool = False) -> str:
        with belief_scope("GitService.get_diff"):
            repo = self.get_repo(dashboard_id)
            diff_args = []
            if staged:
                diff_args.append("--staged")

            if file_path:
                return repo.git.diff(*diff_args, "--", file_path)
            return repo.git.diff(*diff_args)
    # [/DEF:get_diff:Function]

    # [DEF:get_commit_history:Function]
    # @PURPOSE: Retrieve commit history for a repository.
    # @PARAM: limit (int) - Max number of commits to return.
    # @RETURN: List[dict]
    def get_commit_history(self, dashboard_id: int, limit: int = 50) -> List[dict]:
        with belief_scope("GitService.get_commit_history"):
            repo = self.get_repo(dashboard_id)
            commits = []
            try:
                # Check if there are any commits at all
                if not repo.heads and not repo.remotes:
                    return []

                for commit in repo.iter_commits(max_count=limit):
                    commits.append({
                        "hash": commit.hexsha,
                        "author": commit.author.name,
                        "email": commit.author.email,
                        "timestamp": datetime.fromtimestamp(commit.committed_date),
                        "message": commit.message.strip(),
                        "files_changed": list(commit.stats.files.keys())
                    })
            except Exception as e:
                logger.warning(f"[get_commit_history][Action] Could not retrieve commit history for dashboard {dashboard_id}: {e}")
                return []
            return commits
    # [/DEF:get_commit_history:Function]

    # [DEF:test_connection:Function]
    # @PURPOSE: Test connection to Git provider using PAT.
    # @PARAM: provider (GitProvider)
    # @PARAM: url (str)
    # @PARAM: pat (str)
    # @RETURN: bool
    async def test_connection(self, provider: GitProvider, url: str, pat: str) -> bool:
        with belief_scope("GitService.test_connection"):
            # Check for offline mode or local-only URLs
            if ".local" in url or "localhost" in url:
                logger.info("[test_connection][Action] Local/Offline mode detected for URL")
                return True

            if not url.startswith(('http://', 'https://')):
                logger.error(f"[test_connection][Coherence:Failed] Invalid URL protocol: {url}")
                return False

            if not pat or not pat.strip():
                logger.error("[test_connection][Coherence:Failed] Git PAT is missing or empty")
                return False

            pat = pat.strip()

            try:
                async with httpx.AsyncClient() as client:
                    if provider == GitProvider.GITHUB:
                        headers = {"Authorization": f"token {pat}"}
                        api_url = "https://api.github.com/user" if "github.com" in url else f"{url.rstrip('/')}/api/v3/user"
                        resp = await client.get(api_url, headers=headers)
                    elif provider == GitProvider.GITLAB:
                        headers = {"PRIVATE-TOKEN": pat}
                        api_url = f"{url.rstrip('/')}/api/v4/user"
                        resp = await client.get(api_url, headers=headers)
                    elif provider == GitProvider.GITEA:
                        headers = {"Authorization": f"token {pat}"}
                        api_url = f"{url.rstrip('/')}/api/v1/user"
                        resp = await client.get(api_url, headers=headers)
                    else:
                        return False

                    if resp.status_code != 200:
                        logger.error(f"[test_connection][Coherence:Failed] Git connection test failed for {provider} at {api_url}. Status: {resp.status_code}")
                    return resp.status_code == 200
            except Exception as e:
                logger.error(f"[test_connection][Coherence:Failed] Error testing git connection: {e}")
                return False
    # [/DEF:test_connection:Function]

# [/DEF:GitService:Class]
# [/DEF:backend.src.services.git_service:Module]
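A minimal local sketch of the GitService call sequence used by the routes and plugin above; the remote URL, PAT and branch name are placeholders, and the repository for dashboard 12 is assumed not to exist yet under backend/git_repos.

# Hedged usage sketch for GitService (placeholder remote URL, PAT and branch name).
from src.services.git_service import GitService

service = GitService(base_path="backend/git_repos")
# Clone (or open) the repository for dashboard 12
service.init_repo(12, "https://git.example.local/org/dashboard-12.git", pat="<personal-access-token>")
# Branch off main, switch to it, then commit and push local edits
service.create_branch(12, "feature/update-charts", from_branch="main")
service.checkout_branch(12, "feature/update-charts")
# ... edit the exported YAML files under backend/git_repos/12 here ...
service.commit_changes(12, "Update chart definitions")
print(service.get_status(12))
service.push_changes(12)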
backend/tasks.db (binary file, not shown)