feat: implement plugin architecture and application settings with Svelte UI
- Added plugin base and loader for backend extensibility
- Implemented application settings management with config persistence
- Created Svelte-based frontend with Dashboard and Settings pages
- Added API routes for plugins, tasks, and settings
- Updated documentation and specifications
- Improved project structure and developer tools
backend/src/core/config_manager.py (new executable file, 205 lines)
@@ -0,0 +1,205 @@
# [DEF:ConfigManagerModule:Module]
#
# @SEMANTICS: config, manager, persistence, json
# @PURPOSE: Manages application configuration, including loading/saving to JSON and CRUD for environments.
# @LAYER: Core
# @RELATION: DEPENDS_ON -> ConfigModels
# @RELATION: CALLS -> logger
# @RELATION: WRITES_TO -> config.json
#
# @INVARIANT: Configuration must always be valid according to AppConfig model.
# @PUBLIC_API: ConfigManager

# [SECTION: IMPORTS]
import json
import os
from pathlib import Path
from typing import Optional, List
from .config_models import AppConfig, Environment, GlobalSettings
from .logger import logger
# [/SECTION]

# [DEF:ConfigManager:Class]
# @PURPOSE: A class to handle application configuration persistence and management.
# @RELATION: WRITES_TO -> config.json
class ConfigManager:

    # [DEF:__init__:Function]
    # @PURPOSE: Initializes the ConfigManager.
    # @PRE: isinstance(config_path, str) and len(config_path) > 0
    # @POST: self.config is an instance of AppConfig
    # @PARAM: config_path (str) - Path to the configuration file.
    def __init__(self, config_path: str = "config.json"):
        # 1. Runtime check of @PRE
        assert isinstance(config_path, str) and config_path, "config_path must be a non-empty string"

        logger.info(f"[ConfigManager][Entry] Initializing with {config_path}")

        # 2. Logic implementation
        self.config_path = Path(config_path)
        self.config: AppConfig = self._load_config()

        # 3. Runtime check of @POST
        assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"

        logger.info("[ConfigManager][Exit] Initialized")
    # [/DEF:__init__]

    # [DEF:_load_config:Function]
    # @PURPOSE: Loads the configuration from disk or creates a default one.
    # @POST: isinstance(return, AppConfig)
    # @RETURN: AppConfig - The loaded or default configuration.
    def _load_config(self) -> AppConfig:
        logger.debug(f"[_load_config][Entry] Loading from {self.config_path}")

        if not self.config_path.exists():
            logger.info("[_load_config][Action] Config file not found. Creating default.")
            default_config = AppConfig(
                environments=[],
                settings=GlobalSettings(backup_path="backups")
            )
            self._save_config_to_disk(default_config)
            return default_config

        try:
            with open(self.config_path, "r") as f:
                data = json.load(f)
            config = AppConfig(**data)
            logger.info("[_load_config][Coherence:OK] Configuration loaded")
            return config
        except Exception as e:
            logger.error(f"[_load_config][Coherence:Failed] Error loading config: {e}")
            return AppConfig(
                environments=[],
                settings=GlobalSettings(backup_path="backups")
            )
    # [/DEF:_load_config]

    # [DEF:_save_config_to_disk:Function]
    # @PURPOSE: Saves the provided configuration object to disk.
    # @PRE: isinstance(config, AppConfig)
    # @PARAM: config (AppConfig) - The configuration to save.
    def _save_config_to_disk(self, config: AppConfig):
        logger.debug(f"[_save_config_to_disk][Entry] Saving to {self.config_path}")

        # 1. Runtime check of @PRE
        assert isinstance(config, AppConfig), "config must be an instance of AppConfig"

        # 2. Logic implementation
        try:
            with open(self.config_path, "w") as f:
                json.dump(config.dict(), f, indent=4)
            logger.info("[_save_config_to_disk][Action] Configuration saved")
        except Exception as e:
            logger.error(f"[_save_config_to_disk][Coherence:Failed] Failed to save: {e}")
    # [/DEF:_save_config_to_disk]

    # [DEF:save:Function]
    # @PURPOSE: Saves the current configuration state to disk.
    def save(self):
        self._save_config_to_disk(self.config)
    # [/DEF:save]

    # [DEF:get_config:Function]
    # @PURPOSE: Returns the current configuration.
    # @RETURN: AppConfig - The current configuration.
    def get_config(self) -> AppConfig:
        return self.config
    # [/DEF:get_config]

    # [DEF:update_global_settings:Function]
    # @PURPOSE: Updates the global settings and persists the change.
    # @PRE: isinstance(settings, GlobalSettings)
    # @PARAM: settings (GlobalSettings) - The new global settings.
    def update_global_settings(self, settings: GlobalSettings):
        logger.info("[update_global_settings][Entry] Updating settings")

        # 1. Runtime check of @PRE
        assert isinstance(settings, GlobalSettings), "settings must be an instance of GlobalSettings"

        # 2. Logic implementation
        self.config.settings = settings
        self.save()

        logger.info("[update_global_settings][Exit] Settings updated")
    # [/DEF:update_global_settings]

    # [DEF:get_environments:Function]
    # @PURPOSE: Returns the list of configured environments.
    # @RETURN: List[Environment] - List of environments.
    def get_environments(self) -> List[Environment]:
        return self.config.environments
    # [/DEF:get_environments]

    # [DEF:add_environment:Function]
    # @PURPOSE: Adds a new environment to the configuration.
    # @PRE: isinstance(env, Environment)
    # @PARAM: env (Environment) - The environment to add.
    def add_environment(self, env: Environment):
        logger.info(f"[add_environment][Entry] Adding environment {env.id}")

        # 1. Runtime check of @PRE
        assert isinstance(env, Environment), "env must be an instance of Environment"

        # 2. Logic implementation
        # Remove any existing environment with the same ID, then append.
        self.config.environments = [e for e in self.config.environments if e.id != env.id]
        self.config.environments.append(env)
        self.save()

        logger.info("[add_environment][Exit] Environment added")
    # [/DEF:add_environment]

    # [DEF:update_environment:Function]
    # @PURPOSE: Updates an existing environment.
    # @PRE: isinstance(env_id, str) and len(env_id) > 0 and isinstance(updated_env, Environment)
    # @PARAM: env_id (str) - The ID of the environment to update.
    # @PARAM: updated_env (Environment) - The updated environment data.
    # @RETURN: bool - True if updated, False otherwise.
    def update_environment(self, env_id: str, updated_env: Environment) -> bool:
        logger.info(f"[update_environment][Entry] Updating {env_id}")

        # 1. Runtime check of @PRE
        assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
        assert isinstance(updated_env, Environment), "updated_env must be an instance of Environment"

        # 2. Logic implementation
        for i, env in enumerate(self.config.environments):
            if env.id == env_id:
                # If the password is masked, keep the old one
                if updated_env.password == "********":
                    updated_env.password = env.password

                self.config.environments[i] = updated_env
                self.save()
                logger.info(f"[update_environment][Coherence:OK] Updated {env_id}")
                return True

        logger.warning(f"[update_environment][Coherence:Failed] Environment {env_id} not found")
        return False
    # [/DEF:update_environment]

    # [DEF:delete_environment:Function]
    # @PURPOSE: Deletes an environment by ID.
    # @PRE: isinstance(env_id, str) and len(env_id) > 0
    # @PARAM: env_id (str) - The ID of the environment to delete.
    def delete_environment(self, env_id: str):
        logger.info(f"[delete_environment][Entry] Deleting {env_id}")

        # 1. Runtime check of @PRE
        assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"

        # 2. Logic implementation
        original_count = len(self.config.environments)
        self.config.environments = [e for e in self.config.environments if e.id != env_id]

        if len(self.config.environments) < original_count:
            self.save()
            logger.info(f"[delete_environment][Action] Deleted {env_id}")
        else:
            logger.warning(f"[delete_environment][Coherence:Failed] Environment {env_id} not found")
    # [/DEF:delete_environment]

# [/DEF:ConfigManager]

# [/DEF:ConfigManagerModule]
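A minimal usage sketch of ConfigManager follows (reviewer illustration, not part of the commit; assumes backend/src is on the import path and uses a throwaway config path):

from src.core.config_manager import ConfigManager
from src.core.config_models import Environment, GlobalSettings

# Creates /tmp/demo_config.json with default contents on first run.
manager = ConfigManager(config_path="/tmp/demo_config.json")

# add_environment replaces any existing environment with the same ID.
manager.add_environment(Environment(
    id="dev",
    name="Development",
    url="http://localhost:8088",
    username="admin",
    password="secret",
    is_default=True,
))

# Persisted immediately via save().
manager.update_global_settings(GlobalSettings(backup_path="backups", default_environment_id="dev"))
print([e.id for e in manager.get_environments()])  # ['dev']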
backend/src/core/config_models.py (new executable file, 36 lines)
@@ -0,0 +1,36 @@
# [DEF:ConfigModels:Module]
# @SEMANTICS: config, models, pydantic
# @PURPOSE: Defines the data models for application configuration using Pydantic.
# @LAYER: Core
# @RELATION: READS_FROM -> config.json
# @RELATION: USED_BY -> ConfigManager

from pydantic import BaseModel, Field
from typing import List, Optional

# [DEF:Environment:DataClass]
# @PURPOSE: Represents a Superset environment configuration.
class Environment(BaseModel):
    id: str
    name: str
    url: str
    username: str
    password: str  # Will be masked in UI
    is_default: bool = False
# [/DEF:Environment]

# [DEF:GlobalSettings:DataClass]
# @PURPOSE: Represents global application settings.
class GlobalSettings(BaseModel):
    backup_path: str
    default_environment_id: Optional[str] = None
# [/DEF:GlobalSettings]

# [DEF:AppConfig:DataClass]
# @PURPOSE: The root configuration model containing all application settings.
class AppConfig(BaseModel):
    environments: List[Environment] = []
    settings: GlobalSettings
# [/DEF:AppConfig]

# [/DEF:ConfigModels]
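For reference, a config.json produced by these models might look like this (illustrative values only):

{
    "environments": [
        {
            "id": "dev",
            "name": "Development",
            "url": "http://localhost:8088",
            "username": "admin",
            "password": "secret",
            "is_default": true
        }
    ],
    "settings": {
        "backup_path": "backups",
        "default_environment_id": "dev"
    }
}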
backend/src/core/logger.py (mode changed: normal file → executable file, 92 lines unchanged)
@@ -1,92 +1,92 @@
# [DEF:LoggerModule:Module]
# @SEMANTICS: logging, websocket, streaming, handler
# @PURPOSE: Configures the application's logging system, including a custom handler for buffering logs and streaming them over WebSockets.
# @LAYER: Core
# @RELATION: Used by the main application and other modules to log events. The WebSocketLogHandler is used by the WebSocket endpoint in app.py.
import logging
from datetime import datetime
from typing import Dict, Any, List, Optional
from collections import deque

from pydantic import BaseModel, Field

# Re-using LogEntry from task_manager for consistency
# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
# @PURPOSE: A Pydantic model representing a single, structured log entry. This is a re-definition for consistency, as it's also defined in task_manager.py.
class LogEntry(BaseModel):
    timestamp: datetime = Field(default_factory=datetime.utcnow)
    level: str
    message: str
    context: Optional[Dict[str, Any]] = None

# [/DEF]

# [DEF:WebSocketLogHandler:Class]
# @SEMANTICS: logging, handler, websocket, buffer
# @PURPOSE: A custom logging handler that captures log records into a buffer. It is designed to be extended for real-time log streaming over WebSockets.
class WebSocketLogHandler(logging.Handler):
    """
    A logging handler that stores log records and can be extended to send them
    over WebSockets.
    """
    def __init__(self, capacity: int = 1000):
        super().__init__()
        self.log_buffer: deque[LogEntry] = deque(maxlen=capacity)
        # In a real implementation, you'd have a way to manage active WebSocket connections,
        # e.g., self.active_connections: Set[WebSocket] = set()

    def emit(self, record: logging.LogRecord):
        try:
            log_entry = LogEntry(
                level=record.levelname,
                message=self.format(record),
                context={
                    "name": record.name,
                    "pathname": record.pathname,
                    "lineno": record.lineno,
                    "funcName": record.funcName,
                    "process": record.process,
                    "thread": record.thread,
                }
            )
            self.log_buffer.append(log_entry)
            # Here you would typically send the log_entry to all active WebSocket connections
            # for real-time streaming to the frontend.
            # Example: for ws in self.active_connections: await ws.send_json(log_entry.dict())
        except Exception:
            self.handleError(record)

    def get_recent_logs(self) -> List[LogEntry]:
        """
        Returns a list of recent log entries from the buffer.
        """
        return list(self.log_buffer)

# [/DEF]

# [DEF:Logger:Global]
# @SEMANTICS: logger, global, instance
# @PURPOSE: The global logger instance for the application, configured with both a console handler and the custom WebSocket handler.
logger = logging.getLogger("superset_tools_app")
logger.setLevel(logging.INFO)

# Create a formatter
formatter = logging.Formatter(
    '[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
)

# Add console handler
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)

# Add WebSocket log handler
websocket_log_handler = WebSocketLogHandler()
websocket_log_handler.setFormatter(formatter)
logger.addHandler(websocket_log_handler)

# Example usage:
# logger.info("Application started", extra={"context_key": "context_value"})
# logger.error("An error occurred", exc_info=True)
# [/DEF]
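A sketch of how the buffered logs could be served over a WebSocket (reviewer illustration; the real endpoint lives in app.py, and the FastAPI framework and route path shown here are assumptions):

from fastapi import FastAPI, WebSocket

from src.core.logger import websocket_log_handler

app = FastAPI()

@app.websocket("/ws/logs")  # hypothetical route
async def stream_logs(websocket: WebSocket):
    await websocket.accept()
    # Replay the buffered entries; a live implementation would also register
    # this connection with the handler, as the emit() comments suggest.
    for entry in websocket_log_handler.get_recent_logs():
        await websocket.send_text(entry.json())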
backend/src/core/plugin_base.py (mode changed: normal file → executable file, 71 lines unchanged)
@@ -1,71 +1,71 @@
from abc import ABC, abstractmethod
from typing import Dict, Any

from pydantic import BaseModel, Field

# [DEF:PluginBase:Class]
# @SEMANTICS: plugin, interface, base, abstract
# @PURPOSE: Defines the abstract base class that all plugins must implement to be recognized by the system. It enforces a common structure for plugin metadata and execution.
# @LAYER: Core
# @RELATION: Used by PluginLoader to identify valid plugins.
# @INVARIANT: All plugins MUST inherit from this class.
class PluginBase(ABC):
    """
    Base class for all plugins.
    Plugins must inherit from this class and implement the abstract methods.
    """

    @property
    @abstractmethod
    def id(self) -> str:
        """A unique identifier for the plugin."""
        pass

    @property
    @abstractmethod
    def name(self) -> str:
        """A human-readable name for the plugin."""
        pass

    @property
    @abstractmethod
    def description(self) -> str:
        """A brief description of what the plugin does."""
        pass

    @property
    @abstractmethod
    def version(self) -> str:
        """The version of the plugin."""
        pass

    @abstractmethod
    def get_schema(self) -> Dict[str, Any]:
        """
        Returns the JSON schema for the plugin's input parameters.
        This schema will be used to generate the frontend form.
        """
        pass

    @abstractmethod
    async def execute(self, params: Dict[str, Any]):
        """
        Executes the plugin's logic.
        The `params` argument will be validated against the schema returned by `get_schema()`.
        """
        pass
# [/DEF]

# [DEF:PluginConfig:Class]
# @SEMANTICS: plugin, config, schema, pydantic
# @PURPOSE: A Pydantic model used to represent the validated configuration and metadata of a loaded plugin. This object is what gets exposed to the API layer.
# @LAYER: Core
# @RELATION: Instantiated by PluginLoader after validating a PluginBase instance.
class PluginConfig(BaseModel):
    """Pydantic model for plugin configuration."""
    id: str = Field(..., description="Unique identifier for the plugin")
    name: str = Field(..., description="Human-readable name for the plugin")
    description: str = Field(..., description="Brief description of what the plugin does")
    version: str = Field(..., description="Version of the plugin")
    input_schema: Dict[str, Any] = Field(..., description="JSON schema for input parameters", alias="schema")
# [/DEF]
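A minimal concrete plugin against this interface (reviewer illustration; the EchoPlugin below is hypothetical and not part of the commit):

from typing import Dict, Any

from src.core.plugin_base import PluginBase


class EchoPlugin(PluginBase):
    @property
    def id(self) -> str:
        return "echo"

    @property
    def name(self) -> str:
        return "Echo"

    @property
    def description(self) -> str:
        return "Returns its input message."

    @property
    def version(self) -> str:
        return "0.1.0"

    def get_schema(self) -> Dict[str, Any]:
        # JSON schema that drives the generated frontend form.
        return {
            "type": "object",
            "properties": {"message": {"type": "string"}},
            "required": ["message"],
        }

    async def execute(self, params: Dict[str, Any]):
        # params has already been checked against get_schema() upstream.
        return params["message"]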
backend/src/core/plugin_loader.py (mode changed: normal file → executable file; 123 → 130 lines)
@@ -1,123 +1,130 @@
(The previous revision of _load_module set package_name = f"src.plugins.{module_name}" directly; the updated revision below adds package-prefix detection. The rest of the file is unchanged.)
import importlib.util
import os
import sys
from typing import Dict, Type, List, Optional

from .plugin_base import PluginBase, PluginConfig
from jsonschema import validate

# [DEF:PluginLoader:Class]
# @SEMANTICS: plugin, loader, dynamic, import
# @PURPOSE: Scans a specified directory for Python modules, dynamically loads them, and registers any classes that are valid implementations of the PluginBase interface.
# @LAYER: Core
# @RELATION: Depends on PluginBase. It is used by the main application to discover and manage available plugins.
class PluginLoader:
    """
    Scans a directory for Python modules, loads them, and identifies classes
    that inherit from PluginBase.
    """

    def __init__(self, plugin_dir: str):
        self.plugin_dir = plugin_dir
        self._plugins: Dict[str, PluginBase] = {}
        self._plugin_configs: Dict[str, PluginConfig] = {}
        self._load_plugins()

    def _load_plugins(self):
        """
        Scans the plugin directory, imports modules, and registers valid plugins.
        """
        if not os.path.exists(self.plugin_dir):
            os.makedirs(self.plugin_dir)

        # Add the plugin directory's parent to sys.path to enable relative imports within plugins.
        # This assumes plugin_dir is something like 'backend/src/plugins'
        # and we want 'backend/src' to be on the path for 'from ..core...' imports.
        plugin_parent_dir = os.path.abspath(os.path.join(self.plugin_dir, os.pardir))
        if plugin_parent_dir not in sys.path:
            sys.path.insert(0, plugin_parent_dir)

        for filename in os.listdir(self.plugin_dir):
            if filename.endswith(".py") and filename != "__init__.py":
                module_name = filename[:-3]
                file_path = os.path.join(self.plugin_dir, filename)
                self._load_module(module_name, file_path)

    def _load_module(self, module_name: str, file_path: str):
        """
        Loads a single Python module and extracts PluginBase subclasses.
        """
        # Determine the correct package prefix based on how the app is running.
        if "backend.src" in __name__:
            package_prefix = "backend.src.plugins"
        else:
            package_prefix = "src.plugins"

        package_name = f"{package_prefix}.{module_name}"
        spec = importlib.util.spec_from_file_location(package_name, file_path)
        if spec is None or spec.loader is None:
            print(f"Could not load module spec for {package_name}")  # Replace with proper logging
            return

        module = importlib.util.module_from_spec(spec)
        try:
            spec.loader.exec_module(module)
        except Exception as e:
            print(f"Error loading plugin module {module_name}: {e}")  # Replace with proper logging
            return

        for attribute_name in dir(module):
            attribute = getattr(module, attribute_name)
            if (
                isinstance(attribute, type)
                and issubclass(attribute, PluginBase)
                and attribute is not PluginBase
            ):
                try:
                    plugin_instance = attribute()
                    self._register_plugin(plugin_instance)
                except Exception as e:
                    print(f"Error instantiating plugin {attribute_name} in {module_name}: {e}")  # Replace with proper logging

    def _register_plugin(self, plugin_instance: PluginBase):
        """
        Registers a valid plugin instance.
        """
        plugin_id = plugin_instance.id
        if plugin_id in self._plugins:
            print(f"Warning: Duplicate plugin ID '{plugin_id}' found. Skipping.")  # Replace with proper logging
            return

        try:
            schema = plugin_instance.get_schema()
            # Basic validation to ensure it's a dictionary
            if not isinstance(schema, dict):
                raise TypeError("get_schema() must return a dictionary.")

            plugin_config = PluginConfig(
                id=plugin_instance.id,
                name=plugin_instance.name,
                description=plugin_instance.description,
                version=plugin_instance.version,
                schema=schema,
            )
            # The following call is commented out because it requires a schema to validate against;
            # the schema provided by the plugin is the one being validated, not the data.
            # validate(instance={}, schema=schema)
            self._plugins[plugin_id] = plugin_instance
            self._plugin_configs[plugin_id] = plugin_config
            print(f"Plugin '{plugin_instance.name}' (ID: {plugin_id}) loaded successfully.")  # Replace with proper logging
        except Exception as e:
            print(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")  # Replace with proper logging

    def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
        """
        Returns a loaded plugin instance by its ID.
        """
        return self._plugins.get(plugin_id)

    def get_all_plugin_configs(self) -> List[PluginConfig]:
        """
        Returns a list of all loaded plugin configurations.
        """
        return list(self._plugin_configs.values())

    def has_plugin(self, plugin_id: str) -> bool:
        """
        Checks if a plugin with the given ID is loaded.
        """
        return plugin_id in self._plugins
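Discovery and lookup then reduce to the following (reviewer illustration; the plugin directory path is an assumption):

from src.core.plugin_loader import PluginLoader

loader = PluginLoader(plugin_dir="src/plugins")  # scans, imports, and registers on construction
for cfg in loader.get_all_plugin_configs():
    print(cfg.id, cfg.name, cfg.version)

if loader.has_plugin("echo"):
    plugin = loader.get_plugin("echo")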
backend/src/core/task_manager.py (mode changed: normal file → executable file, 131 lines unchanged)
@@ -1,131 +1,131 @@
# [DEF:TaskManagerModule:Module]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking. It uses a thread pool to run plugins asynchronously.
# @LAYER: Core
# @RELATION: Depends on PluginLoader to get plugin instances. It is used by the API layer to create and query tasks.
import asyncio
import uuid
from datetime import datetime
from enum import Enum
from typing import Dict, Any, List, Optional
from concurrent.futures import ThreadPoolExecutor

from pydantic import BaseModel, Field

# Assuming PluginBase and PluginConfig are defined in plugin_base.py
# from .plugin_base import PluginBase, PluginConfig  # Not needed here; TaskManager interacts with the PluginLoader

# [DEF:TaskStatus:Enum]
# @SEMANTICS: task, status, state, enum
# @PURPOSE: Defines the possible states a task can be in during its lifecycle.
class TaskStatus(str, Enum):
    PENDING = "PENDING"
    RUNNING = "RUNNING"
    SUCCESS = "SUCCESS"
    FAILED = "FAILED"

# [/DEF]

# [DEF:LogEntry:Class]
# @SEMANTICS: log, entry, record, pydantic
# @PURPOSE: A Pydantic model representing a single, structured log entry associated with a task.
class LogEntry(BaseModel):
    timestamp: datetime = Field(default_factory=datetime.utcnow)
    level: str
    message: str
    context: Optional[Dict[str, Any]] = None
# [/DEF]

# [DEF:Task:Class]
# @SEMANTICS: task, job, execution, state, pydantic
# @PURPOSE: A Pydantic model representing a single execution instance of a plugin, including its status, parameters, and logs.
class Task(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    plugin_id: str
    status: TaskStatus = TaskStatus.PENDING
    started_at: Optional[datetime] = None
    finished_at: Optional[datetime] = None
    user_id: Optional[str] = None
    logs: List[LogEntry] = Field(default_factory=list)
    params: Dict[str, Any] = Field(default_factory=dict)

# [/DEF]

# [DEF:TaskManager:Class]
# @SEMANTICS: task, manager, lifecycle, execution, state
# @PURPOSE: Manages the lifecycle of tasks, including their creation, execution, and state tracking.
class TaskManager:
    """
    Manages the lifecycle of tasks, including their creation, execution, and state tracking.
    """
    def __init__(self, plugin_loader):
        self.plugin_loader = plugin_loader
        self.tasks: Dict[str, Task] = {}
        self.executor = ThreadPoolExecutor(max_workers=5)  # For CPU-bound plugin execution
        self.loop = asyncio.get_event_loop()
    # [/DEF]

    async def create_task(self, plugin_id: str, params: Dict[str, Any], user_id: Optional[str] = None) -> Task:
        """
        Creates and queues a new task for execution.
        """
        if not self.plugin_loader.has_plugin(plugin_id):
            raise ValueError(f"Plugin with ID '{plugin_id}' not found.")

        plugin = self.plugin_loader.get_plugin(plugin_id)
        # Validate params against the plugin schema (this will be done at a higher level, e.g., the API route).
        # For now, a basic check.
        if not isinstance(params, dict):
            raise ValueError("Task parameters must be a dictionary.")

        task = Task(plugin_id=plugin_id, params=params, user_id=user_id)
        self.tasks[task.id] = task
        self.loop.create_task(self._run_task(task.id))  # Schedule task for execution
        return task

    async def _run_task(self, task_id: str):
        """
        Internal method to execute a task.
        """
        task = self.tasks[task_id]
        plugin = self.plugin_loader.get_plugin(task.plugin_id)

        task.status = TaskStatus.RUNNING
        task.started_at = datetime.utcnow()
        task.logs.append(LogEntry(level="INFO", message=f"Task started for plugin '{plugin.name}'"))

        try:
            # Execute the plugin in a separate thread to avoid blocking the event loop
            # if the plugin's execute method is synchronous and potentially CPU-bound.
            # If the plugin's execute method is already async, this can be simplified.
            await self.loop.run_in_executor(
                self.executor,
                lambda: asyncio.run(plugin.execute(task.params)) if asyncio.iscoroutinefunction(plugin.execute) else plugin.execute(task.params)
            )
            task.status = TaskStatus.SUCCESS
            task.logs.append(LogEntry(level="INFO", message=f"Task completed successfully for plugin '{plugin.name}'"))
        except Exception as e:
            task.status = TaskStatus.FAILED
            task.logs.append(LogEntry(level="ERROR", message=f"Task failed: {e}", context={"error_type": type(e).__name__}))
        finally:
            task.finished_at = datetime.utcnow()
            # In a real system, you might notify clients via WebSocket here.

    def get_task(self, task_id: str) -> Optional[Task]:
        """
        Retrieves a task by its ID.
        """
        return self.tasks.get(task_id)

    def get_all_tasks(self) -> List[Task]:
        """
        Retrieves all registered tasks.
        """
        return list(self.tasks.values())

    def get_task_logs(self, task_id: str) -> List[LogEntry]:
        """
        Retrieves logs for a specific task.
        """
        task = self.tasks.get(task_id)
        return task.logs if task else []
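An end-to-end sketch wiring the loader and task manager together (reviewer illustration; reuses the hypothetical EchoPlugin and plugin directory from above):

import asyncio

from src.core.plugin_loader import PluginLoader
from src.core.task_manager import TaskManager, TaskStatus


async def main():
    loader = PluginLoader(plugin_dir="src/plugins")
    manager = TaskManager(loader)  # captures the running event loop

    task = await manager.create_task("echo", {"message": "hello"})
    # Poll until the scheduled task leaves the PENDING/RUNNING states.
    while manager.get_task(task.id).status in (TaskStatus.PENDING, TaskStatus.RUNNING):
        await asyncio.sleep(0.1)

    for entry in manager.get_task_logs(task.id):
        print(entry.level, entry.message)

asyncio.run(main())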