refactor: move manager classes from core to managers module

* Move ConfigManager, SettingsManager, and FileManager from setup/core to setup/managers, adding a new __init__.py, for cleaner module organization.
* Update SettingsManager with enhanced metadata handling methods and installation detection utilities.
Andrew Low
2025-07-22 18:11:18 +08:00
parent df94650bef
commit 2db7c80eb1
4 changed files with 48 additions and 7 deletions
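
For illustration, the new package could re-export the three managers through its __init__.py roughly as sketched below; the individual module filenames and the re-exports are assumptions for readability, not taken from this diff.

# setup/managers/__init__.py (hypothetical sketch; module names assumed)
from .config_manager import ConfigManager
from .file_manager import FileManager
from .settings_manager import SettingsManager

__all__ = ["ConfigManager", "FileManager", "SettingsManager"]

# Call sites would then import from the new location, e.g.:
# from setup.managers import ConfigManager, FileManager, SettingsManager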

View File

@@ -1,399 +0,0 @@
"""
Configuration management for SuperClaude installation system
"""
import json
from typing import Dict, Any, List, Optional
from pathlib import Path
# Handle jsonschema import - if not available, use basic validation
try:
import jsonschema
from jsonschema import validate, ValidationError
JSONSCHEMA_AVAILABLE = True
except ImportError:
JSONSCHEMA_AVAILABLE = False
class ValidationError(Exception):
"""Simple validation error for when jsonschema is not available"""
def __init__(self, message):
self.message = message
super().__init__(message)
def validate(instance, schema):
"""Dummy validation function"""
# Basic type checking only
if "type" in schema:
expected_type = schema["type"]
if expected_type == "object" and not isinstance(instance, dict):
raise ValidationError(f"Expected object, got {type(instance).__name__}")
elif expected_type == "array" and not isinstance(instance, list):
raise ValidationError(f"Expected array, got {type(instance).__name__}")
elif expected_type == "string" and not isinstance(instance, str):
raise ValidationError(f"Expected string, got {type(instance).__name__}")
elif expected_type == "integer" and not isinstance(instance, int):
raise ValidationError(f"Expected integer, got {type(instance).__name__}")
# Skip detailed validation if jsonschema not available
class ConfigManager:
"""Manages configuration files and validation"""
def __init__(self, config_dir: Path):
"""
Initialize config manager
Args:
config_dir: Directory containing configuration files
"""
self.config_dir = config_dir
self.features_file = config_dir / "features.json"
self.requirements_file = config_dir / "requirements.json"
self._features_cache = None
self._requirements_cache = None
# Schema for features.json
self.features_schema = {
"type": "object",
"properties": {
"components": {
"type": "object",
"patternProperties": {
"^[a-zA-Z_][a-zA-Z0-9_]*$": {
"type": "object",
"properties": {
"name": {"type": "string"},
"version": {"type": "string"},
"description": {"type": "string"},
"category": {"type": "string"},
"dependencies": {
"type": "array",
"items": {"type": "string"}
},
"enabled": {"type": "boolean"},
"required_tools": {
"type": "array",
"items": {"type": "string"}
}
},
"required": ["name", "version", "description", "category"],
"additionalProperties": False
}
}
}
},
"required": ["components"],
"additionalProperties": False
}
# Schema for requirements.json
self.requirements_schema = {
"type": "object",
"properties": {
"python": {
"type": "object",
"properties": {
"min_version": {"type": "string"},
"max_version": {"type": "string"}
},
"required": ["min_version"]
},
"node": {
"type": "object",
"properties": {
"min_version": {"type": "string"},
"max_version": {"type": "string"},
"required_for": {
"type": "array",
"items": {"type": "string"}
}
},
"required": ["min_version"]
},
"disk_space_mb": {"type": "integer"},
"external_tools": {
"type": "object",
"patternProperties": {
"^[a-zA-Z_][a-zA-Z0-9_-]*$": {
"type": "object",
"properties": {
"command": {"type": "string"},
"min_version": {"type": "string"},
"required_for": {
"type": "array",
"items": {"type": "string"}
},
"optional": {"type": "boolean"}
},
"required": ["command"],
"additionalProperties": False
}
}
},
"installation_commands": {
"type": "object",
"patternProperties": {
"^[a-zA-Z_][a-zA-Z0-9_-]*$": {
"type": "object",
"properties": {
"linux": {"type": "string"},
"darwin": {"type": "string"},
"win32": {"type": "string"},
"all": {"type": "string"},
"description": {"type": "string"}
},
"additionalProperties": False
}
}
}
},
"required": ["python", "disk_space_mb"],
"additionalProperties": False
}
def load_features(self) -> Dict[str, Any]:
"""
Load and validate features configuration
Returns:
Features configuration dict
Raises:
FileNotFoundError: If features.json not found
ValidationError: If features.json is invalid
"""
if self._features_cache is not None:
return self._features_cache
if not self.features_file.exists():
raise FileNotFoundError(f"Features config not found: {self.features_file}")
try:
with open(self.features_file, 'r') as f:
features = json.load(f)
# Validate schema
validate(instance=features, schema=self.features_schema)
self._features_cache = features
return features
except json.JSONDecodeError as e:
raise ValidationError(f"Invalid JSON in {self.features_file}: {e}")
except ValidationError as e:
raise ValidationError(f"Invalid features schema: {e.message}")
def load_requirements(self) -> Dict[str, Any]:
"""
Load and validate requirements configuration
Returns:
Requirements configuration dict
Raises:
FileNotFoundError: If requirements.json not found
ValidationError: If requirements.json is invalid
"""
if self._requirements_cache is not None:
return self._requirements_cache
if not self.requirements_file.exists():
raise FileNotFoundError(f"Requirements config not found: {self.requirements_file}")
try:
with open(self.requirements_file, 'r') as f:
requirements = json.load(f)
# Validate schema
validate(instance=requirements, schema=self.requirements_schema)
self._requirements_cache = requirements
return requirements
except json.JSONDecodeError as e:
raise ValidationError(f"Invalid JSON in {self.requirements_file}: {e}")
except ValidationError as e:
raise ValidationError(f"Invalid requirements schema: {e.message}")
def get_component_info(self, component_name: str) -> Optional[Dict[str, Any]]:
"""
Get information about a specific component
Args:
component_name: Name of component
Returns:
Component info dict or None if not found
"""
features = self.load_features()
return features.get("components", {}).get(component_name)
def get_enabled_components(self) -> List[str]:
"""
Get list of enabled component names
Returns:
List of enabled component names
"""
features = self.load_features()
enabled = []
for name, info in features.get("components", {}).items():
if info.get("enabled", True): # Default to enabled
enabled.append(name)
return enabled
def get_components_by_category(self, category: str) -> List[str]:
"""
Get component names by category
Args:
category: Component category
Returns:
List of component names in category
"""
features = self.load_features()
components = []
for name, info in features.get("components", {}).items():
if info.get("category") == category:
components.append(name)
return components
def get_component_dependencies(self, component_name: str) -> List[str]:
"""
Get dependencies for a component
Args:
component_name: Name of component
Returns:
List of dependency component names
"""
component_info = self.get_component_info(component_name)
if component_info:
return component_info.get("dependencies", [])
return []
def load_profile(self, profile_path: Path) -> Dict[str, Any]:
"""
Load installation profile
Args:
profile_path: Path to profile JSON file
Returns:
Profile configuration dict
Raises:
FileNotFoundError: If profile not found
ValidationError: If profile is invalid
"""
if not profile_path.exists():
raise FileNotFoundError(f"Profile not found: {profile_path}")
try:
with open(profile_path, 'r') as f:
profile = json.load(f)
# Basic validation
if "components" not in profile:
raise ValidationError("Profile must contain 'components' field")
if not isinstance(profile["components"], list):
raise ValidationError("Profile 'components' must be a list")
# Validate that all components exist
features = self.load_features()
available_components = set(features.get("components", {}).keys())
for component in profile["components"]:
if component not in available_components:
raise ValidationError(f"Unknown component in profile: {component}")
return profile
except json.JSONDecodeError as e:
raise ValidationError(f"Invalid JSON in {profile_path}: {e}")
def get_system_requirements(self) -> Dict[str, Any]:
"""
Get system requirements
Returns:
System requirements dict
"""
return self.load_requirements()
def get_requirements_for_components(self, component_names: List[str]) -> Dict[str, Any]:
"""
Get consolidated requirements for specific components
Args:
component_names: List of component names
Returns:
Consolidated requirements dict
"""
requirements = self.load_requirements()
features = self.load_features()
# Start with base requirements
result = {
"python": requirements["python"],
"disk_space_mb": requirements["disk_space_mb"],
"external_tools": {}
}
# Add Node.js requirements if needed
node_required = False
for component_name in component_names:
component_info = features.get("components", {}).get(component_name, {})
required_tools = component_info.get("required_tools", [])
if "node" in required_tools:
node_required = True
break
if node_required and "node" in requirements:
result["node"] = requirements["node"]
# Add external tool requirements
for component_name in component_names:
component_info = features.get("components", {}).get(component_name, {})
required_tools = component_info.get("required_tools", [])
for tool in required_tools:
if tool in requirements.get("external_tools", {}):
result["external_tools"][tool] = requirements["external_tools"][tool]
return result
def validate_config_files(self) -> List[str]:
"""
Validate all configuration files
Returns:
List of validation errors (empty if all valid)
"""
errors = []
try:
self.load_features()
except Exception as e:
errors.append(f"Features config error: {e}")
try:
self.load_requirements()
except Exception as e:
errors.append(f"Requirements config error: {e}")
return errors
def clear_cache(self) -> None:
"""Clear cached configuration data"""
self._features_cache = None
self._requirements_cache = None
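
A minimal usage sketch of the ConfigManager shown above; the import path assumes a config_manager.py module under setup/managers, and the config directory is illustrative.

from pathlib import Path
from setup.managers.config_manager import ConfigManager  # assumed new module path

config = ConfigManager(Path("config"))  # directory holding features.json and requirements.json

errors = config.validate_config_files()
if errors:
    for err in errors:
        print(f"Config problem: {err}")
else:
    enabled = config.get_enabled_components()
    reqs = config.get_requirements_for_components(enabled)
    print(f"{len(enabled)} components enabled; "
          f"needs {reqs['disk_space_mb']} MB and Python >= {reqs['python']['min_version']}")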

View File

@@ -1,428 +0,0 @@
"""
Cross-platform file management for SuperClaude installation system
"""
import shutil
import stat
from typing import List, Optional, Dict, Any
from pathlib import Path
import fnmatch
import hashlib
class FileManager:
"""Cross-platform file operations manager"""
def __init__(self, dry_run: bool = False):
"""
Initialize file manager
Args:
dry_run: If True, only simulate file operations
"""
self.dry_run = dry_run
self.copied_files: List[Path] = []
self.created_dirs: List[Path] = []
def copy_file(self, source: Path, target: Path, preserve_permissions: bool = True) -> bool:
"""
Copy single file with permission preservation
Args:
source: Source file path
target: Target file path
preserve_permissions: Whether to preserve file permissions
Returns:
True if successful, False otherwise
"""
if not source.exists():
raise FileNotFoundError(f"Source file not found: {source}")
if not source.is_file():
raise ValueError(f"Source is not a file: {source}")
if self.dry_run:
print(f"[DRY RUN] Would copy {source} -> {target}")
return True
try:
# Ensure target directory exists
target.parent.mkdir(parents=True, exist_ok=True)
# Copy file
if preserve_permissions:
shutil.copy2(source, target)
else:
shutil.copy(source, target)
self.copied_files.append(target)
return True
except Exception as e:
print(f"Error copying {source} to {target}: {e}")
return False
def copy_directory(self, source: Path, target: Path, ignore_patterns: Optional[List[str]] = None) -> bool:
"""
Recursively copy directory with gitignore-style patterns
Args:
source: Source directory path
target: Target directory path
ignore_patterns: List of patterns to ignore (gitignore style)
Returns:
True if successful, False otherwise
"""
if not source.exists():
raise FileNotFoundError(f"Source directory not found: {source}")
if not source.is_dir():
raise ValueError(f"Source is not a directory: {source}")
ignore_patterns = ignore_patterns or []
default_ignores = ['.git', '.gitignore', '__pycache__', '*.pyc', '.DS_Store']
all_ignores = ignore_patterns + default_ignores
if self.dry_run:
print(f"[DRY RUN] Would copy directory {source} -> {target}")
return True
try:
# Create ignore function
def ignore_func(directory: str, contents: List[str]) -> List[str]:
ignored = []
for item in contents:
item_path = Path(directory) / item
rel_path = item_path.relative_to(source)
# Check against ignore patterns
for pattern in all_ignores:
if fnmatch.fnmatch(item, pattern) or fnmatch.fnmatch(str(rel_path), pattern):
ignored.append(item)
break
return ignored
# Copy tree
shutil.copytree(source, target, ignore=ignore_func, dirs_exist_ok=True)
# Track created directories and files
for item in target.rglob('*'):
if item.is_dir():
self.created_dirs.append(item)
else:
self.copied_files.append(item)
return True
except Exception as e:
print(f"Error copying directory {source} to {target}: {e}")
return False
def ensure_directory(self, directory: Path, mode: int = 0o755) -> bool:
"""
Create directory and parents if they don't exist
Args:
directory: Directory path to create
mode: Directory permissions (Unix only)
Returns:
True if successful, False otherwise
"""
if self.dry_run:
print(f"[DRY RUN] Would create directory {directory}")
return True
try:
directory.mkdir(parents=True, exist_ok=True, mode=mode)
if directory not in self.created_dirs:
self.created_dirs.append(directory)
return True
except Exception as e:
print(f"Error creating directory {directory}: {e}")
return False
def remove_file(self, file_path: Path) -> bool:
"""
Remove single file
Args:
file_path: Path to file to remove
Returns:
True if successful, False otherwise
"""
if not file_path.exists():
return True # Already gone
if self.dry_run:
print(f"[DRY RUN] Would remove file {file_path}")
return True
try:
if file_path.is_file():
file_path.unlink()
else:
print(f"Warning: {file_path} is not a file, skipping")
return False
# Remove from tracking
if file_path in self.copied_files:
self.copied_files.remove(file_path)
return True
except Exception as e:
print(f"Error removing file {file_path}: {e}")
return False
def remove_directory(self, directory: Path, recursive: bool = False) -> bool:
"""
Remove directory
Args:
directory: Directory path to remove
recursive: Whether to remove recursively
Returns:
True if successful, False otherwise
"""
if not directory.exists():
return True # Already gone
if self.dry_run:
action = "recursively remove" if recursive else "remove"
print(f"[DRY RUN] Would {action} directory {directory}")
return True
try:
if recursive:
shutil.rmtree(directory)
else:
directory.rmdir() # Only works if empty
# Remove from tracking
if directory in self.created_dirs:
self.created_dirs.remove(directory)
return True
except Exception as e:
print(f"Error removing directory {directory}: {e}")
return False
def resolve_home_path(self, path: str) -> Path:
"""
Convert path with ~ to actual home path on any OS
Args:
path: Path string potentially containing ~
Returns:
Resolved Path object
"""
return Path(path).expanduser().resolve()
def make_executable(self, file_path: Path) -> bool:
"""
Make file executable (Unix/Linux/macOS)
Args:
file_path: Path to file to make executable
Returns:
True if successful, False otherwise
"""
if not file_path.exists():
return False
if self.dry_run:
print(f"[DRY RUN] Would make {file_path} executable")
return True
try:
# Get current permissions
current_mode = file_path.stat().st_mode
# Add execute permissions for owner, group, and others
new_mode = current_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
file_path.chmod(new_mode)
return True
except Exception as e:
print(f"Error making {file_path} executable: {e}")
return False
def get_file_hash(self, file_path: Path, algorithm: str = 'sha256') -> Optional[str]:
"""
Calculate file hash
Args:
file_path: Path to file
algorithm: Hash algorithm (md5, sha1, sha256, etc.)
Returns:
Hex hash string or None if error
"""
if not file_path.exists() or not file_path.is_file():
return None
try:
hasher = hashlib.new(algorithm)
with open(file_path, 'rb') as f:
# Read in chunks for large files
for chunk in iter(lambda: f.read(8192), b""):
hasher.update(chunk)
return hasher.hexdigest()
except Exception:
return None
def verify_file_integrity(self, file_path: Path, expected_hash: str, algorithm: str = 'sha256') -> bool:
"""
Verify file integrity using hash
Args:
file_path: Path to file to verify
expected_hash: Expected hash value
algorithm: Hash algorithm used
Returns:
True if file matches expected hash, False otherwise
"""
actual_hash = self.get_file_hash(file_path, algorithm)
return actual_hash is not None and actual_hash.lower() == expected_hash.lower()
def get_directory_size(self, directory: Path) -> int:
"""
Calculate total size of directory in bytes
Args:
directory: Directory path
Returns:
Total size in bytes
"""
if not directory.exists() or not directory.is_dir():
return 0
total_size = 0
try:
for file_path in directory.rglob('*'):
if file_path.is_file():
total_size += file_path.stat().st_size
except Exception:
pass # Skip files we can't access
return total_size
def find_files(self, directory: Path, pattern: str = '*', recursive: bool = True) -> List[Path]:
"""
Find files matching pattern
Args:
directory: Directory to search
pattern: Glob pattern to match
recursive: Whether to search recursively
Returns:
List of matching file paths
"""
if not directory.exists() or not directory.is_dir():
return []
try:
if recursive:
return list(directory.rglob(pattern))
else:
return list(directory.glob(pattern))
except Exception:
return []
def backup_file(self, file_path: Path, backup_suffix: str = '.backup') -> Optional[Path]:
"""
Create backup copy of file
Args:
file_path: Path to file to backup
backup_suffix: Suffix to add to backup file
Returns:
Path to backup file or None if failed
"""
if not file_path.exists() or not file_path.is_file():
return None
backup_path = file_path.with_suffix(file_path.suffix + backup_suffix)
if self.copy_file(file_path, backup_path):
return backup_path
return None
def get_free_space(self, path: Path) -> int:
"""
Get free disk space at path in bytes
Args:
path: Path to check (can be file or directory)
Returns:
Free space in bytes
"""
try:
if path.is_file():
path = path.parent
stat_result = shutil.disk_usage(path)
return stat_result.free
except Exception:
return 0
def cleanup_tracked_files(self) -> None:
"""Remove all files and directories created during this session"""
if self.dry_run:
print("[DRY RUN] Would cleanup tracked files")
return
# Remove files first
for file_path in reversed(self.copied_files):
try:
if file_path.exists():
file_path.unlink()
except Exception:
pass
# Remove directories (in reverse order of creation)
for directory in reversed(self.created_dirs):
try:
if directory.exists() and not any(directory.iterdir()):
directory.rmdir()
except Exception:
pass
self.copied_files.clear()
self.created_dirs.clear()
def get_operation_summary(self) -> Dict[str, Any]:
"""
Get summary of file operations performed
Returns:
Dict with operation statistics
"""
return {
'files_copied': len(self.copied_files),
'directories_created': len(self.created_dirs),
'dry_run': self.dry_run,
'copied_files': [str(f) for f in self.copied_files],
'created_directories': [str(d) for d in self.created_dirs]
}
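
A minimal usage sketch of the FileManager shown above; the import path and file paths are illustrative assumptions, and dry_run=True keeps the run side-effect free.

from pathlib import Path
from setup.managers.file_manager import FileManager  # assumed new module path

fm = FileManager(dry_run=True)                   # only prints what it would do
target = fm.resolve_home_path("~/.superclaude")  # illustrative install location

fm.ensure_directory(target)
fm.copy_directory(Path("SuperClaude"), target, ignore_patterns=["*.log", "tests/*"])
fm.make_executable(target / "bin" / "install.sh")

print(fm.get_operation_summary())  # dry_run flag plus empty tracking lists
# A real run would populate copied_files/created_dirs, and
# fm.cleanup_tracked_files() would roll them back on failure.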

View File

@@ -1,483 +0,0 @@
"""
Settings management for SuperClaude installation system
Handles settings.json manipulation with deep merge and backup
"""
import json
import shutil
from typing import Dict, Any, Optional, List
from pathlib import Path
from datetime import datetime
import copy
class SettingsManager:
"""Manages settings.json file operations"""
def __init__(self, install_dir: Path):
"""
Initialize settings manager
Args:
install_dir: Installation directory containing settings.json
"""
self.install_dir = install_dir
self.settings_file = install_dir / "settings.json"
self.metadata_file = install_dir / ".superclaude-metadata.json"
self.backup_dir = install_dir / "backups" / "settings"
def load_settings(self) -> Dict[str, Any]:
"""
Load settings from settings.json
Returns:
Settings dict (empty if file doesn't exist)
"""
if not self.settings_file.exists():
return {}
try:
with open(self.settings_file, 'r', encoding='utf-8') as f:
return json.load(f)
except (json.JSONDecodeError, IOError) as e:
raise ValueError(f"Could not load settings from {self.settings_file}: {e}")
def save_settings(self, settings: Dict[str, Any], create_backup: bool = True) -> None:
"""
Save settings to settings.json with optional backup
Args:
settings: Settings dict to save
create_backup: Whether to create backup before saving
"""
# Create backup if requested and file exists
if create_backup and self.settings_file.exists():
self._create_settings_backup()
# Ensure directory exists
self.settings_file.parent.mkdir(parents=True, exist_ok=True)
# Save with pretty formatting
try:
with open(self.settings_file, 'w', encoding='utf-8') as f:
json.dump(settings, f, indent=2, ensure_ascii=False, sort_keys=True)
except IOError as e:
raise ValueError(f"Could not save settings to {self.settings_file}: {e}")
def load_metadata(self) -> Dict[str, Any]:
"""
Load SuperClaude metadata from .superclaude-metadata.json
Returns:
Metadata dict (empty if file doesn't exist)
"""
if not self.metadata_file.exists():
return {}
try:
with open(self.metadata_file, 'r', encoding='utf-8') as f:
return json.load(f)
except (json.JSONDecodeError, IOError) as e:
raise ValueError(f"Could not load metadata from {self.metadata_file}: {e}")
def save_metadata(self, metadata: Dict[str, Any]) -> None:
"""
Save SuperClaude metadata to .superclaude-metadata.json
Args:
metadata: Metadata dict to save
"""
# Ensure directory exists
self.metadata_file.parent.mkdir(parents=True, exist_ok=True)
# Save with pretty formatting
try:
with open(self.metadata_file, 'w', encoding='utf-8') as f:
json.dump(metadata, f, indent=2, ensure_ascii=False, sort_keys=True)
except IOError as e:
raise ValueError(f"Could not save metadata to {self.metadata_file}: {e}")
def migrate_superclaude_data(self) -> bool:
"""
Migrate SuperClaude-specific data from settings.json to metadata file
Returns:
True if migration occurred, False if no data to migrate
"""
settings = self.load_settings()
# SuperClaude-specific fields to migrate
superclaude_fields = ["components", "framework", "superclaude", "mcp"]
data_to_migrate = {}
fields_found = False
# Extract SuperClaude data
for field in superclaude_fields:
if field in settings:
data_to_migrate[field] = settings[field]
fields_found = True
if not fields_found:
return False
# Load existing metadata (if any) and merge
existing_metadata = self.load_metadata()
merged_metadata = self._deep_merge(existing_metadata, data_to_migrate)
# Save to metadata file
self.save_metadata(merged_metadata)
# Remove SuperClaude fields from settings
clean_settings = {k: v for k, v in settings.items() if k not in superclaude_fields}
# Save cleaned settings
self.save_settings(clean_settings, create_backup=True)
return True
def merge_settings(self, modifications: Dict[str, Any]) -> Dict[str, Any]:
"""
Deep merge modifications into existing settings
Args:
modifications: Settings modifications to merge
Returns:
Merged settings dict
"""
existing = self.load_settings()
return self._deep_merge(existing, modifications)
def update_settings(self, modifications: Dict[str, Any], create_backup: bool = True) -> None:
"""
Update settings with modifications
Args:
modifications: Settings modifications to apply
create_backup: Whether to create backup before updating
"""
merged = self.merge_settings(modifications)
self.save_settings(merged, create_backup)
def get_setting(self, key_path: str, default: Any = None) -> Any:
"""
Get setting value using dot-notation path
Args:
key_path: Dot-separated path (e.g., "hooks.enabled")
default: Default value if key not found
Returns:
Setting value or default
"""
settings = self.load_settings()
try:
value = settings
for key in key_path.split('.'):
value = value[key]
return value
except (KeyError, TypeError):
return default
def set_setting(self, key_path: str, value: Any, create_backup: bool = True) -> None:
"""
Set setting value using dot-notation path
Args:
key_path: Dot-separated path (e.g., "hooks.enabled")
value: Value to set
create_backup: Whether to create backup before updating
"""
# Build nested dict structure
keys = key_path.split('.')
modification = {}
current = modification
for key in keys[:-1]:
current[key] = {}
current = current[key]
current[keys[-1]] = value
self.update_settings(modification, create_backup)
def remove_setting(self, key_path: str, create_backup: bool = True) -> bool:
"""
Remove setting using dot-notation path
Args:
key_path: Dot-separated path to remove
create_backup: Whether to create backup before updating
Returns:
True if setting was removed, False if not found
"""
settings = self.load_settings()
keys = key_path.split('.')
# Navigate to parent of target key
current = settings
try:
for key in keys[:-1]:
current = current[key]
# Remove the target key
if keys[-1] in current:
del current[keys[-1]]
self.save_settings(settings, create_backup)
return True
else:
return False
except (KeyError, TypeError):
return False
def add_component_registration(self, component_name: str, component_info: Dict[str, Any]) -> None:
"""
Add component to registry in metadata
Args:
component_name: Name of component
component_info: Component metadata dict
"""
metadata = self.load_metadata()
if "components" not in metadata:
metadata["components"] = {}
metadata["components"][component_name] = {
**component_info,
"installed_at": datetime.now().isoformat()
}
self.save_metadata(metadata)
def remove_component_registration(self, component_name: str) -> bool:
"""
Remove component from registry in metadata
Args:
component_name: Name of component to remove
Returns:
True if component was removed, False if not found
"""
metadata = self.load_metadata()
if "components" in metadata and component_name in metadata["components"]:
del metadata["components"][component_name]
self.save_metadata(metadata)
return True
return False
def get_installed_components(self) -> Dict[str, Dict[str, Any]]:
"""
Get all installed components from registry
Returns:
Dict of component_name -> component_info
"""
metadata = self.load_metadata()
return metadata.get("components", {})
def is_component_installed(self, component_name: str) -> bool:
"""
Check if component is registered as installed
Args:
component_name: Name of component to check
Returns:
True if component is installed, False otherwise
"""
components = self.get_installed_components()
return component_name in components
def get_component_version(self, component_name: str) -> Optional[str]:
"""
Get installed version of component
Args:
component_name: Name of component
Returns:
Version string or None if not installed
"""
components = self.get_installed_components()
component_info = components.get(component_name, {})
return component_info.get("version")
def update_framework_version(self, version: str) -> None:
"""
Update SuperClaude framework version in metadata
Args:
version: Framework version string
"""
metadata = self.load_metadata()
if "framework" not in metadata:
metadata["framework"] = {}
metadata["framework"]["version"] = version
metadata["framework"]["updated_at"] = datetime.now().isoformat()
self.save_metadata(metadata)
def get_framework_version(self) -> Optional[str]:
"""
Get SuperClaude framework version from metadata
Returns:
Version string or None if not set
"""
metadata = self.load_metadata()
framework = metadata.get("framework", {})
return framework.get("version")
def get_metadata_setting(self, key_path: str, default: Any = None) -> Any:
"""
Get metadata value using dot-notation path
Args:
key_path: Dot-separated path (e.g., "framework.version")
default: Default value if key not found
Returns:
Metadata value or default
"""
metadata = self.load_metadata()
try:
value = metadata
for key in key_path.split('.'):
value = value[key]
return value
except (KeyError, TypeError):
return default
def _deep_merge(self, base: Dict[str, Any], overlay: Dict[str, Any]) -> Dict[str, Any]:
"""
Deep merge two dictionaries
Args:
base: Base dictionary
overlay: Dictionary to merge on top
Returns:
Merged dictionary
"""
result = copy.deepcopy(base)
for key, value in overlay.items():
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
result[key] = self._deep_merge(result[key], value)
else:
result[key] = copy.deepcopy(value)
return result
def _create_settings_backup(self) -> Path:
"""
Create timestamped backup of settings.json
Returns:
Path to backup file
"""
if not self.settings_file.exists():
raise ValueError("Cannot backup non-existent settings file")
# Create backup directory
self.backup_dir.mkdir(parents=True, exist_ok=True)
# Create timestamped backup
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
backup_file = self.backup_dir / f"settings_{timestamp}.json"
shutil.copy2(self.settings_file, backup_file)
# Keep only last 10 backups
self._cleanup_old_backups()
return backup_file
def _cleanup_old_backups(self, keep_count: int = 10) -> None:
"""
Remove old backup files, keeping only the most recent
Args:
keep_count: Number of backups to keep
"""
if not self.backup_dir.exists():
return
# Get all backup files sorted by modification time
backup_files = []
for file in self.backup_dir.glob("settings_*.json"):
backup_files.append((file.stat().st_mtime, file))
backup_files.sort(reverse=True) # Most recent first
# Remove old backups
for _, file in backup_files[keep_count:]:
try:
file.unlink()
except OSError:
pass # Ignore errors when cleaning up
def list_backups(self) -> List[Dict[str, Any]]:
"""
List available settings backups
Returns:
List of backup info dicts with name, path, and timestamp
"""
if not self.backup_dir.exists():
return []
backups = []
for file in self.backup_dir.glob("settings_*.json"):
try:
stat = file.stat()
backups.append({
"name": file.name,
"path": str(file),
"size": stat.st_size,
"created": datetime.fromtimestamp(stat.st_ctime).isoformat(),
"modified": datetime.fromtimestamp(stat.st_mtime).isoformat()
})
except OSError:
continue
# Sort by creation time, most recent first
backups.sort(key=lambda x: x["created"], reverse=True)
return backups
def restore_backup(self, backup_name: str) -> bool:
"""
Restore settings from backup
Args:
backup_name: Name of backup file to restore
Returns:
True if successful, False otherwise
"""
backup_file = self.backup_dir / backup_name
if not backup_file.exists():
return False
try:
# Validate backup file first
with open(backup_file, 'r', encoding='utf-8') as f:
json.load(f) # Will raise exception if invalid
# Create backup of current settings
if self.settings_file.exists():
self._create_settings_backup()
# Restore backup
shutil.copy2(backup_file, self.settings_file)
return True
except (json.JSONDecodeError, IOError):
return False
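
Finally, a minimal usage sketch of the SettingsManager shown above; the install directory, component name, and version strings are illustrative, and the import path assumes a settings_manager.py module under setup/managers.

from pathlib import Path
from setup.managers.settings_manager import SettingsManager  # assumed new module path

sm = SettingsManager(Path.home() / ".claude")  # illustrative install directory

# Move legacy SuperClaude keys out of settings.json into .superclaude-metadata.json
if sm.migrate_superclaude_data():
    print("Migrated SuperClaude data to the metadata file")

# Dot-notation access to settings.json (a timestamped backup is taken before each write)
sm.set_setting("hooks.enabled", True)
print(sm.get_setting("hooks.enabled", default=False))

# Component registry and framework version live in the metadata file
sm.add_component_registration("core", {"version": "3.0.0", "category": "framework"})
sm.update_framework_version("3.0.0")
print(sm.is_component_installed("core"), sm.get_framework_version())

# Backups can be listed and restored by file name
for backup in sm.list_backups():
    print(backup["name"], backup["created"])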