EU-Utility/plugins/cloud_sync.py

679 lines
24 KiB
Python

"""
CloudSync Plugin - Synchronize Settings Across Devices
Provides cloud synchronization for EU-Utility settings, plugin configurations,
and user data across multiple devices using various cloud providers.
"""
import os
import json
import time
import hashlib
import threading
from pathlib import Path
from typing import Optional, Dict, Any, List, Callable, Union
from dataclasses import dataclass, asdict
from enum import Enum
from datetime import datetime
import urllib.request
import urllib.error
from core.base_plugin import BasePlugin
class SyncStatus(Enum):
"""Status of a sync operation."""
IDLE = "idle"
SYNCING = "syncing"
UPLOADING = "uploading"
DOWNLOADING = "downloading"
CONFLICT = "conflict"
ERROR = "error"
SUCCESS = "success"
class CloudProvider(Enum):
"""Supported cloud storage providers."""
DROPBOX = "dropbox"
GOOGLE_DRIVE = "google_drive"
ONE_DRIVE = "one_drive"
WEBDAV = "webdav"
CUSTOM = "custom"
@dataclass
class SyncConfig:
"""Configuration for cloud sync."""
enabled: bool = False
provider: str = "custom"
auto_sync: bool = True
sync_interval_minutes: int = 30
sync_on_change: bool = True
conflict_resolution: str = "ask" # ask, local, remote, newest
encrypt_data: bool = True
sync_plugins: bool = True
sync_settings: bool = True
sync_history: bool = False
last_sync: Optional[str] = None
remote_revision: Optional[str] = None
@dataclass
class SyncItem:
"""Represents an item to be synchronized."""
path: str
content: bytes
checksum: str
modified_time: float
size: int
class CloudSyncPlugin(BasePlugin):
"""
Cloud synchronization for EU-Utility settings and data.
Features:
- Multi-provider support (Dropbox, Google Drive, OneDrive, WebDAV, Custom)
- Automatic sync on changes
- Conflict resolution strategies
- Optional encryption
- Selective sync (settings, plugins, history)
- Sync status and history
"""
name = "cloud_sync"
description = "Synchronize settings across devices"
version = "1.0.0"
author = "EU-Utility"
DEFAULT_CONFIG = {
"data_dir": "data",
"sync_manifest_file": "data/sync_manifest.json",
"temp_dir": "data/temp/sync",
"max_retries": 3,
"retry_delay_seconds": 5,
}
def __init__(self):
super().__init__()
self._config = self.DEFAULT_CONFIG.copy()
self._sync_config = SyncConfig()
self._status = SyncStatus.IDLE
self._sync_history: List[Dict[str, Any]] = []
self._listeners: List[Callable] = []
self._sync_thread: Optional[threading.Thread] = None
self._running = False
self._pending_changes = False
self._lock = threading.Lock()
# File watchers
self._watched_files: Dict[str, float] = {}
self._watch_thread: Optional[threading.Thread] = None
# Provider-specific settings
self._provider_config: Dict[str, Any] = {}
# Load saved configs
self._load_sync_config()
self._load_history()
def on_start(self) -> None:
"""Start the cloud sync service."""
print(f"[{self.name}] Starting cloud sync...")
self._running = True
# Ensure directories exist
Path(self._config["temp_dir"]).mkdir(parents=True, exist_ok=True)
# Start auto-sync if enabled
if self._sync_config.enabled and self._sync_config.auto_sync:
self._start_auto_sync()
self._start_file_watcher()
def on_stop(self) -> None:
"""Stop the cloud sync service."""
print(f"[{self.name}] Stopping cloud sync...")
self._running = False
# Final sync if there are pending changes
if self._pending_changes and self._sync_config.sync_on_change:
self.sync_up()
self._save_sync_config()
self._save_history()
# Configuration
def get_sync_config(self) -> SyncConfig:
"""Get current sync configuration."""
return self._sync_config
def set_sync_config(self, config: SyncConfig) -> None:
"""Update sync configuration."""
was_enabled = self._sync_config.enabled
self._sync_config = config
# Handle enable/disable transitions
if not was_enabled and config.enabled and config.auto_sync:
self._start_auto_sync()
self._start_file_watcher()
elif was_enabled and not config.enabled:
self._running = False
def set_provider_config(self, provider: CloudProvider, config: Dict[str, Any]) -> None:
"""
Configure a cloud provider.
Args:
provider: The cloud provider to configure
config: Provider-specific configuration
"""
self._provider_config[provider.value] = config
self._sync_config.provider = provider.value
# Status & Events
def get_status(self) -> str:
"""Get current sync status."""
return self._status.value
def add_listener(self, callback: Callable[[SyncStatus, Optional[str]], None]) -> None:
"""Add a status change listener."""
self._listeners.append(callback)
def _set_status(self, status: SyncStatus, message: Optional[str] = None) -> None:
"""Set status and notify listeners."""
self._status = status
for listener in self._listeners:
try:
listener(status, message)
except Exception as e:
print(f"[{self.name}] Listener error: {e}")
# Core Sync Operations
def sync_up(self) -> bool:
"""
Upload local data to cloud.
Returns:
True if sync successful
"""
if not self._sync_config.enabled:
print(f"[{self.name}] Sync not enabled")
return False
with self._lock:
self._set_status(SyncStatus.UPLOADING)
try:
# Collect sync items
items = self._collect_sync_items()
# Build manifest
manifest = {
"version": "1.0",
"timestamp": datetime.now().isoformat(),
"device": os.environ.get("HOSTNAME", "unknown"),
"items": [
{
"path": item.path,
"checksum": item.checksum,
"modified_time": item.modified_time,
"size": item.size,
}
for item in items
],
}
# Upload based on provider
success = self._upload_to_provider(manifest, items)
if success:
self._sync_config.last_sync = datetime.now().isoformat()
self._sync_config.remote_revision = manifest["timestamp"]
self._pending_changes = False
self._record_sync("upload", True)
self._set_status(SyncStatus.SUCCESS)
print(f"[{self.name}] ✓ Synced up ({len(items)} items)")
else:
self._record_sync("upload", False, "Upload failed")
self._set_status(SyncStatus.ERROR, "Upload failed")
return success
except Exception as e:
self._record_sync("upload", False, str(e))
self._set_status(SyncStatus.ERROR, str(e))
print(f"[{self.name}] Sync up failed: {e}")
return False
def sync_down(self, force: bool = False) -> bool:
"""
Download data from cloud to local.
Args:
force: Force download even if local is newer
Returns:
True if sync successful
"""
if not self._sync_config.enabled:
print(f"[{self.name}] Sync not enabled")
return False
with self._lock:
self._set_status(SyncStatus.DOWNLOADING)
try:
# Download manifest and items
manifest, items = self._download_from_provider()
if not manifest:
self._set_status(SyncStatus.ERROR, "No remote data found")
return False
# Check for conflicts
conflicts = self._detect_conflicts(manifest, items)
if conflicts and not force:
if self._sync_config.conflict_resolution == "ask":
self._set_status(SyncStatus.CONFLICT)
print(f"[{self.name}] Conflicts detected: {len(conflicts)}")
return False
elif self._sync_config.conflict_resolution == "local":
print(f"[{self.name}] Keeping local versions")
elif self._sync_config.conflict_resolution == "remote":
self._apply_sync_items(items)
elif self._sync_config.conflict_resolution == "newest":
self._resolve_conflicts_newest(conflicts, items)
else:
self._apply_sync_items(items)
self._sync_config.last_sync = datetime.now().isoformat()
self._sync_config.remote_revision = manifest["timestamp"]
self._pending_changes = False
self._record_sync("download", True)
self._set_status(SyncStatus.SUCCESS)
print(f"[{self.name}] ✓ Synced down ({len(items)} items)")
return True
except Exception as e:
self._record_sync("download", False, str(e))
self._set_status(SyncStatus.ERROR, str(e))
print(f"[{self.name}] Sync down failed: {e}")
return False
def sync_bidirectional(self) -> bool:
"""
Perform bidirectional sync (merge local and remote).
Returns:
True if sync successful
"""
if not self._sync_config.enabled:
return False
self._set_status(SyncStatus.SYNCING)
# First sync down to get remote state
if not self.sync_down():
# If no remote data exists, just upload local
return self.sync_up()
# Then sync up to push local changes
return self.sync_up()
def force_sync(self) -> bool:
"""Force a complete sync (upload local, overwriting remote)."""
return self.sync_up()
# Private Methods
def _collect_sync_items(self) -> List[SyncItem]:
"""Collect files to sync based on configuration."""
items = []
data_dir = Path(self._config["data_dir"])
# Settings files
if self._sync_config.sync_settings:
for pattern in ["*.json", "*.yaml", "*.yml", "*.toml"]:
for file_path in data_dir.rglob(pattern):
if self._should_sync_file(file_path):
items.append(self._create_sync_item(file_path))
# Plugin configs
if self._sync_config.sync_plugins:
plugin_config_dir = data_dir / "plugin_configs"
if plugin_config_dir.exists():
for file_path in plugin_config_dir.rglob("*"):
if file_path.is_file():
items.append(self._create_sync_item(file_path))
# History (if enabled)
if self._sync_config.sync_history:
history_file = data_dir / "history.json"
if history_file.exists():
items.append(self._create_sync_item(history_file))
return items
def _should_sync_file(self, file_path: Path) -> bool:
"""Check if a file should be synced."""
# Skip certain files
skip_patterns = ["temp", "cache", "sync_manifest", "session"]
return not any(pattern in file_path.name for pattern in skip_patterns)
def _create_sync_item(self, file_path: Path) -> SyncItem:
"""Create a SyncItem from a file."""
content = file_path.read_bytes()
# Encrypt if enabled
if self._sync_config.encrypt_data:
content = self._encrypt(content)
stat = file_path.stat()
return SyncItem(
path=str(file_path.relative_to(Path(self._config["data_dir"]).parent)),
content=content,
checksum=hashlib.sha256(content).hexdigest(),
modified_time=stat.st_mtime,
size=len(content),
)
def _apply_sync_items(self, items: List[SyncItem]) -> None:
"""Apply downloaded items to local filesystem."""
for item in items:
file_path = Path(item.path)
file_path.parent.mkdir(parents=True, exist_ok=True)
# Decrypt if needed
content = item.content
if self._sync_config.encrypt_data:
content = self._decrypt(content)
file_path.write_bytes(content)
os.utime(file_path, (item.modified_time, item.modified_time))
def _detect_conflicts(self, remote_manifest: Dict, remote_items: List[SyncItem]) -> List[Dict[str, Any]]:
"""Detect conflicts between local and remote."""
conflicts = []
local_items = {item.path: item for item in self._collect_sync_items()}
remote_map = {item.path: item for item in remote_items}
for path, remote_item in remote_map.items():
if path in local_items:
local_item = local_items[path]
if local_item.checksum != remote_item.checksum:
conflicts.append({
"path": path,
"local_modified": local_item.modified_time,
"remote_modified": remote_item.modified_time,
})
return conflicts
def _resolve_conflicts_newest(self, conflicts: List[Dict], remote_items: List[SyncItem]) -> None:
"""Resolve conflicts by keeping newest version."""
remote_map = {item.path: item for item in remote_items}
for conflict in conflicts:
path = conflict["path"]
if conflict["remote_modified"] > conflict["local_modified"]:
# Apply remote version
for item in remote_items:
if item.path == path:
file_path = Path(path)
file_path.parent.mkdir(parents=True, exist_ok=True)
content = item.content
if self._sync_config.encrypt_data:
content = self._decrypt(content)
file_path.write_bytes(content)
break
# Provider Implementations
def _upload_to_provider(self, manifest: Dict, items: List[SyncItem]) -> bool:
"""Upload data to configured provider."""
provider = self._sync_config.provider
if provider == CloudProvider.CUSTOM.value:
return self._upload_custom(manifest, items)
elif provider == CloudProvider.WEBDAV.value:
return self._upload_webdav(manifest, items)
else:
print(f"[{self.name}] Provider '{provider}' not yet implemented")
return False
def _download_from_provider(self) -> tuple:
"""Download data from configured provider."""
provider = self._sync_config.provider
if provider == CloudProvider.CUSTOM.value:
return self._download_custom()
elif provider == CloudProvider.WEBDAV.value:
return self._download_webdav()
else:
print(f"[{self.name}] Provider '{provider}' not yet implemented")
return None, []
def _upload_custom(self, manifest: Dict, items: List[SyncItem]) -> bool:
"""Upload to custom endpoint."""
config = self._provider_config.get(CloudProvider.CUSTOM.value, {})
endpoint = config.get("upload_url")
api_key = config.get("api_key")
if not endpoint:
print(f"[{self.name}] Custom endpoint not configured")
return False
try:
# Create sync package
package = {
"manifest": manifest,
"items": [
{
"path": item.path,
"content": item.content.hex(),
"checksum": item.checksum,
}
for item in items
],
}
req = urllib.request.Request(
endpoint,
data=json.dumps(package).encode('utf-8'),
headers={
"Content-Type": "application/json",
"X-API-Key": api_key or "",
},
method="POST",
)
with urllib.request.urlopen(req, timeout=60) as response:
return response.status == 200
except Exception as e:
print(f"[{self.name}] Custom upload failed: {e}")
return False
def _download_custom(self) -> tuple:
"""Download from custom endpoint."""
config = self._provider_config.get(CloudProvider.CUSTOM.value, {})
endpoint = config.get("download_url")
api_key = config.get("api_key")
if not endpoint:
return None, []
try:
req = urllib.request.Request(
endpoint,
headers={"X-API-Key": api_key or ""},
)
with urllib.request.urlopen(req, timeout=60) as response:
package = json.loads(response.read().decode('utf-8'))
manifest = package.get("manifest", {})
items = [
SyncItem(
path=item["path"],
content=bytes.fromhex(item["content"]),
checksum=item["checksum"],
modified_time=item.get("modified_time", 0),
size=item.get("size", 0),
)
for item in package.get("items", [])
]
return manifest, items
except Exception as e:
print(f"[{self.name}] Custom download failed: {e}")
return None, []
def _upload_webdav(self, manifest: Dict, items: List[SyncItem]) -> bool:
"""Upload via WebDAV."""
# Placeholder for WebDAV implementation
print(f"[{self.name}] WebDAV upload not yet implemented")
return False
def _download_webdav(self) -> tuple:
"""Download via WebDAV."""
# Placeholder for WebDAV implementation
print(f"[{self.name}] WebDAV download not yet implemented")
return None, []
# Encryption
def _encrypt(self, data: bytes) -> bytes:
"""Simple XOR encryption (replace with proper encryption for production)."""
# This is a placeholder - use proper encryption in production
key = hashlib.sha256(b"eu-utility-sync-key").digest()
return bytes(b ^ key[i % len(key)] for i, b in enumerate(data))
def _decrypt(self, data: bytes) -> bytes:
"""Decrypt data (XOR is symmetric)."""
return self._encrypt(data) # XOR is its own inverse
# Auto-sync
def _start_auto_sync(self) -> None:
"""Start automatic sync thread."""
def auto_sync_loop():
interval = self._sync_config.sync_interval_minutes * 60
while self._running:
time.sleep(interval)
if self._running and self._sync_config.enabled:
self.sync_bidirectional()
self._sync_thread = threading.Thread(target=auto_sync_loop, daemon=True)
self._sync_thread.start()
print(f"[{self.name}] Auto-sync started ({self._sync_config.sync_interval_minutes}min)")
def _start_file_watcher(self) -> None:
"""Start file watcher for sync-on-change."""
if not self._sync_config.sync_on_change:
return
def watch_loop():
while self._running:
if self._check_for_changes():
self._pending_changes = True
# Debounce - wait for changes to settle
time.sleep(5)
if self._pending_changes and self._running:
self.sync_up()
time.sleep(2)
self._watch_thread = threading.Thread(target=watch_loop, daemon=True)
self._watch_thread.start()
def _check_for_changes(self) -> bool:
"""Check if any watched files have changed."""
# Simple implementation - check mtimes
items = self._collect_sync_items()
changed = False
for item in items:
last_mtime = self._watched_files.get(item.path, 0)
if item.modified_time > last_mtime:
self._watched_files[item.path] = item.modified_time
changed = True
return changed
# Persistence
def _load_sync_config(self) -> None:
"""Load sync configuration."""
config_file = Path(self._config["data_dir"]) / "cloud_sync_config.json"
if config_file.exists():
try:
with open(config_file) as f:
data = json.load(f)
self._sync_config = SyncConfig(**data.get("sync", {}))
self._provider_config = data.get("providers", {})
except Exception as e:
print(f"[{self.name}] Failed to load config: {e}")
def _save_sync_config(self) -> None:
"""Save sync configuration."""
config_file = Path(self._config["data_dir"]) / "cloud_sync_config.json"
try:
with open(config_file, 'w') as f:
json.dump({
"sync": asdict(self._sync_config),
"providers": self._provider_config,
}, f, indent=2)
except Exception as e:
print(f"[{self.name}] Failed to save config: {e}")
def _load_history(self) -> None:
"""Load sync history."""
history_file = Path(self._config["data_dir"]) / "sync_history.json"
if history_file.exists():
try:
with open(history_file) as f:
self._sync_history = json.load(f)
except Exception as e:
print(f"[{self.name}] Failed to load history: {e}")
def _save_history(self) -> None:
"""Save sync history."""
history_file = Path(self._config["data_dir"]) / "sync_history.json"
try:
with open(history_file, 'w') as f:
json.dump(self._sync_history[-100:], f, indent=2) # Keep last 100
except Exception as e:
print(f"[{self.name}] Failed to save history: {e}")
def _record_sync(self, direction: str, success: bool, error: Optional[str] = None) -> None:
"""Record a sync operation."""
record = {
"timestamp": datetime.now().isoformat(),
"direction": direction,
"success": success,
"error": error,
}
self._sync_history.append(record)
# Public API
def get_sync_history(self) -> List[Dict[str, Any]]:
"""Get sync history."""
return self._sync_history.copy()
def get_last_sync(self) -> Optional[str]:
"""Get timestamp of last successful sync."""
return self._sync_config.last_sync
def clear_remote_data(self) -> bool:
"""Clear all data from remote."""
# Provider-specific implementation
print(f"[{self.name}] Clear remote not implemented for provider: {self._sync_config.provider}")
return False