# EU-Utility/projects/EU-Utility/core/log_reader.py
"""
EU-Utility - Log Reader Core Service
Real-time log file monitoring and parsing for Entropia Universe.
Part of core - not a plugin. Plugins access via PluginAPI.
"""
import os
import re
import threading
import time
from collections import deque
from dataclasses import dataclass, field
from datetime import datetime
from pathlib import Path
from typing import Callable, Dict, List, Optional
@dataclass
class LogEvent:
    """Represents a parsed log event.

    Produced by LogReader._parse_event() and passed to subscriber callbacks.
    """
    # Wall-clock time the line was parsed (datetime.now()), not any
    # timestamp embedded in the log line itself.
    timestamp: datetime
    # The original (stripped) log line the event was parsed from.
    raw_line: str
    # Key into LogReader.PATTERNS identifying which regex matched.
    event_type: str
    # Parser payload; currently {'groups': <regex match groups>}.
    data: Dict = field(default_factory=dict)
class LogReader:
"""
Core service for reading and parsing EU chat.log.
Runs in background thread, notifies subscribers of events.
"""
# Log file patterns
LOG_PATHS = [
Path.home() / "Documents" / "Entropia Universe" / "chat.log",
Path.home() / "Documents" / "Entropia Universe" / "Logs" / "chat.log",
Path.home() / "Entropia Universe" / "chat.log",
]
# Event patterns for parsing
PATTERNS = {
'skill_gain': re.compile(
r'(.+?)\s+has\s+improved\s+by\s+(\d+\.?\d*)\s+points?',
re.IGNORECASE
),
'loot': re.compile(
r'You\s+received\s+(.+?)\s+x\s*(\d+)',
re.IGNORECASE
),
'global': re.compile(
r'(\w+)\s+received\s+.+?\s+from\s+(\w+)\s+worth\s+(\d+)\s+PED',
re.IGNORECASE
),
'damage': re.compile(
r'You\s+(?:hit|inflicted)\s+(\d+)\s+damage',
re.IGNORECASE
),
'damage_taken': re.compile(
r'You\s+were\s+hit\s+for\s+(\d+)\s+damage',
re.IGNORECASE
),
'heal': re.compile(
r'You\s+(?:healed|restored)\s+(\d+)\s+(?:health|points)',
re.IGNORECASE
),
'mission_complete': re.compile(
r'Mission\s+completed:\s+(.+)',
re.IGNORECASE
),
'tier_increase': re.compile(
r'Your\s+(.+?)\s+has\s+reached\s+tier\s+(\d+)',
re.IGNORECASE
),
'enhancer_break': re.compile(
r'Your\s+(.+?)\s+broke',
re.IGNORECASE
),
}
def __init__(self, log_path: Path = None):
self.log_path = log_path or self._find_log_file()
self.running = False
self.thread = None
self.last_position = 0
# Subscribers: {event_type: [callbacks]}
self._subscribers: Dict[str, List[Callable]] = {}
self._any_subscribers: List[Callable] = []
# Cache recent lines
self._recent_lines: List[str] = []
self._max_cache = 1000
# Stats
self.stats = {
'lines_read': 0,
'events_parsed': 0,
'start_time': None
}
def _find_log_file(self) -> Optional[Path]:
"""Find EU chat.log file."""
for path in self.LOG_PATHS:
if path.exists():
return path
return None
def start(self) -> bool:
"""Start log monitoring in background thread."""
if not self.log_path or not self.log_path.exists():
print(f"[LogReader] Log file not found. Tried: {self.LOG_PATHS}")
return False
self.running = True
self.stats['start_time'] = datetime.now()
# Start at end of file (don't process old lines)
self.last_position = self.log_path.stat().st_size
self.thread = threading.Thread(target=self._watch_loop, daemon=True)
self.thread.start()
print(f"[LogReader] Started watching: {self.log_path}")
return True
def stop(self):
"""Stop log monitoring."""
self.running = False
if self.thread:
self.thread.join(timeout=1.0)
print("[LogReader] Stopped")
def _watch_loop(self):
"""Main watching loop."""
while self.running:
try:
self._check_for_new_lines()
except Exception as e:
print(f"[LogReader] Error: {e}")
time.sleep(0.5) # 500ms poll interval
def _check_for_new_lines(self):
"""Check for and process new log lines."""
current_size = self.log_path.stat().st_size
if current_size < self.last_position:
# Log was rotated/truncated
self.last_position = 0
if current_size == self.last_position:
return
with open(self.log_path, 'r', encoding='utf-8', errors='ignore') as f:
f.seek(self.last_position)
new_lines = f.readlines()
self.last_position = f.tell()
for line in new_lines:
line = line.strip()
if line:
self._process_line(line)
def _process_line(self, line: str):
"""Process a single log line."""
self.stats['lines_read'] += 1
# Add to cache
self._recent_lines.append(line)
if len(self._recent_lines) > self._max_cache:
self._recent_lines.pop(0)
# Try to parse as event
event = self._parse_event(line)
if event:
self.stats['events_parsed'] += 1
self._notify_subscribers(event)
def _parse_event(self, line: str) -> Optional[LogEvent]:
"""Parse a log line into a LogEvent."""
for event_type, pattern in self.PATTERNS.items():
match = pattern.search(line)
if match:
return LogEvent(
timestamp=datetime.now(),
raw_line=line,
event_type=event_type,
data={'groups': match.groups()}
)
return None
def _notify_subscribers(self, event: LogEvent):
"""Notify all subscribers of an event."""
# Type-specific subscribers
callbacks = self._subscribers.get(event.event_type, [])
for callback in callbacks:
try:
callback(event)
except Exception as e:
print(f"[LogReader] Subscriber error: {e}")
# "Any" subscribers
for callback in self._any_subscribers:
try:
callback(event)
except Exception as e:
print(f"[LogReader] Subscriber error: {e}")
# ========== Public API ==========
def subscribe(self, event_type: str, callback: Callable):
"""Subscribe to specific event type."""
if event_type not in self._subscribers:
self._subscribers[event_type] = []
self._subscribers[event_type].append(callback)
def subscribe_all(self, callback: Callable):
"""Subscribe to all events."""
self._any_subscribers.append(callback)
def unsubscribe(self, event_type: str, callback: Callable):
"""Unsubscribe from events."""
if event_type in self._subscribers:
self._subscribers[event_type] = [
cb for cb in self._subscribers[event_type] if cb != callback
]
def read_lines(self, count: int = 50, filter_text: str = None) -> List[str]:
"""Read recent lines (API method)."""
lines = self._recent_lines[-count:] if count < len(self._recent_lines) else self._recent_lines
if filter_text:
lines = [l for l in lines if filter_text.lower() in l.lower()]
return lines
def get_stats(self) -> Dict:
"""Get reader statistics."""
return self.stats.copy()
def is_available(self) -> bool:
"""Check if log file is available."""
return self.log_path is not None and self.log_path.exists()
# Module-level singleton; created lazily on first access.
_log_reader: Optional["LogReader"] = None


def get_log_reader() -> LogReader:
    """Return the process-wide LogReader, creating it on first call."""
    global _log_reader
    if _log_reader is None:
        _log_reader = LogReader()
    return _log_reader