"""
Lemontropia Suite - Icon Cache Parser

Parse Entropia Universe iconcache.dat for item metadata.

The iconcache.dat file contains mappings between icon IDs and item information.
"""

import struct
import logging
from pathlib import Path
from typing import Dict, Optional, List, Tuple
from dataclasses import dataclass

logger = logging.getLogger(__name__)

@dataclass
class IconCacheEntry:
    """A single icon-ID -> item-name mapping parsed from iconcache.dat."""

    # Icon identifier exactly as it appears in the cache, e.g. "i0000328".
    icon_id: str
    # Human-readable item name associated with the icon.
    item_name: str
    # Item category (weapon, armor, etc.); empty string when unknown.
    item_type: str = ""
    # Raw trailing bytes; not populated by the current parsing heuristics.
    extra_data: bytes = b""
class IconCacheParser:
    """
    Parser for Entropia Universe iconcache.dat files.

    The iconcache.dat is a binary file that maps icon IDs to item names.
    The on-disk format appears to be proprietary/binary, so parsing is
    heuristic: an embedded-string extraction pass runs first, with a
    structural probe as a fallback.

    Usage:
        parser = IconCacheParser()
        entries = parser.parse_cache_file("path/to/iconcache.dat")

        # Look up an item name by icon ID
        name = parser.get_item_name("i0000328")
    """

    # Common cache file locations, searched when no explicit path is given.
    DEFAULT_CACHE_PATHS = [
        Path("C:") / "ProgramData" / "Entropia Universe" / "public_users_data" / "cache" / "icon",
        Path.home() / "Documents" / "Entropia Universe" / "cache" / "icons",
    ]

    def __init__(self, cache_path: Optional[Path] = None):
        """
        Initialize parser.

        Args:
            cache_path: Path to cache directory (auto-find if None)
        """
        self.cache_path = cache_path
        # icon_id -> IconCacheEntry, filled by parse_cache_file().
        self._entries: Dict[str, "IconCacheEntry"] = {}
        # True once a parse attempt has completed successfully.
        self._parsed = False

    def find_cache_file(self) -> Optional[Path]:
        """
        Find the iconcache.dat file.

        Searches ``cache_path`` when one was supplied, otherwise every
        directory in ``DEFAULT_CACHE_PATHS`` (recursively).

        Returns:
            Path to the first iconcache.dat found, or None.
        """
        search_paths = [self.cache_path] if self.cache_path else self.DEFAULT_CACHE_PATHS

        for path in search_paths:
            if not path.exists():
                continue

            # Look for iconcache.dat in this folder or subfolders.
            for dat_file in path.rglob("iconcache.dat"):
                logger.info("Found iconcache.dat: %s", dat_file)
                return dat_file

        logger.warning("iconcache.dat not found")
        return None

    def parse_cache_file(self, filepath: Optional[Path] = None) -> Dict[str, "IconCacheEntry"]:
        """
        Parse the iconcache.dat file.

        Args:
            filepath: Path to iconcache.dat (auto-find if None). A plain
                string is accepted too, matching the class usage example.

        Returns:
            Dictionary mapping icon_id to IconCacheEntry (empty on failure)
        """
        if filepath is None:
            filepath = self.find_cache_file()
        else:
            # The original code called .exists() on the argument directly,
            # which raised AttributeError for the str shown in the class
            # docstring usage; coerce to Path to support both.
            filepath = Path(filepath)

        if not filepath or not filepath.exists():
            logger.error("iconcache.dat not found")
            return {}

        entries: Dict[str, "IconCacheEntry"] = {}

        try:
            with open(filepath, 'rb') as f:
                data = f.read()

            logger.info("Parsing iconcache.dat (%d bytes)", len(data))

            # The format is undocumented. Try cheap string extraction first
            # (many .dat files carry embedded strings); fall back to a
            # structural probe only when that yields nothing.
            entries = self._extract_strings(data)
            if not entries:
                entries = self._parse_binary_structure(data)

            self._entries = entries
            self._parsed = True

            logger.info("Parsed %d entries from iconcache.dat", len(entries))

        except Exception as e:
            # Best-effort parser: any failure (I/O or heuristic) degrades to
            # an empty result rather than crashing the caller.
            logger.error("Failed to parse iconcache.dat: %s", e)

        return entries

    def _extract_strings(self, data: bytes) -> Dict[str, "IconCacheEntry"]:
        """
        Extract readable strings from binary data.

        This is a heuristic approach that looks for:
        - Icon IDs (pattern: i0000000)
        - Item names (readable ASCII strings near each icon ID)
        """
        import re

        entries: Dict[str, "IconCacheEntry"] = {}

        # Icon IDs are "i" followed by exactly 7 digits (e.g. i0000328).
        icon_pattern = re.compile(rb'i\d{7}')
        # Candidate item names: a letter followed by 3-50 printable chars,
        # terminated by a low control byte.
        string_pattern = re.compile(rb'[A-Za-z][A-Za-z0-9_\s\-\(\)]{3,50}(?=[\x00\x01\x02])')

        # finditer gives the true position of EVERY occurrence. The previous
        # implementation re-located each ID with data.find(), which always
        # returned the first occurrence and mis-positioned duplicates; it
        # also stopped after 50 IDs (a leftover debug limit).
        matches = list(icon_pattern.finditer(data))
        logger.debug("Found %d potential icon IDs", len(matches))

        for match in matches:
            icon_id = match.group().decode('ascii', errors='ignore')
            if icon_id in entries:
                # Keep the first occurrence of each ID; repeats add nothing.
                continue

            # Look for nearby strings (100 bytes before, 200 after).
            pos = match.start()
            search_start = max(0, pos - 100)
            search_end = min(len(data), pos + 200)
            nearby_strings = string_pattern.findall(data[search_start:search_end])

            if nearby_strings:
                # Use the longest string as the item name (heuristic).
                item_name = max(nearby_strings, key=len).decode('latin-1', errors='ignore')
                entries[icon_id] = IconCacheEntry(
                    icon_id=icon_id,
                    item_name=item_name.strip()
                )

        return entries

    def _parse_binary_structure(self, data: bytes) -> Dict[str, "IconCacheEntry"]:
        """
        Try to parse the binary structure of the file.

        This attempts to identify record structures based on:
        - Fixed-size records
        - Offset tables
        - Common binary patterns

        NOTE: currently diagnostic only — it logs what it detects but never
        produces entries, so it always returns an empty dict.
        """
        entries: Dict[str, "IconCacheEntry"] = {}

        if len(data) < 100:
            # Too small to hold a header plus records worth probing.
            return entries

        # Check header bytes for a recognizable magic value.
        magic = data[:4]
        logger.debug("File header (first 4 bytes): %s", magic.hex())

        # Try reading as a simple table: [count][record_size][records...]
        try:
            count = struct.unpack('<I', data[:4])[0]  # little-endian uint32
        except struct.error:
            # Previously a bare except: — narrowed to the only error
            # struct.unpack raises here.
            return entries

        if 0 < count < 100000:  # plausible record count
            logger.debug("Potential record count: %d", count)
            # Estimate record size assuming an 8-byte header before records.
            record_size = (len(data) - 8) // count
            logger.debug("Estimated record size: %d", record_size)

        return entries

    def get_item_name(self, icon_id: str) -> Optional[str]:
        """Get item name by icon ID (parses the cache lazily on first use)."""
        if not self._parsed:
            self.parse_cache_file()

        entry = self._entries.get(icon_id)
        return entry.item_name if entry else None

    def get_all_entries(self) -> Dict[str, "IconCacheEntry"]:
        """Get a copy of all parsed entries (parses lazily on first use)."""
        if not self._parsed:
            self.parse_cache_file()
        return self._entries.copy()

    def print_summary(self) -> None:
        """Print a summary of the parsed cache to stdout."""
        if not self._parsed:
            self.parse_cache_file()

        print("=" * 60)
        print("Icon Cache Summary")
        print("=" * 60)
        print(f"Total entries: {len(self._entries)}")
        print()

        # Show up to ten sample entries.
        if self._entries:
            print("Sample entries:")
            for icon_id, entry in list(self._entries.items())[:10]:
                print(f" {icon_id}: {entry.item_name}")
        else:
            print("No entries parsed (format may be unknown)")

        print("=" * 60)
# Convenience function
def parse_icon_cache(cache_path: Optional[Path] = None) -> Dict[str, str]:
    """
    Quick function to parse icon cache and return icon_id -> name mapping.

    Returns:
        Dictionary mapping icon_id to item_name
    """
    # Parse via a throwaway parser instance and flatten the entries.
    parsed = IconCacheParser(cache_path).parse_cache_file()
    return {icon_id: entry.item_name for icon_id, entry in parsed.items()}
def main():
    """CLI to analyze iconcache.dat."""
    import sys

    print("🔍 Entropia Universe Icon Cache Parser")
    print("=" * 60)

    parser = IconCacheParser()

    # Locate the cache file; bail out with a search report if absent.
    cache_file = parser.find_cache_file()
    if not cache_file:
        print("❌ iconcache.dat not found")
        print()
        print("Searched in:")
        for path in parser.DEFAULT_CACHE_PATHS:
            print(f" - {path}")
        return

    print(f"Found: {cache_file}")
    print()
    parser.parse_cache_file(cache_file)
    parser.print_summary()
if __name__ == "__main__":
    # Run the CLI analyzer when executed as a script.
    main()