# logger_daily_multi.py - Daily JSON logging system with dynamic paths
# Compatible with Python 3.9+

import json
import os
import time
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Dict, Optional

# Base directory for data storage; can be overridden at runtime via set_data_dir().
# NOTE(review): default assumes a web-served path — presumably files here are
# exposed over HTTP (hence the 0o755/0o644 chmod calls below) — confirm.
DATA_DIR = "/var/www/html/btcusdt"


def set_data_dir(new_dir: str):
    """Override the module-level DATA_DIR used by all logging helpers.

    Args:
        new_dir: Directory path that subsequent calls will read/write.
    """
    global DATA_DIR
    DATA_DIR = new_dir


def get_daily_filename(symbol: Optional[str] = None):
    """Generate the JSON filename for today's (UTC) data.

    Args:
        symbol: Optional trading-pair symbol; lower-cased in the filename.
            Falls back to "btcusdt" when omitted or empty.

    Returns:
        str: e.g. "btcusdt_2024-01-31.json".
    """
    # datetime.utcnow() is deprecated (Python 3.12+); an aware UTC datetime
    # produces the identical date string.
    today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    name = symbol.lower() if symbol else "btcusdt"
    return f"{name}_{today}.json"


def get_daily_filepath(symbol: Optional[str] = None):
    """Return the full path (inside DATA_DIR) of today's data file."""
    return os.path.join(DATA_DIR, get_daily_filename(symbol))


def ensure_data_directory(directory: Optional[str] = None):
    """Create the data directory (and parents) if missing.

    Args:
        directory: Target directory; defaults to the module-level DATA_DIR.
    """
    target = Path(directory or DATA_DIR)
    target.mkdir(parents=True, exist_ok=True)

    # Best-effort: 0o755 so the web server can read the directory
    try:
        os.chmod(target, 0o755)
    except Exception as err:
        print(f"⚠️  Could not set directory permissions: {err}")


def append_daily_log(record: Dict[Any, Any], symbol: Optional[str] = None, directory: Optional[str] = None):
    """
    Append a data record to today's JSON file.

    IMPORTANT: This function APPENDS to the existing file, never overwrites!

    Args:
        record (dict): Data record to append
        symbol (str): Optional symbol name for filename
        directory (str): Optional directory override

    Side effects:
        Creates the directory/file if missing, rewrites the whole JSON list
        atomically (tmp file + os.replace), chmods the file to 0o644, and
        prints a progress line every 100 records. On failure the record is
        written to a timestamped ".emergency" file instead of being lost.
    """
    # Use provided directory or global DATA_DIR
    dir_path = directory or DATA_DIR

    # Ensure directory exists
    ensure_data_directory(dir_path)

    # Reuse the shared helper so the naming stays consistent file-wide
    filename = get_daily_filename(symbol)
    filepath = os.path.join(dir_path, filename)

    # ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
    # APPEND MODE - NEVER OVERWRITE
    # ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

    try:
        if os.path.exists(filepath):
            # File exists - READ and APPEND
            try:
                with open(filepath, "r", encoding="utf-8") as f:
                    data = json.load(f)

                # Ensure data is a list
                if not isinstance(data, list):
                    print("⚠️  Warning: Existing file is not a list, converting...")
                    data = [data]

            except json.JSONDecodeError as e:
                print(f"❌ Error reading existing file: {e}")
                print("⚠️  Creating backup and starting fresh...")

                # BUG FIX: the old datetime.utcnow().timestamp() treats the
                # naive UTC datetime as *local* time, skewing the epoch by the
                # UTC offset. time.time() is the correct epoch timestamp.
                backup_path = filepath + f".backup.{int(time.time())}"
                try:
                    os.rename(filepath, backup_path)
                    print(f"💾 Backed up corrupted file to: {backup_path}")
                except Exception as be:
                    print(f"❌ Could not backup: {be}")

                # Start fresh
                data = []
        else:
            # File doesn't exist - create new
            data = []
            print(f"✅ Creating new daily log: {filepath}")

        # Append new record
        data.append(record)

        # Write back to file (ATOMIC operation): dump to a temp file in the
        # same directory, then rename over the target in one step.
        temp_filepath = filepath + ".tmp"
        with open(temp_filepath, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2, ensure_ascii=False)
        os.replace(temp_filepath, filepath)

        # Best-effort: make file readable by the web server
        try:
            os.chmod(filepath, 0o644)
        except Exception:
            pass

        # Print status every 100 records
        record_count = len(data)
        if record_count % 100 == 0:
            file_size = os.path.getsize(filepath) / (1024 * 1024)  # MB
            # BUG FIX: previously printed the literal "(unknown)" here
            print(f"📁 {filename}: {record_count} records | {file_size:.2f} MB")

    except Exception as e:
        print(f"❌ Error appending to daily log: {e}")

        # Last resort: save the single record to an emergency backup so it
        # is not lost (same epoch-timestamp fix as above).
        try:
            emergency_filepath = filepath + f".emergency.{int(time.time())}.json"
            with open(emergency_filepath, "w", encoding="utf-8") as f:
                json.dump([record], f, indent=2, ensure_ascii=False)
            print(f"💾 Saved to emergency backup: {emergency_filepath}")
        except Exception as e2:
            print(f"❌ Emergency backup also failed: {e2}")


def get_log_stats(symbol: Optional[str] = None, directory: Optional[str] = None):
    """Return statistics about today's log file.

    Args:
        symbol: Optional symbol used in the filename (lower-cased);
            defaults to "btcusdt".
        directory: Optional directory override (defaults to DATA_DIR).

    Returns:
        dict: Always contains "exists" and "filename". When the file exists
        and parses, also "records", "size_mb" (rounded), and "filepath";
        when reading fails, an "error" key instead.
    """
    dir_path = directory or DATA_DIR
    # datetime.utcnow() is deprecated (Python 3.12+); an aware UTC datetime
    # yields the same date string.
    today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    name = symbol.lower() if symbol else "btcusdt"
    filename = f"{name}_{today}.json"
    filepath = os.path.join(dir_path, filename)

    if not os.path.exists(filepath):
        return {
            "exists": False,
            "records": 0,
            "size_mb": 0,
            "filename": filename,
        }

    try:
        with open(filepath, "r", encoding="utf-8") as f:
            data = json.load(f)

        # A non-list payload (legacy single-object file) counts as 1 record
        record_count = len(data) if isinstance(data, list) else 1
        file_size = os.path.getsize(filepath) / (1024 * 1024)  # MB

        return {
            "exists": True,
            "records": record_count,
            "size_mb": round(file_size, 2),
            "filename": filename,
            "filepath": filepath,
        }
    except Exception as e:
        # File exists but could not be read/parsed — report rather than raise
        return {
            "exists": True,
            "error": str(e),
            "filename": filename,
        }


def list_all_logs(directory: Optional[str] = None):
    """List all available .json log files, newest filename first.

    Args:
        directory: Optional directory override (defaults to DATA_DIR).

    Returns:
        list[dict]: One entry per file with "filename", "size_mb", and
        "records" ("unknown" when the file cannot be parsed).
    """
    dir_path = directory or DATA_DIR
    ensure_data_directory(dir_path)

    logs = []

    try:
        if not os.path.exists(dir_path):
            return logs

        for filename in sorted(os.listdir(dir_path), reverse=True):
            # Skip non-JSON artifacts (e.g. ".tmp" atomic-write leftovers)
            if not filename.endswith(".json") or filename.endswith(".tmp.json"):
                continue

            filepath = os.path.join(dir_path, filename)
            file_size = os.path.getsize(filepath) / (1024 * 1024)  # MB

            # Try to get record count
            try:
                with open(filepath, "r", encoding="utf-8") as f:
                    data = json.load(f)
                record_count = len(data) if isinstance(data, list) else 1
            # BUG FIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit. JSONDecodeError is a ValueError.
            except (OSError, ValueError):
                record_count = "unknown"

            logs.append({
                "filename": filename,
                "size_mb": round(file_size, 2),
                "records": record_count,
            })
    except Exception as e:
        print(f"❌ Error listing logs: {e}")

    return logs


def cleanup_old_logs(keep_days: int = 30, directory: Optional[str] = None):
    """
    Delete .json log files older than keep_days (by mtime).

    Args:
        keep_days (int): Number of days to keep (default: 30)
        directory (str): Optional directory override

    Returns:
        list[str]: Filenames that were deleted.
    """
    dir_path = directory or DATA_DIR
    ensure_data_directory(dir_path)

    if not os.path.exists(dir_path):
        return []

    # Files with mtime before this epoch cutoff are removed
    cutoff_time = time.time() - (keep_days * 24 * 60 * 60)

    deleted = []

    try:
        for filename in os.listdir(dir_path):
            if filename.endswith(".json"):
                filepath = os.path.join(dir_path, filename)
                file_mtime = os.path.getmtime(filepath)

                if file_mtime < cutoff_time:
                    os.remove(filepath)
                    deleted.append(filename)
                    # BUG FIX: previously printed the literal "(unknown)"
                    # instead of the deleted file's name
                    print(f"🗑️  Deleted old log: {filename}")
    except Exception as e:
        print(f"❌ Error cleaning up logs: {e}")

    return deleted


def print_daily_summary(symbol: Optional[str] = None, directory: Optional[str] = None):
    """Print a human-readable summary of today's data collection.

    Args:
        symbol: Optional symbol for the per-symbol log file.
        directory: Optional directory override (defaults to DATA_DIR).
    """
    stats = get_log_stats(symbol, directory)

    if not stats["exists"]:
        print(f"\n📋 No data collected today yet: {stats['filename']}\n")
        return

    # BUG FIX: get_log_stats returns {"exists": True, "error": ...} without
    # a "records" key on read failure; the old code raised KeyError here.
    if "error" in stats:
        print(f"\n⚠️  Could not read today's log ({stats['filename']}): {stats['error']}\n")
        return

    print("\n" + "=" * 60)
    print("📊 Today's Data Collection Summary")
    if symbol:
        print(f"Symbol: {symbol}")
    print("=" * 60)
    print(f"File: {stats['filename']}")
    print(f"Records: {stats['records']:,}")
    print(f"Size: {stats['size_mb']} MB")

    if stats["records"] > 0:
        # Expected cadence: one record every 2 seconds since UTC midnight.
        # datetime.utcnow() is deprecated; aware UTC arithmetic is identical.
        now = datetime.now(timezone.utc)
        start_of_day = now.replace(hour=0, minute=0, second=0, microsecond=0)
        seconds_elapsed = (now - start_of_day).total_seconds()
        expected_records = int(seconds_elapsed / 2)

        completion = (stats["records"] / expected_records * 100) if expected_records > 0 else 0

        print(f"Expected: ~{expected_records:,} records")
        print(f"Completion: {completion:.1f}%")

    print("=" * 60 + "\n")