#!/usr/bin/env python3
"""
🔄 Service de Mise à Jour des Données Historiques
==================================================
Tourne en arrière-plan et met à jour régulièrement
la base de données des klines pour les 62 cryptos.

Ce service:
- Met à jour les données toutes les heures (configurable)
- Ajoute uniquement les nouvelles bougies (incrémental)
- Fonctionne silencieusement en arrière-plan
- Écrit les logs dans un fichier

Usage:
    python historical_data_updater.py                    # Mode foreground
    python historical_data_updater.py --interval 30     # Toutes les 30 min
    python historical_data_updater.py --daemon          # Mode silencieux
"""

import json
import time
import requests
import argparse
import logging
import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Any, Optional
import threading

# ═══════════════════════════════════════════════════════════════════════════════
# CONFIGURATION
# ═══════════════════════════════════════════════════════════════════════════════

BASE_URL = "https://api.binance.com"
OUTPUT_DIR = Path(__file__).parent / "historical_data"
WATCHLIST_FILE = Path(__file__).parent / "watchlist.json"
LOG_FILE = Path(__file__).parent / "historical_data" / "updater.log"
STATUS_FILE = Path(__file__).parent / "historical_data" / "updater_status.json"

# Intervalles à maintenir à jour
INTERVALS_TO_UPDATE = ["1h", "4h", "1d"]

# Intervalle de mise à jour par défaut (en minutes)
DEFAULT_UPDATE_INTERVAL = 60

# ═══════════════════════════════════════════════════════════════════════════════
# LOGGING
# ═══════════════════════════════════════════════════════════════════════════════

def setup_logging(daemon_mode: bool = False):
    """Configure root logging: always to the log file, plus stdout unless daemonized."""
    OUTPUT_DIR.mkdir(exist_ok=True)

    # The file handler is always attached; the console handler only in
    # foreground mode so a daemon stays silent.
    handler_list = [logging.FileHandler(LOG_FILE, encoding='utf-8')]
    if not daemon_mode:
        handler_list.append(logging.StreamHandler(sys.stdout))

    logging.basicConfig(
        format='%(asctime)s | %(levelname)s | %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S',
        level=logging.INFO,
        handlers=handler_list,
    )

# ═══════════════════════════════════════════════════════════════════════════════
# FONCTIONS UTILITAIRES
# ═══════════════════════════════════════════════════════════════════════════════

def load_watchlist() -> List[str]:
    """Load the list of symbols to update from the watchlist JSON file.

    Returns:
        The "symbols" list from the file, or an empty list when the file
        is missing or has no "symbols" key.
    """
    if not WATCHLIST_FILE.exists():
        return []
    # Explicit encoding, consistent with every other file operation in
    # this module (the original relied on the platform default).
    with open(WATCHLIST_FILE, "r", encoding="utf-8") as f:
        data = json.load(f)
    return data.get("symbols", [])

def load_existing_data(symbol: str) -> Optional[Dict]:
    """Load previously saved historical data for *symbol*.

    Returns:
        The parsed JSON document, or None when the file does not exist
        or cannot be read/parsed.
    """
    filepath = OUTPUT_DIR / f"{symbol}_historical.json"
    if not filepath.exists():
        return None
    try:
        with open(filepath, "r", encoding="utf-8") as f:
            return json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        # A corrupt or unreadable file is treated as "no data", but is
        # logged instead of being silently swallowed by a bare except.
        logging.warning(f"Could not read cached data for {symbol}: {e}")
        return None

def save_data(data: Dict, symbol: str):
    """Write a symbol's historical data document to its JSON file."""
    OUTPUT_DIR.mkdir(exist_ok=True)
    target = OUTPUT_DIR / f"{symbol}_historical.json"
    with open(target, "w", encoding="utf-8") as out:
        json.dump(data, out, indent=2, ensure_ascii=False)

def get_klines(symbol: str, interval: str, start_time: int, end_time: int) -> List[List]:
    """Fetch raw klines from Binance between two millisecond timestamps.

    Pages through /api/v3/klines (up to 1000 candles per request) until
    end_time is reached, the API returns an empty page, or a request
    fails — in which case the error is logged and the partial result is
    returned.
    """
    endpoint = f"{BASE_URL}/api/v3/klines"
    collected: List[List] = []
    cursor = start_time

    while cursor < end_time:
        try:
            response = requests.get(
                endpoint,
                params={
                    "symbol": symbol,
                    "interval": interval,
                    "startTime": cursor,
                    "endTime": end_time,
                    "limit": 1000,
                },
                timeout=30,
            )
            response.raise_for_status()
            page = response.json()
        except requests.exceptions.RequestException as e:
            logging.error(f"Erreur API pour {symbol}/{interval}: {e}")
            break

        if not page:
            break

        collected.extend(page)
        # Resume one millisecond past the close time (index 6) of the
        # last candle received.
        cursor = page[-1][6] + 1
        time.sleep(0.05)  # Rate limiting

    return collected

def parse_kline(kline: List) -> Dict[str, Any]:
    """Convert one raw Binance kline array into a labelled dict.

    Index layout per the Binance REST API: [open_time, open, high, low,
    close, volume, close_time, quote_volume, trades, taker_buy_base,
    taker_buy_quote, ...].  Numeric fields arrive as strings and are
    cast here.  Key insertion order matches the stored JSON schema.
    """
    open_ms = kline[0]
    o, h, l, c, v = (float(x) for x in kline[1:6])
    return {
        "timestamp": open_ms,
        "datetime": datetime.fromtimestamp(open_ms / 1000).isoformat(),
        "open": o,
        "high": h,
        "low": l,
        "close": c,
        "volume": v,
        "close_time": kline[6],
        "quote_volume": float(kline[7]),
        "trades": int(kline[8]),
        "taker_buy_base": float(kline[9]),
        "taker_buy_quote": float(kline[10]),
    }

def update_symbol(symbol: str) -> Dict[str, int]:
    """Incrementally refresh one symbol's stored klines.

    For every interval in INTERVALS_TO_UPDATE already present in the
    symbol's file, fetch candles newer than the last stored close_time
    and append them.  Symbols with no existing file are skipped (the
    initial fetch script must be run first).

    Returns:
        Mapping interval -> number of klines appended (empty if skipped).
    """
    existing = load_existing_data(symbol)
    if existing is None:
        logging.warning(f"{symbol}: Pas de données existantes, skipped")
        return {}

    now_ms = int(datetime.now().timestamp() * 1000)
    added: Dict[str, int] = {}
    intervals = existing.get("intervals", {})

    for interval in INTERVALS_TO_UPDATE:
        if interval not in intervals:
            continue
        interval_data = intervals[interval]
        stored = interval_data.get("klines", [])
        if not stored:
            continue

        # Resume one millisecond after the newest stored candle closed.
        since = stored[-1]["close_time"] + 1
        fresh_raw = get_klines(symbol, interval, since, now_ms)
        if not fresh_raw:
            continue

        stored.extend(parse_kline(k) for k in fresh_raw)
        interval_data["klines"] = stored
        interval_data["count"] = len(stored)
        interval_data["end_date"] = stored[-1]["datetime"]
        added[interval] = len(fresh_raw)

    # Bookkeeping timestamps are refreshed and the file rewritten even
    # when nothing new arrived, so they act as a liveness heartbeat.
    existing["fetched_at"] = datetime.now().isoformat()
    existing["last_update"] = datetime.now().isoformat()
    save_data(existing, symbol)

    return added

def update_status(status: Dict):
    """Persist the latest cycle statistics to the status JSON file."""
    OUTPUT_DIR.mkdir(exist_ok=True)
    with open(STATUS_FILE, "w", encoding="utf-8") as fh:
        json.dump(status, fh, indent=2)

def run_update_cycle(interval_minutes: int = DEFAULT_UPDATE_INTERVAL):
    """Run one full update pass over every watchlist symbol.

    Args:
        interval_minutes: Minutes until the next scheduled cycle; only
            used to compute the "next_update" field of the status file.
            Defaults to DEFAULT_UPDATE_INTERVAL so existing callers are
            unaffected.

    Bug fix: the status file previously always advertised
    DEFAULT_UPDATE_INTERVAL as the time of the next update, even when
    the service was started with a custom --interval; callers can now
    pass the real interval.
    """
    symbols = load_watchlist()
    if not symbols:
        logging.warning("Aucun symbole dans la watchlist!")
        return

    cycle_start = time.time()
    total_new_klines = 0
    updated_symbols = 0

    logging.info(f"🔄 Début du cycle de mise à jour ({len(symbols)} symboles)")

    for symbol in symbols:
        try:
            new_counts = update_symbol(symbol)

            if new_counts:
                total_new = sum(new_counts.values())
                if total_new > 0:
                    total_new_klines += total_new
                    updated_symbols += 1
                    logging.debug(f"  {symbol}: +{total_new} klines")

            time.sleep(0.1)  # Rate limiting entre symboles

        except Exception as e:
            # Best-effort: one failing symbol must not abort the cycle.
            logging.error(f"Erreur mise à jour {symbol}: {e}")

    elapsed = time.time() - cycle_start

    status = {
        "last_update": datetime.now().isoformat(),
        "symbols_checked": len(symbols),
        "symbols_updated": updated_symbols,
        "new_klines_added": total_new_klines,
        "duration_seconds": round(elapsed, 1),
        # Use the caller-supplied interval, not the module default.
        "next_update": (datetime.now() + timedelta(minutes=interval_minutes)).isoformat()
    }
    update_status(status)

    logging.info(f"✅ Cycle terminé: {updated_symbols} symboles mis à jour, +{total_new_klines} klines ({elapsed:.1f}s)")

# ═══════════════════════════════════════════════════════════════════════════════
# SERVICE PRINCIPAL
# ═══════════════════════════════════════════════════════════════════════════════

class HistoricalDataUpdater:
    """Background service that periodically runs full update cycles."""

    def __init__(self, interval_minutes: int = DEFAULT_UPDATE_INTERVAL, daemon: bool = False):
        # Stored in seconds because threading.Event.wait expects seconds.
        self.interval = interval_minutes * 60
        self.daemon = daemon
        self.running = False
        self._stop_event = threading.Event()

    def start(self):
        """Run an immediate cycle, then loop until stop() is called."""
        self.running = True

        banner = "=" * 60
        logging.info(banner)
        logging.info("🚀 Démarrage du service de mise à jour historique")
        logging.info(f"   Intervalle: {self.interval // 60} minutes")
        logging.info(f"   Mode: {'Daemon' if self.daemon else 'Foreground'}")
        logging.info(banner)

        # First cycle runs right away rather than after one full wait.
        run_update_cycle()

        while self.running and not self._stop_event.is_set():
            # Event.wait doubles as an interruptible sleep: it returns
            # early as soon as stop() sets the event.
            self._stop_event.wait(self.interval)
            if self.running and not self._stop_event.is_set():
                run_update_cycle()

    def stop(self):
        """Signal the service loop to exit."""
        logging.info("🛑 Arrêt du service demandé...")
        self.running = False
        self._stop_event.set()

def main():
    """CLI entry point: parse arguments, then run once or as a service."""
    parser = argparse.ArgumentParser(description="Service de mise à jour des données historiques")
    parser.add_argument("--interval", type=int, default=DEFAULT_UPDATE_INTERVAL,
                        help=f"Intervalle de mise à jour en minutes (défaut: {DEFAULT_UPDATE_INTERVAL})")
    parser.add_argument("--daemon", action="store_true",
                        help="Mode daemon (pas d'output console)")
    parser.add_argument("--once", action="store_true",
                        help="Exécuter une seule fois et quitter")
    args = parser.parse_args()

    setup_logging(daemon_mode=args.daemon)

    # One-shot mode: a single cycle, no service loop.
    if args.once:
        run_update_cycle()
        return

    service = HistoricalDataUpdater(
        interval_minutes=args.interval,
        daemon=args.daemon
    )
    try:
        service.start()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the foreground service.
        service.stop()
        logging.info("Service arrêté proprement")

if __name__ == "__main__":
    main()
