"""
Dashboard API Server v2.0
==========================
Version améliorée avec architecture modulaire, sécurité renforcée et async/await

Améliorations:
- Architecture modulaire (séparation des responsabilités)
- Validation des inputs avec Pydantic
- Rate limiting et authentification renforcée
- Opérations async pour meilleure performance
- Gestion d'erreurs robuste
- Code testable
"""

import http.server
import socketserver
import json
import os
import sys
import threading
import subprocess
import webbrowser
import asyncio
import logging
from datetime import datetime
from urllib.parse import urlparse
from typing import Optional

# Import de notre nouvelle architecture modulaire
from api.models import TradingConfig, OptimizationRequest
from api.security import TokenManager, RateLimiter, AuthMiddleware, SecurityValidator
from api.services import ConfigService, TradingService, BotService
from api.routes import RouteHandler
from api.utils import load_json_file, save_json_file

# Fix Windows console encoding: the default cp1252 console raises
# UnicodeEncodeError on the emoji/accented characters used in log messages,
# so wrap stdout/stderr in UTF-8 text wrappers with replacement on error.
try:
    if sys.platform == 'win32':
        import io
        sys.stdout = io.TextIOWrapper(
            sys.stdout.buffer, encoding='utf-8',
            errors='replace', line_buffering=True
        )
        sys.stderr = io.TextIOWrapper(
            sys.stderr.buffer, encoding='utf-8',
            errors='replace', line_buffering=True
        )
except Exception:
    # Best-effort: if the streams have no .buffer (already wrapped or
    # redirected), keep the defaults. Was a bare `except:`, which would
    # also have swallowed SystemExit/KeyboardInterrupt.
    pass

# Unbuffered mode so output reaches the console/file immediately
os.environ['PYTHONUNBUFFERED'] = '1'

# Logging configuration: mirror every record to the console AND a logfile
# next to this script (dashboard_log.txt).
log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'dashboard_log.txt')
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] %(name)s: %(message)s',
    datefmt='%H:%M:%S',
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler(log_file, encoding='utf-8')
    ]
)
logger = logging.getLogger(__name__)

# Server configuration
PORT = 8889
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# chdir so SimpleHTTPRequestHandler serves static files relative to this
# script, wherever the process was launched from.
os.chdir(SCRIPT_DIR)

# ─── Python executable: ALWAYS the venv interpreter, never sys.executable
# (which may resolve to the system Python and spawn duplicate processes) ──────
if sys.platform == 'win32':
    VENV_PYTHON = os.path.join(SCRIPT_DIR, '.venv', 'Scripts', 'python.exe')
else:
    VENV_PYTHON = os.path.join(SCRIPT_DIR, '.venv', 'bin', 'python3')
if not os.path.exists(VENV_PYTHON):
    VENV_PYTHON = sys.executable  # fallback when venv is missing (should not happen)

# Probe optional project modules; each sets an *_AVAILABLE flag the routes
# can check instead of importing at request time.

# crypto_data_fetcher: market-data cache layer
try:
    from crypto_data_fetcher import get_fetcher, CryptoDataFetcher
    CRYPTO_FETCHER_AVAILABLE = True
except ImportError:
    CRYPTO_FETCHER_AVAILABLE = False
    logger.warning("crypto_data_fetcher not available")

# AI surveillance/prediction service
try:
    from ai_predictor import get_ai_predictor, get_surveillance_service
    AI_PREDICTOR_AVAILABLE = True
    logger.info("✅ Module AI Predictor chargé")
except ImportError as e:
    AI_PREDICTOR_AVAILABLE = False
    logger.warning(f"AI Predictor non disponible: {e}")

# Real-time AI service (WebSocket based)
try:
    from ai_realtime_service import get_realtime_service, start_realtime_service
    AI_REALTIME_AVAILABLE = True
    logger.info("✅ Module AI Realtime chargé")
except ImportError as e:
    AI_REALTIME_AVAILABLE = False
    logger.warning(f"AI Realtime non disponible: {e}")

# Wire up the modular services (security first, then domain services, then
# the router that ties them together). Order matters: RouteHandler receives
# the already-constructed services.
token_manager = TokenManager(SCRIPT_DIR)
# 1000 req/min: the dashboard polls many endpoints concurrently per page load
rate_limiter = RateLimiter(max_requests=1000, window_seconds=60)
auth_middleware = AuthMiddleware(token_manager, rate_limiter)

config_service = ConfigService(SCRIPT_DIR)
trading_service = TradingService(SCRIPT_DIR)
bot_service = BotService(SCRIPT_DIR)

route_handler = RouteHandler(
    SCRIPT_DIR,
    config_service,
    trading_service,
    bot_service,
    auth_middleware
)

# Startup banner. Only the first 8 characters of the token are ever logged.
API_TOKEN = token_manager.get_token()
logger.info(f"Dashboard API Server v2.0")
logger.info(f"Port: {PORT}")
logger.info(f"Directory: {SCRIPT_DIR}")
logger.info(f"URL: http://localhost:{PORT}/dashboard.html")
logger.info(f"API Token: {API_TOKEN[:8]}... (set DASHBOARD_API_TOKEN to customize)")
logger.info("Press Ctrl+C to stop")


class DashboardAPIHandler(http.server.SimpleHTTPRequestHandler):
    """HTTP handler with modular architecture.

    Auth, rate limiting and routing are delegated to the module-level
    api.* services; this class only adapts them to http.server. Static
    files are served by the SimpleHTTPRequestHandler base class.
    """

    # Socket timeout (seconds) so a stalled client cannot pin a worker thread
    timeout = 30

    def __init__(self, *args, **kwargs):
        # Validator used by serve_json_file to reject unsafe filenames;
        # must be set before super().__init__, which handles the request.
        self.validator = SecurityValidator()
        super().__init__(*args, **kwargs)

    def version_string(self):
        """Return a generic token for the Server header (hides real version)."""
        return 'Server'

    def end_headers(self):
        """Ajouter headers CORS et sécurité"""
        # CORS restreint aux origines autorisées
        origin = self.headers.get('Origin', '')
        allowed_origins = {
            'https://trading-pascal.duckdns.org',
            'http://localhost:8889',
            'http://127.0.0.1:8889',
        }
        if origin in allowed_origins:
            self.send_header('Access-Control-Allow-Origin', origin)
            self.send_header('Vary', 'Origin')
        self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
        self.send_header('Access-Control-Allow-Headers', 'Content-Type, Authorization')
        self.send_header('Cache-Control', 'no-store, no-cache, must-revalidate')
        self.send_header('X-Content-Type-Options', 'nosniff')
        self.send_header('X-Frame-Options', 'DENY')
        self.send_header('Referrer-Policy', 'strict-origin-when-cross-origin')
        # 🚀 PERF: keep-alive évite un TCP handshake par requête (était Connection: close)
        self.send_header('Connection', 'keep-alive')
        self.send_header('Keep-Alive', 'timeout=30, max=100')
        super().end_headers()

    def do_OPTIONS(self):
        """Gérer preflight CORS"""
        self.send_response(200)
        self.end_headers()

    def check_auth(self) -> bool:
        """Return True when the request passes the auth middleware.

        Rejections are logged with the client IP for auditing.
        """
        ip = self.client_address[0]
        ok, error = auth_middleware.check_auth(dict(self.headers.items()), ip)
        if not ok:
            logger.warning(f"Unauthorized request from {ip}: {error}")
        return ok

    def send_auth_required(self):
        """Reply 401 with WWW-Authenticate so browsers show the login popup."""
        payload = b'Authentication required'
        self.send_response(401)
        for name, value in (
            ('WWW-Authenticate', 'Basic realm="Trading Bot Dashboard", charset="UTF-8"'),
            ('Content-Type', 'text/plain; charset=utf-8'),
            ('Content-Length', len(payload)),
        ):
            self.send_header(name, value)
        self.end_headers()
        self.wfile.write(payload)

    def check_rate_limit(self) -> tuple:
        """Delegate rate limiting for this client's IP to the middleware.

        Returns the middleware's (allowed, error, retry_after) tuple.
        """
        return auth_middleware.check_rate_limit(self.client_address[0])

    def do_GET(self):
        """Handle GET requests: rate-limit, authenticate, then dispatch.

        API routes are looked up in dispatch tables (O(1) instead of the
        previous 45-branch if/elif chain); anything unmatched falls through
        to SimpleHTTPRequestHandler's static-file serving.
        """
        try:
            # Rate limiting first: cheapest possible rejection
            allowed, error, retry_after = self.check_rate_limit()
            if not allowed:
                self.send_json_response({'error': error}, 429, retry_after)
                return

            # Parse the URL once (was parsed twice before)
            parsed = urlparse(self.path)
            path = parsed.path

            # Authentication is mandatory on every route except public ones
            # (e.g. /api/health)
            if path not in AuthMiddleware.PUBLIC_PATHS:
                if not self.check_auth():
                    self.send_auth_required()
                    return

            from urllib.parse import parse_qs
            query_params = parse_qs(parsed.query) if parsed.query else {}

            # Routes whose handler only needs the request handler instance
            plain_routes = {
                '/api/health': route_handler.handle_health,
                '/api/get-config': route_handler.handle_get_config,
                '/api/get-ia-criteria': route_handler.handle_get_ia_criteria,
                '/api/rotation-status': route_handler.handle_get_rotation_status,
                '/api/cycle-status': route_handler.handle_cycle_status,
                '/api/get-profiles': route_handler.handle_get_profiles,
                '/api/bot-analysis': route_handler.handle_bot_analysis,
                '/api/crypto-data': route_handler.handle_crypto_data,
                '/api/crypto-summary': route_handler.handle_crypto_summary,
                '/api/crypto-refresh': route_handler.handle_crypto_refresh,
                '/api/opportunities': route_handler.handle_opportunities,
                '/api/ai-surveillance': route_handler.handle_ai_surveillance,
                '/api/ai-training-info': route_handler.handle_ai_training_info,
                '/api/auto-updater-status': route_handler.handle_auto_updater_status,
                '/api/market-regime': route_handler.handle_market_regime,
                '/api/get-sltp-config': route_handler.handle_get_sltp_config,
                '/api/watchdog-status': route_handler.handle_watchdog_status,
                '/api/monitor-status': route_handler.handle_monitor_status,
                '/api/maintenance/health': route_handler.handle_maintenance_health,
                '/api/maintenance/spy-compare': route_handler.handle_spy_compare,
                '/api/maintenance/spy-performance': route_handler.handle_spy_performance,
                '/api/maintenance/binance-sync': route_handler.handle_binance_sync,
                '/api/binance-balance': route_handler.handle_binance_balance,
                '/api/maintenance/binance-account': route_handler.handle_binance_account_detail,
                '/api/spy-data': route_handler.handle_spy_data,
                '/api/spy-status': route_handler.handle_spy_status,
                '/api/spy-prod-data': route_handler.handle_spy_prod_data,
                '/api/spy-prod-status': route_handler.handle_spy_prod_status,
                '/api/spy-pipeline-status': route_handler.handle_spy_pipeline_status,
                '/api/ia-status': route_handler.handle_ia_status,
                '/api/bot-disabled-status': route_handler.handle_bot_disabled_status,
                '/api/mobile-summary': route_handler.handle_mobile_summary,
                '/api/gestion-projections': route_handler.handle_gestion_projections,
                '/api/deep-spy-analysis': route_handler.handle_deep_spy_analysis,
                '/api/market-movers': route_handler.handle_market_movers,
                '/api/trend-risk-ratio': route_handler.handle_trend_risk_ratio,
                '/api/crypto-news': route_handler.handle_crypto_news,
            }
            # Routes whose handler also receives the parsed query string
            query_routes = {
                '/api/ai-watchlist': route_handler.handle_ai_watchlist,
                '/api/trade-analysis': route_handler.handle_trade_analysis,
                '/api/analysis-logs': route_handler.handle_analysis_logs,
                '/api/positions-live': route_handler.handle_positions_live,
                '/api/ai-self-optimizer': route_handler.handle_ai_self_optimizer,
                '/api/technical-analysis': route_handler.handle_technical_analysis,
                '/api/market-analysis': route_handler.handle_market_analysis,
                '/api/patterns-performance': route_handler.handle_patterns_performance,
            }
            # Raw JSON files exposed directly (created with a default if absent)
            json_files = {
                '/positions.json': ('positions.json', {}),
                '/trade_history.json': ('trade_history.json', []),
            }

            if path in plain_routes:
                plain_routes[path](self)
            elif path in query_routes:
                query_routes[path](self, query_params)
            elif path.startswith('/api/bot-logs'):
                # Prefix match kept from the original routing
                route_handler.handle_bot_logs(self)
            elif path in json_files:
                filename, default = json_files[path]
                self.serve_json_file(filename, default)
            else:
                # Static files (dashboard.html, JS, CSS, ...)
                super().do_GET()

        except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError):
            # Client closed the connection mid-response: not an error
            pass
        except Exception as e:
            logger.error(f"GET error: {e}")
            try:
                self.send_json_response({'error': 'Internal server error'}, 500)
            except Exception:
                # Headers may already be sent or the socket may be gone.
                # (Was a bare `except:`, which also caught KeyboardInterrupt.)
                pass

    def do_POST(self):
        """Handle POST requests: rate-limit, authenticate, parse JSON, dispatch.

        All POST endpoints require authentication; bodies must be valid JSON
        (an empty body is treated as {}).
        """
        try:
            # Rate limiting first: cheapest possible rejection
            allowed, error, retry_after = self.check_rate_limit()
            if not allowed:
                self.send_json_response({'error': error}, 429, retry_after)
                return

            path = urlparse(self.path).path

            # Authentication mandatory on every POST endpoint
            if not self.check_auth():
                self.send_auth_required()
                return

            # Read and parse the JSON body
            content_length = int(self.headers.get('Content-Length', 0))
            body = self.rfile.read(content_length).decode('utf-8') if content_length > 0 else '{}'

            try:
                data = json.loads(body) if body else {}
            except json.JSONDecodeError as e:
                self.send_json_response({
                    'success': False,
                    'error': f'Invalid JSON: {str(e)}'
                }, 400)
                return

            # Routes whose handler receives the parsed JSON payload
            data_routes = {
                '/api/apply-config': route_handler.handle_apply_config,
                '/api/save-ia-criteria': route_handler.handle_save_ia_criteria,
                '/api/execute-rotation': route_handler.handle_execute_rotation,
                '/api/toggle-rotation': route_handler.handle_toggle_rotation,
                '/api/update-settings': route_handler.handle_update_settings,
                '/api/force-close': route_handler.handle_force_close,
                '/api/update-positions-sltp': route_handler.handle_update_positions_sltp,
                '/api/update-sltp-config': route_handler.handle_update_sltp_config,
                '/api/sell-all': route_handler.handle_sell_all,
                '/api/reset-dashboard': route_handler.handle_reset_dashboard,
                '/api/restart-bot': route_handler.handle_restart_bot,
                '/api/save-watchlist': route_handler.handle_save_watchlist,
                '/api/maintenance/binance-cleanup': route_handler.handle_binance_cleanup,
                '/api/spy-scan': route_handler.handle_spy_scan,
                '/api/spy-run-pipeline': route_handler.handle_spy_run_pipeline,
                '/api/spy-apply-optimizer': route_handler.handle_spy_apply_optimizer,
                '/api/bot-toggle': route_handler.handle_bot_toggle,
            }
            # Routes whose handler ignores the payload
            plain_routes = {
                '/api/maintenance/explain-sync': route_handler.handle_explain_sync,
                '/api/maintenance/fix-sync': route_handler.handle_fix_sync,
            }

            if path == '/api/run-optimization':
                # SSE endpoint kept on the handler: needs direct wfile streaming
                self.handle_run_optimization(data)
            elif path in data_routes:
                data_routes[path](self, data)
            elif path in plain_routes:
                plain_routes[path](self)
            else:
                self.send_json_response({'error': 'Unknown endpoint'}, 404)

        except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError):
            # Client closed the connection mid-response: not an error
            pass
        except Exception as e:
            logger.error(f"POST error: {e}")
            try:
                self.send_json_response({'error': 'Internal server error'}, 500)
            except Exception:
                # Headers may already be sent or the socket may be gone.
                # (Was a bare `except:`, which also caught KeyboardInterrupt.)
                pass

    def send_json_response(self, data: dict, status: int = 200, retry_after: Optional[int] = None):
        """Envoyer une réponse JSON"""
        try:
            response = json.dumps(data, ensure_ascii=False).encode('utf-8')
            self.send_response(status)
            self.send_header('Content-Type', 'application/json; charset=utf-8')
            self.send_header('Content-Length', len(response))

            if retry_after:
                self.send_header('Retry-After', str(retry_after))

            self.end_headers()
            self.wfile.write(response)
        except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError):
            pass

    def serve_json_file(self, filename: str, default_content):
        """Serve a JSON file from SCRIPT_DIR, creating it first if missing.

        Args:
            filename: base file name; validated against traversal/unsafe names.
            default_content: JSON-serializable value written when the file
                does not exist yet.
        """
        # Reject path traversal / unsafe names before touching the filesystem
        if not self.validator.is_safe_filename(filename):
            self.send_json_response({'error': 'Invalid filename'}, 400)
            return

        filepath = os.path.join(SCRIPT_DIR, filename)

        try:
            if not os.path.exists(filepath):
                save_json_file(filepath, default_content)

            with open(filepath, 'r', encoding='utf-8') as f:
                content = f.read()

            # Encode once (was encoded twice: once for Content-Length, once
            # for the body)
            payload = content.encode('utf-8')
            self.send_response(200)
            self.send_header('Content-Type', 'application/json; charset=utf-8')
            self.send_header('Content-Length', len(payload))
            self.end_headers()
            self.wfile.write(payload)

        except (ConnectionAbortedError, ConnectionResetError, BrokenPipeError):
            pass
        except Exception as e:
            # FIX: previous message logged literal "(unknown)" instead of the
            # file actually being served, making failures undiagnosable.
            logger.warning(f"Error serving {filename}: {e}")
            try:
                self.send_json_response({'error': str(e)}, 500)
            except Exception:
                # Headers may already be sent (was a bare `except:`)
                pass

    def handle_run_optimization(self, data: dict):
        """
        Optimization endpoint streamed over SSE (Server-Sent Events).

        Kept on the handler (not in RouteHandler) because it needs direct
        access to self.wfile for streaming.

        Flow: validate the request -> run ai_optimizer.py in a worker thread
        -> translate its stdout lines into SSE log/progress events -> emit a
        final 'result' event built from optimization_results.json.

        NOTE(review): once the SSE headers are committed below, later errors
        can no longer be reported as a plain JSON response — the except
        clauses at the bottom assume failure happened before streaming began.
        """
        try:
            # Validate the request payload (invalid input raises ValueError,
            # handled at the bottom as a 400)
            req = OptimizationRequest(**data)

            logger.info(f"Optimization: mode={req.mode}, {len(req.symbols)} symbols")

            # Make sure ai_optimizer.py exists before committing to SSE
            optimizer_path = os.path.join(SCRIPT_DIR, 'ai_optimizer.py')
            if not os.path.exists(optimizer_path):
                self.send_json_response({
                    'success': False,
                    'error': 'ai_optimizer.py not found'
                }, 404)
                return

            # Switch the response to SSE mode (headers committed from here on)
            self.send_response(200)
            self.send_header('Content-Type', 'text/event-stream')
            self.send_header('Cache-Control', 'no-cache')
            self.send_header('Connection', 'keep-alive')
            self.end_headers()

            # Map dashboard mode names onto the optimizer's CLI modes
            mode_map = {
                'quick': 'backtest',
                'grid': 'grid',
                'genetic': 'genetic',
                'full': 'full'
            }
            optimizer_mode = mode_map.get(req.mode, 'backtest')

            # Build the --symbols argument: bulk keywords for large sets,
            # otherwise an explicit comma-separated list without the USDT suffix
            if len(req.symbols) >= 50:
                symbols_arg = 'all'
            elif len(req.symbols) >= 20:
                symbols_arg = 'top25'
            else:
                symbols_arg = ','.join([s.replace('USDT', '') for s in req.symbols])

            # Command line (uses --interval 1h so data comes from historical_data/)
            cmd = [
                VENV_PYTHON, optimizer_path,
                '--mode', optimizer_mode,
                '--symbols', symbols_arg,
                '--candles', '2000',
                '--interval', '1h'  # uses historical_data/ (90 days of 1h candles)
            ]

            self._send_sse({'type': 'log', 'level': 'info', 'message': f'Mode: {req.mode.upper()}'})
            self._send_sse({'type': 'progress', 'progress': 5, 'message': 'Starting...'})

            # The worker thread pushes parsed SSE messages through this queue
            import queue
            result_queue = queue.Queue()

            def parse_optimizer_line(line: str) -> list:
                """Parse one optimizer stdout line into SSE messages to emit"""
                messages = []
                line = line.strip()
                if not line or line.startswith('+') or line.startswith('|'):
                    return messages  # skip ASCII table borders

                # Extract the content after the [XXX] tag
                def extract_content(l):
                    if ']' in l:
                        return l.split(']', 1)[-1].strip()
                    return l.strip()

                content = extract_content(line)

                # === DATABASE / HISTORICAL DATA ===
                if '[DB]' in line:
                    if 'historical_data' in line.lower() or 'Chargement' in line:
                        messages.append({'type': 'log', 'level': 'info', 'message': f"📂 {content}"})
                    elif 'klines' in line.lower() and 'depuis' in line.lower():
                        messages.append({'type': 'log', 'level': 'success', 'message': f"📊 {content}"})
                    elif 'cryptos' in line.lower() and ('chargées' in line.lower() or '/' in line):
                        messages.append({'type': 'log', 'level': 'success', 'message': f"✅ {content}"})
                        messages.append({'type': 'progress', 'progress': 25, 'message': 'Données chargées'})
                    elif 'Gap' in line or 'récupération' in line.lower():
                        messages.append({'type': 'log', 'level': 'info', 'message': f"🔄 {content}"})
                    elif 'ajoutées' in line.lower():
                        messages.append({'type': 'log', 'level': 'success', 'message': f"➕ {content}"})

                # === FETCH / DOWNLOAD ===
                elif '[FETCH]' in line:
                    messages.append({'type': 'log', 'level': 'info', 'message': f"📡 {content}"})
                    messages.append({'type': 'progress', 'progress': 15, 'message': 'Chargement données...'})

                # === CACHE ===
                elif '[CACHE]' in line:
                    if 'chargé' in line.lower() or 'valide' in line.lower():
                        messages.append({'type': 'log', 'level': 'success', 'message': f"💾 {content}"})
                    elif 'incomplet' in line.lower() or 'téléchargement' in line.lower():
                        messages.append({'type': 'log', 'level': 'warn', 'message': f"⚠️ {content}"})
                    else:
                        messages.append({'type': 'log', 'level': 'info', 'message': f"📦 {content}"})

                # === MODE / CONFIGURATION ===
                elif '[MODE]' in line:
                    messages.append({'type': 'log', 'level': 'info', 'message': f"⚙️ Mode: {content}"})

                elif '[INTERVAL]' in line:
                    messages.append({'type': 'log', 'level': 'info', 'message': f"⏱️ {content}"})

                elif '[SYMBOLS]' in line:
                    messages.append({'type': 'log', 'level': 'info', 'message': f"🪙 {content}"})

                elif '[DATA]' in line:
                    messages.append({'type': 'log', 'level': 'info', 'message': f"📊 {content}"})

                # === CANDLES / DATA LOADED ===
                elif 'bougies' in line.lower() and ('chargees' in line.lower() or 'chargées' in line.lower()):
                    messages.append({'type': 'log', 'level': 'success', 'message': f"✅ {content}"})
                    messages.append({'type': 'progress', 'progress': 30, 'message': 'Données prêtes'})

                # === GPU ===
                elif '[GPU]' in line or ('GPU' in line and 'Pret' in line):
                    messages.append({'type': 'log', 'level': 'info', 'message': f"🚀 {content}"})
                    messages.append({'type': 'progress', 'progress': 35, 'message': 'GPU prêt'})

                # === GRID SEARCH ===
                elif '[GRID]' in line:
                    if 'exploration' in line.lower() or 'combinaisons' in line.lower():
                        messages.append({'type': 'log', 'level': 'info', 'message': f"🔍 {content}"})
                        messages.append({'type': 'progress', 'progress': 40, 'message': 'Exploration GRID...'})
                    elif 'test' in line.lower() or 'progress' in line.lower():
                        messages.append({'type': 'log', 'level': 'info', 'message': f"⏳ {content}"})
                    elif 'meilleur' in line.lower() or 'best' in line.lower():
                        messages.append({'type': 'log', 'level': 'success', 'message': f"🏆 {content}"})
                        messages.append({'type': 'progress', 'progress': 80, 'message': 'Meilleur trouvé!'})
                    else:
                        messages.append({'type': 'log', 'level': 'info', 'message': f"📊 {content}"})

                # === TEST / BACKTEST ===
                elif '[TEST]' in line or 'BACKTEST' in line.upper():
                    messages.append({'type': 'log', 'level': 'info', 'message': f"📈 {content}"})
                    messages.append({'type': 'progress', 'progress': 45, 'message': 'Backtesting...'})

                # === RESULTS ===
                elif 'meilleur' in line.lower() or 'best' in line.lower():
                    messages.append({'type': 'log', 'level': 'success', 'message': f"🏆 {content}"})

                elif 'win_rate' in line.lower() or 'win rate' in line.lower() or 'winrate' in line.lower():
                    messages.append({'type': 'log', 'level': 'success', 'message': f"📊 {content}"})

                elif 'profit' in line.lower() and ('factor' in line.lower() or 'pnl' in line.lower()):
                    messages.append({'type': 'log', 'level': 'success', 'message': f"💰 {content}"})

                elif 'trades' in line.lower() and any(c.isdigit() for c in line):
                    messages.append({'type': 'log', 'level': 'info', 'message': f"📈 {content}"})

                # === SAVE ===
                elif 'sauvegarde' in line.lower() or 'saved' in line.lower() or 'saving' in line.lower():
                    messages.append({'type': 'log', 'level': 'success', 'message': f"💾 {content}"})
                    messages.append({'type': 'progress', 'progress': 90, 'message': 'Sauvegarde...'})

                # === ERRORS ===
                elif '[ERROR]' in line or 'error' in line.lower() or 'erreur' in line.lower():
                    messages.append({'type': 'log', 'level': 'error', 'message': f"❌ {line}"})

                # === WARNINGS ===
                elif '[WARN]' in line or '⚠️' in line:
                    messages.append({'type': 'log', 'level': 'warn', 'message': f"⚠️ {content}"})

                # === LINES LISTING SYMBOLS (BTC, ETH, etc.) ===
                elif 'Symboles:' in line or ('cryptos' in line.lower() and 'Top:' in line):
                    messages.append({'type': 'log', 'level': 'info', 'message': f"🪙 {line.strip()}"})

                # === generic OK / SUCCESS ===
                elif '[OK]' in line or '[✓]' in line or '✅' in line:
                    messages.append({'type': 'log', 'level': 'success', 'message': f"✅ {content}"})

                return messages

            def run_optimizer():
                # Worker thread: run the optimizer and forward parsed output
                try:
                    # Force UTF-8 output encoding on Windows subprocesses
                    env = os.environ.copy()
                    env['PYTHONIOENCODING'] = 'utf-8'
                    env['PYTHONUTF8'] = '1'

                    process = subprocess.Popen(
                        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                        text=True, cwd=SCRIPT_DIR, bufsize=1,
                        encoding='utf-8', errors='replace',
                        env=env
                    )

                    output = []
                    for line in iter(process.stdout.readline, ''):
                        if line:
                            line_stripped = line.strip()
                            output.append(line_stripped)

                            # Parse the line and forward resulting SSE messages
                            messages = parse_optimizer_line(line_stripped)
                            for msg in messages:
                                result_queue.put(msg)

                    process.wait()
                    result_queue.put({'type': 'done', 'output': output, 'code': process.returncode})

                except Exception as e:
                    result_queue.put({'type': 'error', 'message': str(e)})

            thread = threading.Thread(target=run_optimizer)
            thread.start()

            # Drain the queue and stream events to the client (5 min cap)
            import time
            progress = 10
            last_progress_time = time.time()
            timeout = 300
            start = time.time()

            while thread.is_alive() and (time.time() - start) < timeout:
                try:
                    msg = result_queue.get(timeout=0.3)  # short timeout keeps streaming responsive
                    if msg['type'] == 'progress':
                        progress = max(progress, msg.get('progress', progress))  # never move backwards
                        self._send_sse(msg)
                        last_progress_time = time.time()
                    elif msg['type'] == 'log':
                        self._send_sse(msg)
                    elif msg['type'] == 'done':
                        break
                    elif msg['type'] == 'error':
                        self._send_sse({'type': 'log', 'level': 'error', 'message': msg['message']})
                except queue.Empty:
                    # Heartbeat: nudge the progress bar only after 2s of silence
                    if time.time() - last_progress_time > 2 and progress < 85:
                        progress += 1
                        self._send_sse({'type': 'progress', 'progress': progress, 'message': 'Calcul en cours...'})
                        last_progress_time = time.time()

            thread.join(timeout=5)

            # Final result: read optimization_results.json if the optimizer
            # produced one; otherwise fall back to the current config and
            # placeholder metrics.
            self._send_sse({'type': 'progress', 'progress': 95, 'message': 'Finalizing...'})

            result_file = os.path.join(SCRIPT_DIR, 'optimization_results.json')
            best_config = req.currentConfig or {}
            # Fallback metrics used when the results file is missing/invalid
            metrics = {'winRate': 60.0, 'profitFactor': 1.5, 'totalPnL': 10.0, 'trades': 50}

            if os.path.exists(result_file):
                try:
                    results_data = load_json_file(result_file)
                    if results_data and 'best_params' in results_data:
                        params = results_data['best_params']
                        # Defaults below mirror the optimizer's own defaults
                        # — TODO confirm against ai_optimizer.py
                        best_config = {
                            'STOP_LOSS_PERCENT': params.get('stop_loss', 4.5),
                            'TAKE_PROFIT_PERCENT': params.get('take_profit', 6.8),
                            'RSI_PERIOD': params.get('rsi_period', 17),
                            'RSI_OVERSOLD': params.get('rsi_oversold', 24.9),
                            'RSI_OVERBOUGHT': params.get('rsi_overbought', 60.6),
                            'EMA_SHORT': params.get('ema_short', 7),
                            'EMA_LONG': params.get('ema_long', 25),
                            'BB_PERIOD': params.get('bb_period', 18),
                            'BB_STD': params.get('bb_std', 2.6),
                            'REQUIRED_SIGNALS': 2
                        }
                    if 'metrics' in results_data:
                        m = results_data['metrics']
                        metrics = {
                            'winRate': m.get('win_rate', 60.0),
                            'profitFactor': m.get('profit_factor', 1.5),
                            'totalPnL': m.get('total_pnl', 10.0),
                            'trades': m.get('total_trades', 50)
                        }
                except Exception as e:
                    logger.error(f"Error reading results: {e}")

            self._send_sse({
                'type': 'result',
                'results': {
                    'winRate': metrics['winRate'],
                    'profitFactor': metrics['profitFactor'],
                    'totalPnL': metrics['totalPnL'],
                    'trades': metrics['trades'],
                    'bestConfig': best_config
                }
            })

            logger.info("Optimization completed")

        except ValueError as e:
            # Request validation failed before any streaming began
            self.send_json_response({
                'success': False,
                'error': f'Validation error: {str(e)}'
            }, 400)
        except Exception as e:
            logger.error(f"Optimization error: {e}")
            self.send_json_response({'success': False, 'error': str(e)}, 500)

    def _send_sse(self, data: dict):
        """Envoyer un événement SSE"""
        try:
            message = f"data: {json.dumps(data)}\n\n"
            self.wfile.write(message.encode('utf-8'))
            self.wfile.flush()
        except Exception as e:
            logger.error(f"SSE error: {e}")

    def log_message(self, fmt, *args):
        """Log HTTP requests at debug level, skipping 200/304 responses."""
        if len(args) >= 2:
            code = str(args[1])
            if '200' in code or '304' in code:
                return
        logger.debug(f"HTTP: {args[0] if args else ''}")


def run_server():
    """Lancer le serveur avec gestion d'erreurs - VERSION MULTI-THREADED (pool limité)

    Serves forever on 127.0.0.1:PORT with a bounded pool of 20 worker
    threads (prevents memory saturation under connection bursts).
    Retries transient errors; Ctrl+C exits cleanly and the pool is
    shut down on the way out.
    """
    import concurrent.futures
    import errno
    import time
    from http.server import ThreadingHTTPServer

    # Limit the number of simultaneous threads to avoid memory saturation.
    _pool = concurrent.futures.ThreadPoolExecutor(max_workers=20)

    class BoundedThreadingHTTPServer(ThreadingHTTPServer):
        """ThreadingHTTPServer with a bounded thread pool (max 20 workers)."""
        allow_reuse_address = True

        def process_request(self, request, client_address):
            try:
                _pool.submit(self.process_request_thread, request, client_address)
            except Exception:
                # Pool rejected the task (e.g. shutting down): fail the request.
                self.handle_error(request, client_address)
                self.shutdown_request(request)

    try:
        while True:
            try:
                with BoundedThreadingHTTPServer(("127.0.0.1", PORT), DashboardAPIHandler) as httpd:
                    logger.info("=" * 60)
                    logger.info("API Server v2.0 running! (Thread pool: max 20 workers)")
                    logger.info("=" * 60)
                    logger.info("Endpoints:")
                    logger.info("   GET  /api/get-config       - Read config")
                    logger.info("   POST /api/apply-config     - Apply config")
                    logger.info("   POST /api/run-optimization - Run optimization")
                    logger.info("   POST /api/sell-all         - Sell all positions")
                    logger.info("   POST /api/restart-bot      - Restart bot")
                    logger.info("   POST /api/save-watchlist   - Save watchlist")
                    logger.info("=" * 60)
                    httpd.serve_forever()

            except KeyboardInterrupt:
                logger.info("\nServer stopped by user (Ctrl+C)")
                break
            except OSError as e:
                # Locale-independent "port in use" check: EADDRINUSE on POSIX,
                # WSAEADDRINUSE (10048) on Windows. Keep the string test as a
                # fallback for wrapped errors.
                if e.errno in (errno.EADDRINUSE, 10048) or "Address already in use" in str(e):
                    logger.warning(f"Port {PORT} in use, retrying in 5s...")
                    time.sleep(5)
                else:
                    logger.error(f"OSError: {e}")
                    time.sleep(3)
            except Exception as e:
                logger.error(f"Server error: {e}")
                time.sleep(3)
    finally:
        # Release worker threads when the serve loop exits.
        _pool.shutdown(wait=False)


def init_crypto_cache():
    """Warm up the crypto cache from disk, then start the periodic refresh.

    No-op when the crypto fetcher module is unavailable.
    """
    if not CRYPTO_FETCHER_AVAILABLE:
        return

    try:
        # Pre-load the on-disk cache right away (no sleep) so the very first
        # JS call to /api/crypto-summary always finds data.
        fetcher = get_fetcher(use_testnet=False)
        # The disk cache was loaded in __init__ -> mark it as fresh for 2 min
        # to avoid a blocking inline fetch in handle_crypto_summary at startup.
        if fetcher.cache.get('symbols'):
            from datetime import datetime, timedelta
            fetcher.last_update = datetime.now() - timedelta(minutes=1)
            count_disk = len(fetcher.cache.get('symbols', {}))
            logger.info(f"Cache disque pre-chargé: {count_disk} cryptos (TTL 2min)")
        # Start the background auto-refresh (every 120s); the first cycle
        # forces a genuine full refresh after 120s.
        fetcher.start_auto_update(interval_seconds=120)
        logger.info("Crypto cache auto-update started (120s interval)")
        # Full initial fetch (force=True) performed in this same thread.
        logger.info("Initialisation du cache crypto (228 coins)...")
        asyncio.run(fetcher.fetch_all_data(force=True))
        count = len(fetcher.cache.get('symbols', {}))
        logger.info(f"Cache initialized: {count} cryptos")
    except Exception as e:
        logger.warning(f"Cache init error: {e}")


def start_trading_bot():
    """Spawn the trading bot (trading_bot.py) as a detached background process."""
    import time
    time.sleep(1)

    bot_script = os.path.join(SCRIPT_DIR, 'trading_bot.py')
    if not os.path.exists(bot_script):
        logger.warning(f"Bot script not found: {bot_script}")
        return

    try:
        # Windows creation flags: hidden console, detached, own process group.
        CREATE_NO_WINDOW = 0x08000000
        DETACHED_PROCESS = 0x00000008
        CREATE_NEW_PROCESS_GROUP = 0x00000200

        cmd = [VENV_PYTHON, '-u', bot_script]
        if sys.platform == 'win32':
            subprocess.Popen(
                cmd,
                creationflags=CREATE_NO_WINDOW | DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP,
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
                stdin=subprocess.DEVNULL, start_new_session=True,
                cwd=SCRIPT_DIR
            )
        else:
            subprocess.Popen(
                cmd,
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
                cwd=SCRIPT_DIR, start_new_session=True
            )

        logger.info("Trading bot started")
    except Exception as e:
        logger.warning(f"Error starting bot: {e}")


def open_browser():
    """After a short delay, open the dashboard page in the default browser."""
    import time
    time.sleep(2)
    dashboard_url = f"http://localhost:{PORT}/dashboard.html"
    logger.info(f"Opening browser: {dashboard_url}")
    webbrowser.open(dashboard_url)


def fetch_klines_for_ai(symbol: str, interval: str, limit: int) -> list:
    """Fetch candlestick (kline) data for the AI from Binance's public REST API.

    Returns the decoded JSON list on HTTP 200, or an empty list on any
    non-200 status or request failure.
    """
    import requests
    try:
        response = requests.get(
            "https://api.binance.com/api/v3/klines",
            params={"symbol": symbol, "interval": interval, "limit": limit},
            timeout=10,
        )
        if response.status_code == 200:
            return response.json()
        return []
    except Exception as e:
        logger.warning(f"Erreur fetch klines {symbol}: {e}")
        return []


def start_ai_surveillance():
    """Start the AI surveillance service.

    Builds the symbol list from watchlist.json (base symbols plus the
    auto_added / spy_injected entries), falls back to a hard-coded list of
    popular pairs, then wires the AI predictor and surveillance singletons
    to poll Binance klines every 30 seconds.
    """
    if not AI_PREDICTOR_AVAILABLE:
        logger.warning("⚠️ Service de surveillance IA non disponible")
        return

    try:
        # Load the watchlist. Force UTF-8: without it, open() would use the
        # locale codepage (cp1252 on Windows) and could fail on non-ASCII JSON.
        watchlist_file = os.path.join(SCRIPT_DIR, "watchlist.json")
        symbols = []
        if os.path.exists(watchlist_file):
            with open(watchlist_file, 'r', encoding='utf-8') as f:
                data = json.load(f)
            symbols = data.get('symbols', [])
            # Also include auto_added (USDC spy) and spy_injected symbols.
            auto_added = list(data.get('auto_added', {}).keys())
            spy_injected = list(data.get('spy_injected', {}).keys())
            # dict.fromkeys deduplicates while preserving insertion order.
            symbols = list(dict.fromkeys(symbols + auto_added + spy_injected))

        if not symbols:
            # Fallback: popular symbols
            symbols = ['BTCUSDT', 'ETHUSDT', 'BNBUSDT', 'SOLUSDT', 'XRPUSDT',
                       'DOGEUSDT', 'ADAUSDT', 'AVAXUSDT', 'DOTUSDT', 'MATICUSDT']

        # === FAST POLLING MODE (every 30 seconds) ===
        # Note: a real-time WebSocket mode exists but requires a shared-process
        # architecture to work correctly; fast polling works well for now.

        # Configure the AIPredictor (global singleton).
        ai_predictor = get_ai_predictor()
        ai_predictor.set_klines_fetcher(fetch_klines_for_ai)
        # Note: binance_client stays None — klines are fetched over HTTP
        # via fetch_klines_for_ai.

        # Configure the surveillance service.
        surveillance = get_surveillance_service()
        surveillance.klines_fetcher = fetch_klines_for_ai
        surveillance.set_symbols(symbols)
        # 30s (down from 5s) to limit LSTM memory consumption.
        surveillance.update_interval = 30

        # Start polling.
        surveillance.start()
        logger.info(f"🧠 Service de surveillance IA démarré - {len(symbols)} symboles")
        logger.info(f"   Mode: Polling toutes les 30 secondes")

    except Exception as e:
        logger.error(f"Erreur démarrage surveillance IA: {e}")


def ensure_market_spy_running():
    """Ensure the Market Spy is running (exactly ONE VENV instance).

    Scans running processes: kills any market_spy launched with the system
    Python and any duplicate VENV instance, then launches market_spy.py as a
    detached daemon if no VENV instance survived the scan.
    """
    import time
    time.sleep(8)  # Wait for the server to start first

    try:
        import psutil
        venv_python_lower = VENV_PYTHON.lower()

        # 🔧 DOUBLE-INSTANCE FIX: kill every SYSTEM-Python market_spy (non-venv)
        # and detect already-active VENV instances.
        venv_spy_running = False
        for proc in psutil.process_iter(['pid', 'cmdline', 'cwd']):
            try:
                cmdline_parts = proc.info['cmdline'] or []
                cmdline = ' '.join(cmdline_parts).lower()
                if 'market_spy' not in cmdline or 'python' not in cmdline:
                    continue
                # Ignore spies from other directories (e.g. crypto_trading_prod)
                proc_cwd = (proc.info.get('cwd') or '').rstrip('/')
                if proc_cwd and proc_cwd != SCRIPT_DIR.rstrip('/'):
                    continue
                exe = cmdline_parts[0].lower() if cmdline_parts else ''
                if '.venv' in exe or 'venv' in exe or exe == venv_python_lower:
                    # VENV instance — keep the first one, kill any duplicates
                    if venv_spy_running:
                        logger.info(f"   🔧 Double VENV market_spy tué (PID: {proc.info['pid']})")
                        proc.kill()
                    else:
                        venv_spy_running = True
                        logger.info(f"✅ Market Spy VENV déjà actif (PID: {proc.info['pid']})")
                else:
                    # SYSTEM-Python instance — always kill
                    logger.info(f"   🔧 SYSTEM Python market_spy tué (PID: {proc.info['pid']})")
                    proc.kill()
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                # Process vanished or is protected — skip it
                continue

        if venv_spy_running:
            return

        # Launch market_spy.py in continuous mode
        script_path = os.path.join(SCRIPT_DIR, "market_spy.py")
        if os.path.exists(script_path):
            python_exe = VENV_PYTHON  # Always .venv — never sys.executable
            daemon_log = os.path.join(SCRIPT_DIR, "market_spy_daemon.log")

            if sys.platform == 'win32':
                # Detached, windowless child; stdout/stderr go to the daemon log
                with open(daemon_log, 'a', encoding='utf-8') as log_file:
                    proc = subprocess.Popen(
                        [python_exe, script_path],
                        creationflags=subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.CREATE_NO_WINDOW,
                        stdout=log_file,
                        stderr=log_file,
                        cwd=SCRIPT_DIR
                    )
            else:
                # POSIX: start_new_session detaches the child from this process group
                with open(daemon_log, 'a', encoding='utf-8') as log_file:
                    proc = subprocess.Popen(
                        [python_exe, script_path],
                        stdout=log_file,
                        stderr=log_file,
                        cwd=SCRIPT_DIR,
                        start_new_session=True
                    )

            logger.info(f"🕵️ Market Spy démarré automatiquement (PID: {proc.pid})")
            logger.info(f"   • Scan: toutes les 12s | Max positions: 3")
            logger.info(f"   • Logs: {daemon_log}")
        else:
            logger.warning("⚠️ market_spy.py non trouvé")

    except Exception as e:
        logger.error(f"Erreur démarrage Market Spy: {e}")


def ensure_auto_updater_running():
    """Ensure the auto_updater service is running; launch it if it is not.

    Reads auto_updater.pid and verifies that the recorded PID belongs to a
    live python process. A missing, stale, or corrupt PID file must never
    block startup: any failure in the check falls through to launching the
    daemon (the original code aborted on a corrupt PID file).
    """
    import time
    time.sleep(5)  # Wait for the server to start first

    try:
        pid_file = os.path.join(SCRIPT_DIR, "auto_updater.pid")

        # Already running? Verify the recorded PID points at a python process.
        if os.path.exists(pid_file):
            try:
                with open(pid_file, 'r') as f:
                    pid = int(f.read().strip())

                import psutil
                if psutil.pid_exists(pid):
                    proc = psutil.Process(pid)
                    if proc.is_running() and 'python' in proc.name().lower():
                        logger.info(f"✅ Auto-Updater déjà actif (PID: {pid})")
                        return
            except Exception:
                # Corrupt PID file or psutil lookup failure: treat as
                # "not running" and (re)launch the daemon below.
                pass

        # Launch auto_updater_service.py in daemon mode
        script_path = os.path.join(SCRIPT_DIR, "auto_updater_service.py")
        if os.path.exists(script_path):
            python_exe = VENV_PYTHON  # Always .venv — never sys.executable

            # Dedicated log file for the detached daemon process
            daemon_log = os.path.join(SCRIPT_DIR, "auto_updater_daemon.log")

            if sys.platform == 'win32':
                # On Windows, CREATE_NEW_PROCESS_GROUP detaches the process
                # and CREATE_NO_WINDOW keeps it hidden.
                with open(daemon_log, 'a', encoding='utf-8') as log_file:
                    proc = subprocess.Popen(
                        [python_exe, script_path, "--daemon"],
                        creationflags=subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.CREATE_NO_WINDOW,
                        stdout=log_file,
                        stderr=log_file,
                        cwd=SCRIPT_DIR
                    )
            else:
                # POSIX: start_new_session detaches the child from this group
                with open(daemon_log, 'a', encoding='utf-8') as log_file:
                    proc = subprocess.Popen(
                        [python_exe, script_path, "--daemon"],
                        stdout=log_file,
                        stderr=log_file,
                        cwd=SCRIPT_DIR,
                        start_new_session=True
                    )

            logger.info(f"🔄 Auto-Updater démarré automatiquement (PID: {proc.pid})")
            logger.info(f"   • Sync rapide: 1h | MAJ données: 6h | IA training: 24h")
            logger.info(f"   • Logs: {daemon_log}")
        else:
            logger.warning("⚠️ auto_updater_service.py non trouvé")

    except Exception as e:
        logger.error(f"Erreur démarrage Auto-Updater: {e}")


if __name__ == "__main__":
    import gc

    def memory_watchdog():
        """Monitor memory consumption — restart the process if it grows too large."""
        import time, os, sys
        try:
            import psutil
            proc = psutil.Process()
        except ImportError:
            # psutil unavailable: keep looping anyway for the periodic gc.collect()
            proc = None

        # Consecutive minutes spent above the 1.2 GB warning threshold
        warn_count = 0
        while True:
            try:
                time.sleep(60)
                gc.collect()
                if proc:
                    rss_mb = proc.memory_info().rss / 1024 / 1024
                    if rss_mb > 800:
                        logger.info(f"💾 Mémoire: {rss_mb:.0f} MB")
                    if rss_mb > 1200:
                        logger.warning(f"⚠️ Mémoire élevée: {rss_mb:.0f} MB — gc.collect() forcé")
                        gc.collect(2)
                        warn_count += 1
                    else:
                        warn_count = 0
                    # Auto-restart if > 2 GB, or > 1.2 GB for 10 consecutive minutes
                    if rss_mb > 2048 or warn_count >= 10:
                        logger.error(
                            f"🔴 MÉMOIRE CRITIQUE: {rss_mb:.0f} MB — redémarrage automatique du dashboard"
                        )
                        time.sleep(1)
                        # Replace the current process image with a fresh one
                        os.execv(sys.executable, [sys.executable] + sys.argv)
            except Exception:
                # Best-effort watchdog: never let a monitoring error kill the loop
                pass

    # Memory watchdog thread (checks every 60s)
    mem_thread = threading.Thread(target=memory_watchdog, daemon=True, name="MemoryWatchdog")
    mem_thread.start()

    # Start the background threads
    crypto_thread = threading.Thread(target=init_crypto_cache, daemon=True)
    crypto_thread.start()

    # Start the AI surveillance service (refreshes data every 30s)
    ai_thread = threading.Thread(target=start_ai_surveillance, daemon=True)
    ai_thread.start()

    # Make sure the auto-updater is active (data + AI updates)
    updater_thread = threading.Thread(target=ensure_auto_updater_running, daemon=True)
    updater_thread.start()

    # Start Market Spy automatically
    spy_thread = threading.Thread(target=ensure_market_spy_running, daemon=True)
    spy_thread.start()

    # Disabled: the bot is launched by launch_all.py, no need to relaunch it here
    # bot_thread = threading.Thread(target=start_trading_bot, daemon=True)
    # bot_thread.start()

    # Disabled: no longer open the browser automatically
    # browser_thread = threading.Thread(target=open_browser, daemon=True)
    # browser_thread.start()

    # Start the server (blocking call)
    run_server()
