"""
HTTP Routes Handler
Separation of HTTP handling from business logic
"""

import json
import asyncio
import logging
import requests
import os
from typing import Dict, Any, Optional
from urllib.parse import urlparse
from datetime import datetime, timedelta, timezone

from .models import (
    TradingConfig, OptimizationRequest, SettingsUpdate,
    ForceCloseRequest, WatchlistRequest, APIResponse
)
from .services import ConfigService, TradingService, BotService
from .security import AuthMiddleware, SecurityValidator


def _is_blocked_until_active(blocked_until_str):
    """Return True only if blocked_until is set AND still in the future.

    Args:
        blocked_until_str: Timestamp string (ISO 8601, or any format
            python-dateutil can parse), or a falsy value meaning "not blocked".

    Returns:
        bool: True while the block is still active; False otherwise,
        including on any parse error (fail-open by design so a bad
        timestamp never locks the feature).
    """
    if not blocked_until_str:
        return False
    try:
        try:
            # Preferred path: dateutil accepts arbitrary timestamp formats.
            from dateutil.parser import parse as _dtparse
            bt = _dtparse(str(blocked_until_str))
        except ImportError:
            # BUGFIX: previously a missing dateutil made every blocked_until
            # look inactive. Fall back to stdlib ISO 8601 parsing.
            bt = datetime.fromisoformat(str(blocked_until_str))
        # Compare with an aware "now" when bt carries a timezone, naive otherwise
        # (naive vs aware datetimes cannot be compared directly).
        now_aware = datetime.now(bt.tzinfo) if bt.tzinfo else datetime.now()
        return now_aware < bt
    except Exception:
        # Unparseable value: treat as not blocked rather than crash the route.
        return False
from .utils import load_json_file, save_json_file, async_to_sync

logger = logging.getLogger(__name__)

# 🚀 In-memory server cache — avoids re-reading the JSON files on every request
# Structure: { 'endpoint_key': {'data': ..., 'ts': float} }
import time as _time
_SERVER_CACHE: Dict[str, Any] = {}

def _cache_get(key: str, ttl: float = 10.0):
    """Retourner la valeur du cache si encore valide, sinon None."""
    entry = _SERVER_CACHE.get(key)
    if entry and (_time.time() - entry['ts']) < ttl:
        return entry['data']
    return None

def _cache_set(key: str, data: Any):
    """Store *data* under *key*, stamped with the current time.

    Also performs opportunistic eviction: once the cache holds more than
    100 keys, every entry older than 60 seconds is dropped.
    """
    _SERVER_CACHE[key] = {'data': data, 'ts': _time.time()}
    if len(_SERVER_CACHE) > 100:
        cutoff = _time.time()
        stale_keys = [k for k, entry in _SERVER_CACHE.items() if cutoff - entry['ts'] > 60]
        for stale in stale_keys:
            del _SERVER_CACHE[stale]

# 🚀 Module-level import to avoid ~2s of latency per request
try:
    from ai_predictor import get_surveillance_service
    _surveillance_service_getter = get_surveillance_service
except ImportError:
    _surveillance_service_getter = None


class RouteHandler:
    """Gestionnaire centralisé des routes"""

    def __init__(
        self,
        script_dir: str,
        config_service: ConfigService,
        trading_service: TradingService,
        bot_service: BotService,
        auth_middleware: AuthMiddleware
    ):
        """Wire the route handler up with its collaborating services.

        Args:
            script_dir: Base directory holding the bot's data files
                (positions.json, watchlist.json, *.log, config.py, ...).
            config_service: Configuration read/write service.
            trading_service: Trading operations facade.
            bot_service: Bot lifecycle control service.
            auth_middleware: Request authentication layer.
        """
        self.script_dir = script_dir
        self.config_service = config_service
        self.trading_service = trading_service
        self.bot_service = bot_service
        self.auth_middleware = auth_middleware
        # Fresh validator instance owned by this handler.
        self.validator = SecurityValidator()

    def handle_health(self, request_handler) -> None:
        """GET /api/health - Health check endpoint

        Replies with a small liveness payload; any unexpected failure is
        reported as a 500 with the error message.
        """
        try:
            payload = {
                'status': 'ok',
                'timestamp': datetime.now().isoformat(),
                'service': 'dashboard_api_server'
            }
            request_handler.send_json_response(payload)
        except Exception as e:
            logger.error(f"Error in health check: {e}")
            request_handler.send_json_response({'status': 'error', 'error': str(e)}, 500)

    def handle_maintenance_health(self, request_handler) -> None:
        """GET /api/maintenance/health - Full maintenance health check"""
        try:
            import glob
            
            # Collect metrics
            metrics = {
                'positions': 0,
                'trades': 0,
                'logs_kb': 0,
                'processes': 0
            }
            
            output_lines = []
            status = 'ok'
            
            # Check positions.json
            positions_file = os.path.join(self.script_dir, 'positions.json')
            if os.path.exists(positions_file):
                try:
                    with open(positions_file, 'r') as f:
                        positions = json.load(f)
                        metrics['positions'] = len(positions) if isinstance(positions, dict) else 0
                    output_lines.append(f"✅ positions.json: {metrics['positions']} positions")
                except:
                    output_lines.append("⚠️ positions.json: Erreur de lecture")
                    status = 'warning'
            else:
                output_lines.append("ℹ️ positions.json: Non trouvé")
            
            # Check trade_history.json
            history_file = os.path.join(self.script_dir, 'trade_history.json')
            if os.path.exists(history_file):
                try:
                    with open(history_file, 'r') as f:
                        history = json.load(f)
                        metrics['trades'] = len(history) if isinstance(history, list) else 0
                    output_lines.append(f"✅ trade_history.json: {metrics['trades']} trades")
                except:
                    output_lines.append("⚠️ trade_history.json: Erreur de lecture")
                    status = 'warning'
            else:
                output_lines.append("ℹ️ trade_history.json: Non trouvé")
            
            # Check log files size
            log_files = glob.glob(os.path.join(self.script_dir, '*.log'))
            total_log_kb = 0
            for log_file in log_files:
                try:
                    total_log_kb += os.path.getsize(log_file) / 1024
                except:
                    pass
            metrics['logs_kb'] = round(total_log_kb, 1)
            output_lines.append(f"📄 Logs: {metrics['logs_kb']} KB")
            
            # Check Python processes
            try:
                import subprocess
                result = subprocess.run(
                    ['powershell', '-Command', '(Get-Process python -ErrorAction SilentlyContinue | Measure-Object).Count'],
                    capture_output=True, text=True, timeout=5
                )
                metrics['processes'] = int(result.stdout.strip()) if result.stdout.strip().isdigit() else 0
                output_lines.append(f"🐍 Processus Python: {metrics['processes']}")
            except:
                output_lines.append("⚠️ Processus: Impossible de vérifier")
            
            # Check config.py
            config_file = os.path.join(self.script_dir, 'config.py')
            if os.path.exists(config_file):
                output_lines.append("✅ config.py: OK")
            else:
                output_lines.append("❌ config.py: MANQUANT")
                status = 'error'
            
            # Check watchlist.json
            watchlist_file = os.path.join(self.script_dir, 'watchlist.json')
            if os.path.exists(watchlist_file):
                try:
                    with open(watchlist_file, 'r') as f:
                        watchlist = json.load(f)
                        symbols_count = len(watchlist.get('symbols', []))
                    output_lines.append(f"✅ watchlist.json: {symbols_count} symboles")
                except:
                    output_lines.append("⚠️ watchlist.json: Erreur de lecture")
            else:
                output_lines.append("⚠️ watchlist.json: Non trouvé")
            
            request_handler.send_json_response({
                'status': status,
                'metrics': metrics,
                'output': '\n'.join(output_lines),
                'timestamp': datetime.now().isoformat()
            })
            
        except Exception as e:
            logger.error(f"Error in maintenance health: {e}")
            request_handler.send_json_response({
                'status': 'error',
                'error': str(e),
                'metrics': {'positions': 0, 'trades': 0, 'logs_kb': 0, 'processes': 0},
                'output': f'Erreur: {str(e)}'
            }, 500)

    def handle_binance_sync(self, request_handler) -> None:
        """GET /api/maintenance/binance-sync - Check Binance sync status"""
        try:
            output_lines = []
            
            # Load local positions
            positions_file = os.path.join(self.script_dir, 'positions.json')
            local_positions = {}
            if os.path.exists(positions_file):
                try:
                    with open(positions_file, 'r') as f:
                        local_positions = json.load(f)
                except:
                    pass
            
            local_count = len(local_positions) if isinstance(local_positions, dict) else 0
            output_lines.append(f"📁 Positions locales: {local_count}")
            
            # Try to get Binance positions
            binance_count = 0
            mode = 'UNKNOWN'
            
            try:
                from binance.client import Client
                from binance.exceptions import BinanceAPIException
                import config
                
                # Use correct config variable names
                api_key = getattr(config, 'BINANCE_API_KEY', None) or getattr(config, 'API_KEY', None)
                api_secret = getattr(config, 'BINANCE_API_SECRET', None) or getattr(config, 'API_SECRET', None)
                testnet_mode = getattr(config, 'TESTNET_MODE', True)
                
                if not api_key or not api_secret:
                    output_lines.append("⚠️ Clés API Binance non configurées")
                    mode = 'NO_API'
                else:
                    client = Client(api_key, api_secret, testnet=testnet_mode, requests_params={'timeout': 15})
                    # 🔧 FIX: Synchroniser l'horloge avec le serveur Binance
                    # recvWindow ne protège PAS contre un timestamp dans le futur (>1000ms ahead = rejeté)
                    try:
                        st = client.get_server_time()
                        client.timestamp_offset = st['serverTime'] - int(time.time() * 1000)
                        if abs(client.timestamp_offset) > 500:
                            logger.info(f"⏰ Binance sync: offset={client.timestamp_offset}ms")
                    except Exception as te:
                        logger.warning(f"⚠️ Time sync failed: {te}")
                
                    # Try FUTURES first
                    try:
                        futures_positions = client.futures_position_information()
                        active_futures = [p for p in futures_positions if float(p.get('positionAmt', 0)) != 0]
                        binance_count = len(active_futures)
                        mode = 'FUTURES'
                        output_lines.append(f"🔗 Positions Binance FUTURES: {binance_count}")
                    except BinanceAPIException as be:
                        # Fallback to SPOT (permissions FUTURES manquantes = normal)
                        if be.code == -2015:
                            # Erreur de permissions - utiliser SPOT
                            try:
                                account = client.get_account()
                                balances = account.get('balances', [])
                                
                                # Liste des tokens à ignorer (stablecoins, tokens de test, fiat)
                                ignored_assets = [
                                    'USDT', 'BUSD', 'USDC', 'DAI', 'USD', 'EUR', 'BRL', 'ARS', 'TRY', 
                                    'PLN', 'RON', 'CZK', 'MXN', 'COP', 'JPY', 'ZAR', 'UAH', 'IDR',
                                    '456', '这是测试币'  # Tokens de test testnet
                                ]
                                
                                # Filtrer les balances actives significatives
                                active_spot = []
                                for b in balances:
                                    asset = b['asset']
                                    total = float(b.get('free', 0)) + float(b.get('locked', 0))
                                    
                                    # Ignorer les tokens listés et les balances < 0.001
                                    if asset in ignored_assets or total < 0.001:
                                        continue
                                    
                                    # Pour le testnet, ignorer aussi les balances très faibles ou suspectes
                                    # (nombre magique 18446 = probablement des tokens de test corrompus)
                                    if testnet_mode and (total > 10000 or int(total) == 18446):
                                        continue
                                    
                                    active_spot.append(b)
                                
                                binance_count = len(active_spot)
                                mode = 'SPOT'
                                output_lines.append(f"🔗 Balances SPOT significatives: {binance_count}")
                                if binance_count > 0:
                                    output_lines.append(f"   (Restes de trades: {', '.join([b['asset'] for b in active_spot[:5]])}{'...' if binance_count > 5 else ''})")
                                output_lines.append(f"💡 Mode: SPOT (permissions FUTURES désactivées)")
                            except Exception as spot_e:
                                output_lines.append(f"❌ Erreur API SPOT: {str(spot_e)[:80]}")
                                mode = 'ERROR'
                        else:
                            # Autre erreur Binance
                            output_lines.append(f"⚠️ Erreur Binance ({be.code}): {be.message[:80]}")
                            mode = 'ERROR'
                    except Exception as e:
                        # Autre exception (non Binance)
                        try:
                            # Essayer SPOT directement
                            account = client.get_account()
                            balances = account.get('balances', [])
                            
                            # Même filtrage que ci-dessus
                            ignored_assets = [
                                'USDT', 'BUSD', 'USDC', 'DAI', 'USD', 'EUR', 'BRL', 'ARS', 'TRY', 
                                'PLN', 'RON', 'CZK', 'MXN', 'COP', 'JPY', 'ZAR', 'UAH', 'IDR',
                                '456', '这是测试币'
                            ]
                            
                            active_spot = []
                            for b in balances:
                                asset = b['asset']
                                total = float(b.get('free', 0)) + float(b.get('locked', 0))
                                
                                if asset in ignored_assets or total < 0.001:
                                    continue
                                
                                if testnet_mode and (total > 10000 or int(total) == 18446):
                                    continue
                                
                                active_spot.append(b)
                            
                            binance_count = len(active_spot)
                            mode = 'SPOT'
                            output_lines.append(f"🔗 Balances SPOT significatives: {binance_count}")
                            if binance_count > 0:
                                output_lines.append(f"   (Restes: {', '.join([b['asset'] for b in active_spot[:5]])}{'...' if binance_count > 5 else ''})")
                            output_lines.append(f"💡 Mode: SPOT (FUTURES non disponible)")
                        except:
                            output_lines.append(f"⚠️ Erreur API: {str(e)[:80]}")
                            mode = 'ERROR'
                
            except ImportError:
                output_lines.append("⚠️ Module binance non installé")
                mode = 'NO_API'
            except BinanceAPIException as be:
                if be.code == -2015:
                    output_lines.append(f"⚠️ Clés API invalides ou permissions insuffisantes")
                    output_lines.append(f"💡 Vérifie tes clés dans config.py")
                else:
                    output_lines.append(f"⚠️ Erreur Binance ({be.code}): {be.message[:80]}")
                mode = 'ERROR'
            except Exception as e:
                output_lines.append(f"⚠️ Erreur connexion: {str(e)[:80]}")
                mode = 'ERROR'
            
            # Calculate differences
            # En mode SPOT: seules les positions locales comptent comme "vraies positions"
            # Les balances SPOT sont des restes de trades, pas des désynchronisations
            if mode == 'SPOT':
                # En SPOT, on ne peut pas vraiment comparer (différentes natures)
                # Désynchronisation = uniquement si positions locales sans crypto correspondante
                if local_count == 0 and binance_count > 0:
                    issues = 0  # Normal: restes de trades passés
                    output_lines.append(f"ℹ️ {binance_count} crypto(s) en wallet sont des restes de trades passés")
                    output_lines.append("✅ Aucune position active - Synchronisation correcte")
                elif local_count > 0 and binance_count == 0:
                    issues = local_count  # Problème: positions trackées mais pas de crypto
                    output_lines.append(f"⚠️ {issues} position(s) trackée(s) mais absente(s) du wallet")
                else:
                    # Cas complexe: il faudrait vérifier symbole par symbole
                    issues = 0
                    output_lines.append(f"ℹ️ {local_count} position(s) trackée(s), {binance_count} crypto(s) en wallet")
                    output_lines.append("💡 Vérification détaillée nécessaire pour comparaison exacte")
            elif mode == 'FUTURES':
                # En FUTURES, comparaison directe possible
                issues = abs(local_count - binance_count)
                if issues > 0:
                    output_lines.append(f"⚠️ {issues} désynchronisation(s) détectée(s)")
                else:
                    output_lines.append("✅ Synchronisation parfaite")
            else:
                issues = 0
            
            # Message final supprimé - déjà inclus dans la logique ci-dessus
            
            request_handler.send_json_response({
                'status': 'ok' if issues == 0 else 'warning',
                'local_count': local_count,
                'binance_count': binance_count,
                'issues': issues,
                'mode': mode,
                'output': '\n'.join(output_lines),
                'timestamp': datetime.now().isoformat()
            })
            
        except Exception as e:
            logger.error(f"Error in binance sync: {e}")
            request_handler.send_json_response({
                'status': 'error',
                'error': str(e),
                'output': f'Erreur: {str(e)}'
            }, 500)

    def handle_explain_sync(self, request_handler) -> None:
        """POST /api/maintenance/explain-sync - Explain sync differences

        Returns a static, human-readable explanation of why local positions
        and Binance balances may differ, plus the available actions.
        """
        try:
            report = [
                "📋 ANALYSE DE SYNCHRONISATION",
                "=" * 40,
                "",
                "💡 Mode actuel: SPOT (permissions FUTURES manquantes)",
                "• Vérifier: Compare positions locales vs Binance",
                "• Expliquer: Détaille pourquoi il y a des différences (tokens test, etc.)",
                "• Corriger: Synchronise les positions réelles si besoin",
                "",
                "ℹ️ Différences possibles:",
                "  - Tokens de test (non tradables)",
                "  - Positions fermées manuellement sur Binance",
                "  - Fichier positions.json désynchronisé",
                "",
                "🔧 Pour corriger:",
                "  1. Cliquez sur 'Corriger' pour synchro automatique",
                "  2. Ou éditez positions.json manuellement",
            ]

            request_handler.send_json_response({
                'status': 'ok',
                'output': '\n'.join(report),
                'timestamp': datetime.now().isoformat()
            })

        except Exception as e:
            logger.error(f"Error in explain sync: {e}")
            request_handler.send_json_response({
                'status': 'error',
                'error': str(e),
                'output': f'Erreur: {str(e)}'
            }, 500)

    def handle_fix_sync(self, request_handler) -> None:
        """POST /api/maintenance/fix-sync - Fix sync issues"""
        try:
            output_lines = []
            output_lines.append("🔧 CORRECTION DE SYNCHRONISATION")
            output_lines.append("=" * 40)
            
            # Load local positions
            positions_file = os.path.join(self.script_dir, 'positions.json')
            
            try:
                from binance.client import Client
                import config
                
                # Use correct config variable names
                api_key = getattr(config, 'BINANCE_API_KEY', None) or getattr(config, 'API_KEY', None)
                api_secret = getattr(config, 'BINANCE_API_SECRET', None) or getattr(config, 'API_SECRET', None)
                
                if not api_key or not api_secret:
                    output_lines.append("⚠️ Clés API non configurées - correction impossible")
                else:
                    testnet_mode = getattr(config, 'TESTNET_MODE', True)
                    client = Client(api_key, api_secret, testnet=testnet_mode, requests_params={'timeout': 15})
                    # 🔧 FIX: Synchroniser l'horloge (local clock ahead = -1021)
                    try:
                        st = client.get_server_time()
                        client.timestamp_offset = st['serverTime'] - int(time.time() * 1000)
                    except Exception:
                        pass
                    
                    # Get actual positions from Binance
                    try:
                        futures_positions = client.futures_position_information()
                        active_symbols = [p['symbol'] for p in futures_positions if float(p.get('positionAmt', 0)) != 0]
                        output_lines.append(f"✅ {len(active_symbols)} positions FUTURES actives détectées")
                    except:
                        output_lines.append("⚠️ Mode SPOT - Correction limitée")
                        active_symbols = []
                    
                    # Backup current positions
                    if os.path.exists(positions_file):
                        import shutil
                        backup_file = positions_file.replace('.json', '_backup.json')
                        shutil.copy(positions_file, backup_file)
                        output_lines.append(f"📦 Backup créé: positions_backup.json")
                
                output_lines.append("")
                output_lines.append("✅ Vérification terminée")
                output_lines.append("💡 Aucune correction automatique appliquée en mode sécurisé")
                output_lines.append("   Éditez positions.json manuellement si nécessaire")
                
            except Exception as e:
                output_lines.append(f"⚠️ Erreur API: {str(e)[:50]}")
            
            request_handler.send_json_response({
                'status': 'ok',
                'output': '\n'.join(output_lines),
                'timestamp': datetime.now().isoformat()
            })
            
        except Exception as e:
            logger.error(f"Error in fix sync: {e}")
            request_handler.send_json_response({
                'status': 'error',
                'error': str(e),
                'output': f'Erreur: {str(e)}'
            }, 500)

    def handle_binance_balance(self, request_handler) -> None:
        """GET /api/binance-balance - Solde USDT réel depuis Binance (léger, rapide)

        🔧 FIX 28/02: lightweight endpoint returning the real USDT balance,
        used by the dashboard to display the true total balance.

        Always answers with JSON containing `usdt_free`, `usdt_locked` and
        `usdt_total` (zeros on any failure) plus `success`/`error`.

        BUGFIX: the binance imports used to live inside the main `try`, so
        when the package was missing the raised ImportError reached
        `except BinanceAPIException` — whose clause expression is then an
        unbound name — and the route crashed with NameError instead of
        returning a JSON error. Imports now fail first and gracefully.
        """
        def _fail(message: str) -> None:
            # Uniform error payload so the dashboard never sees a 500 here.
            request_handler.send_json_response({
                'success': False,
                'error': message,
                'usdt_free': 0,
                'usdt_locked': 0,
                'usdt_total': 0
            })

        try:
            from binance.client import Client
            from binance.exceptions import BinanceAPIException
            import config
        except ImportError as e:
            _fail(str(e))
            return

        try:
            import time as _time

            api_key = getattr(config, 'BINANCE_API_KEY', None) or getattr(config, 'API_KEY', None)
            api_secret = getattr(config, 'BINANCE_API_SECRET', None) or getattr(config, 'API_SECRET', None)
            testnet_mode = getattr(config, 'TESTNET_MODE', True)

            if not api_key or not api_secret:
                _fail('Clés API non configurées')
                return

            client = Client(api_key, api_secret, testnet=testnet_mode, requests_params={'timeout': 10})
            # Sync clock offset with the Binance server (avoids -1021 errors).
            try:
                st = client.get_server_time()
                client.timestamp_offset = st['serverTime'] - int(_time.time() * 1000)
            except Exception:
                pass

            account = client.get_account()
            balances = account.get('balances', [])

            usdt_free = 0
            usdt_locked = 0
            for b in balances:
                if b['asset'] == 'USDT':
                    usdt_free = round(float(b.get('free', 0)), 2)
                    usdt_locked = round(float(b.get('locked', 0)), 2)
                    break

            request_handler.send_json_response({
                'success': True,
                'usdt_free': usdt_free,
                'usdt_locked': usdt_locked,
                'usdt_total': round(usdt_free + usdt_locked, 2),
                'testnet': testnet_mode,
                'timestamp': datetime.now().isoformat()
            })

        except BinanceAPIException as be:
            _fail(f'Binance API ({be.code}): {be.message}')
        except Exception as e:
            logger.error(f"Error in binance-balance: {e}")
            _fail(str(e))

    def handle_binance_account_detail(self, request_handler) -> None:
        """GET /api/maintenance/binance-account - Détail complet du compte Binance
        
        🔧 FIX 28/02: Endpoint dédié pour vérifier la synchronisation bot/Binance
        Retourne: balance USDT, toutes les positions Binance avec montants et valeurs,
                  positions locales du bot, et comparaison détaillée.
        """
        try:
            import time as _time
            result = {
                'usdt_balance': 0,
                'binance_positions': [],
                'local_positions': [],
                'orphans_binance': [],   # Sur Binance, pas dans le bot, vendable (>= 5$)
                'dust_orphans': [],      # Sur Binance, pas dans le bot, dust invendable (< 5$)
                'orphans_local': [],     # Dans le bot mais pas sur Binance
                'synced': [],            # Synchronisé des deux côtés
                'total_binance_value': 0,
                'total_local_value': 0,
                'mode': 'UNKNOWN',
                'testnet': True,
                'status': 'ok'
            }
            
            # 1) Charger les positions locales du bot
            positions_file = os.path.join(self.script_dir, 'positions.json')
            local_positions = {}
            if os.path.exists(positions_file):
                try:
                    with open(positions_file, 'r', encoding='utf-8') as f:
                        local_positions = json.load(f)
                except Exception:
                    pass
            
            local_symbols = {}
            for sym, pos in local_positions.items():
                entry_price = pos.get('entry_price', 0)
                quantity = pos.get('quantity', 0)
                entry_value = entry_price * quantity
                local_symbols[sym] = {
                    'symbol': sym,
                    'quantity': quantity,
                    'entry_price': entry_price,
                    'entry_value': round(entry_value, 2),
                    'current_price': 0,
                    'current_value': 0,
                    'pnl': 0,
                    'pnl_pct': 0,
                    'pattern': pos.get('pattern', '?'),
                    'entry_time': pos.get('entry_time', '?'),
                    'stop_loss': pos.get('stop_loss_pct', 0),
                    'take_profit': pos.get('take_profit_pct', 0),
                }
            result['local_positions'] = list(local_symbols.values())
            
            # 2) Connexion Binance et récupération du compte
            try:
                from binance.client import Client
                from binance.exceptions import BinanceAPIException
                import config
                
                api_key = getattr(config, 'BINANCE_API_KEY', None) or getattr(config, 'API_KEY', None)
                api_secret = getattr(config, 'BINANCE_API_SECRET', None) or getattr(config, 'API_SECRET', None)
                testnet_mode = getattr(config, 'TESTNET_MODE', True)
                result['testnet'] = testnet_mode
                
                if not api_key or not api_secret:
                    result['status'] = 'error'
                    result['error'] = 'Clés API Binance non configurées'
                    request_handler.send_json_response(result)
                    return
                
                client = Client(api_key, api_secret, testnet=testnet_mode, requests_params={'timeout': 15})
                # Sync time offset
                try:
                    st = client.get_server_time()
                    client.timestamp_offset = st['serverTime'] - int(_time.time() * 1000)
                except Exception:
                    pass
                
                # Récupérer le compte SPOT
                account = client.get_account()
                balances = account.get('balances', [])
                
                # Liste des stablecoins/fiat/tokens de test à ignorer
                IGNORED_ASSETS = {
                    'USDC', 'BUSD', 'DAI', 'USD', 'EUR', 'BRL', 'ARS', 'TRY',
                    'PLN', 'RON', 'CZK', 'MXN', 'COP', 'JPY', 'ZAR', 'UAH', 'IDR',
                    '456', '这是测试币', 'TUSD', 'FDUSD'
                }
                
                # Balance USDT
                for b in balances:
                    if b['asset'] == 'USDT':
                        result['usdt_balance'] = round(float(b.get('free', 0)) + float(b.get('locked', 0)), 2)
                        break
                
                # 3) Récupérer les prix actuels en une seule requête
                try:
                    all_tickers = {t['symbol']: float(t['price']) for t in client.get_all_tickers()}
                except Exception:
                    all_tickers = {}
                
                # 4) Charger la watchlist pour distinguer les positions pertinentes
                watchlist_symbols = set()
                watchlist_file = os.path.join(self.script_dir, 'watchlist.json')
                try:
                    if os.path.exists(watchlist_file):
                        with open(watchlist_file, 'r', encoding='utf-8') as f:
                            wl = json.load(f)
                            watchlist_symbols = set(wl.get('symbols', []))
                except Exception:
                    pass
                
                # 5) Construire la liste des positions Binance (balances non-nulles)
                binance_positions = []
                binance_other = []  # Tokens hors watchlist (testnet noise)
                for b in balances:
                    asset = b['asset']
                    total = float(b.get('free', 0)) + float(b.get('locked', 0))
                    
                    if asset in IGNORED_ASSETS or total < 0.0001:
                        continue
                    if asset == 'USDT':
                        continue  # Traité séparément
                    
                    symbol = asset + 'USDT'
                    price = all_tickers.get(symbol, 0)
                    value = round(total * price, 2)
                    
                    # Ignorer les micro-restes < 1$ (poussière)
                    if value < 1.0 and price > 0:
                        continue
                    
                    entry = {
                        'symbol': symbol,
                        'asset': asset,
                        'quantity': round(total, 6),
                        'price': round(price, 6),
                        'value_usdt': value,
                        'free': round(float(b.get('free', 0)), 6),
                        'locked': round(float(b.get('locked', 0)), 6),
                    }
                    
                    # Séparer: watchlist (positions pertinentes) vs other (tokens testnet)
                    if symbol in watchlist_symbols or symbol in local_symbols:
                        binance_positions.append(entry)
                    else:
                        binance_other.append(entry)
                
                # Trier par valeur décroissante
                binance_positions.sort(key=lambda x: x['value_usdt'], reverse=True)
                binance_other.sort(key=lambda x: x['value_usdt'], reverse=True)
                result['binance_positions'] = binance_positions
                result['binance_other'] = binance_other  # Tokens hors watchlist
                result['binance_other_count'] = len(binance_other)
                result['binance_other_value'] = round(sum(p['value_usdt'] for p in binance_other), 2)
                result['total_binance_value'] = round(sum(p['value_usdt'] for p in binance_positions), 2)
                result['mode'] = 'SPOT'
                
                # 6) Mettre à jour les prix actuels dans les positions locales
                for lp in result['local_positions']:
                    sym = lp['symbol']
                    price = all_tickers.get(sym, 0)
                    lp['current_price'] = round(price, 6)
                    lp['current_value'] = round(lp['quantity'] * price, 2)
                    if lp['entry_price'] > 0:
                        lp['pnl'] = round(lp['current_value'] - lp['entry_value'], 2)
                        lp['pnl_pct'] = round((price / lp['entry_price'] - 1) * 100, 2)
                result['total_local_value'] = round(sum(lp['current_value'] for lp in result['local_positions']), 2)
                
                # 6) Comparaison: détection des orphelins
                binance_syms = {p['symbol'] for p in binance_positions}
                local_syms = set(local_symbols.keys())
                
                # Seuil minimum notional Binance (positions en-dessous = dust invendable)
                DUST_THRESHOLD_USDT = 5.0
                _raw_orphans = [p for p in binance_positions if p['symbol'] not in local_syms]
                # Séparer vrais orphelins (vendables) et dust (invendables < 5$)
                result['orphans_binance'] = [p for p in _raw_orphans if p['value_usdt'] >= DUST_THRESHOLD_USDT]
                result['dust_orphans']    = [p for p in _raw_orphans if p['value_usdt'] < DUST_THRESHOLD_USDT]
                result['orphans_local'] = [lp for lp in result['local_positions'] if lp['symbol'] not in binance_syms]
                result['synced'] = [lp['symbol'] for lp in result['local_positions'] if lp['symbol'] in binance_syms]
                
            except BinanceAPIException as be:
                result['status'] = 'error'
                result['error'] = f'Erreur Binance ({be.code}): {be.message}'
            except ImportError:
                result['status'] = 'error'
                result['error'] = 'Module binance non installé'
            except Exception as e:
                result['status'] = 'error'
                result['error'] = str(e)
            
            result['timestamp'] = datetime.now().isoformat()
            request_handler.send_json_response(result)
            
        except Exception as e:
            logger.error(f"Error in binance-account-detail: {e}")
            request_handler.send_json_response({
                'status': 'error',
                'error': str(e)
            }, 500)

    def handle_binance_cleanup(self, request_handler, data) -> None:
        """POST /api/maintenance/binance-cleanup - Fix orphan positions.

        Runs up to three maintenance steps, selected via ``data['action']``:

        - ``'sell_orphans'``:    market-sell Binance positions the bot does not track
        - ``'clean_local'``:     drop local positions that no longer exist on Binance
        - ``'clean_watchlist'``: strip invalid (non-ASCII) symbols from the watchlist
        - ``'all'`` (default):   run all three steps

        Responds with a summary dict (sold orders, errors, dust skipped,
        removed local positions, cleaned watchlist symbols, USDT recovered).
        """
        try:
            import time as _time
            from decimal import Decimal
            from binance.client import Client
            from binance.exceptions import BinanceAPIException
            import config

            results = {
                'sold': [],
                'sell_errors': [],
                'dust_skipped': [],
                'local_removed': [],
                'watchlist_cleaned': [],
                'total_recovered': 0,
                'status': 'ok'
            }

            action = data.get('action', 'all')  # 'sell_orphans', 'clean_local', 'clean_watchlist', 'all'

            # --- Binance connection ---
            api_key = getattr(config, 'BINANCE_API_KEY', None) or getattr(config, 'API_KEY', None)
            api_secret = getattr(config, 'BINANCE_API_SECRET', None) or getattr(config, 'API_SECRET', None)
            testnet_mode = getattr(config, 'TESTNET_MODE', True)

            if not api_key or not api_secret:
                request_handler.send_json_response({
                    'status': 'error',
                    'error': 'Clés API Binance non configurées'
                }, 400)
                return

            client = Client(api_key, api_secret, testnet=testnet_mode, requests_params={'timeout': 15})
            # Best-effort clock sync — avoids -1021 timestamp errors on signed calls
            try:
                st = client.get_server_time()
                client.timestamp_offset = st['serverTime'] - int(_time.time() * 1000)
            except Exception:
                pass

            # --- Load the bot's local positions ---
            positions_file = os.path.join(self.script_dir, 'positions.json')
            local_positions = {}
            if os.path.exists(positions_file):
                try:
                    with open(positions_file, 'r', encoding='utf-8') as f:
                        local_positions = json.load(f)
                except Exception:
                    pass
            local_syms = set(local_positions.keys())

            # --- Load the watchlist ---
            watchlist_file = os.path.join(self.script_dir, 'watchlist.json')
            watchlist_data = {}
            watchlist_symbols = set()
            try:
                if os.path.exists(watchlist_file):
                    with open(watchlist_file, 'r', encoding='utf-8') as f:
                        watchlist_data = json.load(f)
                        watchlist_symbols = set(watchlist_data.get('symbols', []))
            except Exception:
                pass

            # --- Fetch Binance balances and current prices ---
            account = client.get_account()
            balances = account.get('balances', [])
            all_tickers = {}
            try:
                all_tickers = {t['symbol']: float(t['price']) for t in client.get_all_tickers()}
            except Exception:
                pass

            # Stablecoins/fiat/testnet junk assets that are never traded by the bot
            IGNORED_ASSETS = {
                'USDC', 'BUSD', 'DAI', 'USD', 'EUR', 'BRL', 'ARS', 'TRY',
                'PLN', 'RON', 'CZK', 'MXN', 'COP', 'JPY', 'ZAR', 'UAH', 'IDR',
                '456', '这是测试币', 'TUSD', 'FDUSD', 'USDT'
            }

            # Build the list of relevant (watchlist or locally-tracked) Binance positions
            binance_positions = []
            for b in balances:
                asset = b['asset']
                total = float(b.get('free', 0)) + float(b.get('locked', 0))
                if asset in IGNORED_ASSETS or total < 0.0001:
                    continue
                symbol = asset + 'USDT'
                price = all_tickers.get(symbol, 0)
                value = round(total * price, 2)
                # Skip dust worth less than 1$ (only when we actually have a price)
                if value < 1.0 and price > 0:
                    continue
                if symbol in watchlist_symbols or symbol in local_syms:
                    binance_positions.append({
                        'symbol': symbol,
                        'asset': asset,
                        'quantity': total,
                        'price': price,
                        'value_usdt': value,
                        'free': float(b.get('free', 0)),
                    })

            binance_syms = {p['symbol'] for p in binance_positions}

            # === ACTION 1: Sell Binance orphans (on Binance but not tracked by the bot) ===
            if action in ('all', 'sell_orphans'):
                orphans_binance = [p for p in binance_positions if p['symbol'] not in local_syms]

                for orphan in orphans_binance:
                    symbol = orphan['symbol']
                    quantity = orphan['free']  # Only the 'free' part is sellable

                    if quantity <= 0:
                        results['sell_errors'].append({
                            'symbol': symbol,
                            'error': 'Quantité libre = 0 (tokens locked)'
                        })
                        continue

                    try:
                        # Fetch exchange filters to round the quantity correctly
                        symbol_info = client.get_symbol_info(symbol)
                        if not symbol_info:
                            results['sell_errors'].append({
                                'symbol': symbol,
                                'error': 'Symbole invalide sur Binance'
                            })
                            continue

                        step_size = None
                        min_qty = None
                        min_notional = None
                        for flt in symbol_info['filters']:
                            if flt['filterType'] == 'LOT_SIZE':
                                step_size = float(flt['stepSize'])
                                min_qty = float(flt['minQty'])
                            elif flt['filterType'] in ('MIN_NOTIONAL', 'NOTIONAL'):
                                min_notional = float(flt.get('minNotional', flt.get('minVal', 5.0)))

                        if step_size:
                            # Floor the quantity to an exact multiple of step_size.
                            # Done in Decimal: the previous float modulo
                            # (quantity % step_size) carried binary rounding error
                            # (e.g. 0.3 % 0.1 == 0.0999...) and could drop a whole
                            # step, selling less than actually owned.
                            q_dec = Decimal(str(quantity))
                            s_dec = Decimal(str(step_size))
                            quantity = float((q_dec // s_dec) * s_dec)

                        if min_qty and quantity < min_qty:
                            results['sell_errors'].append({
                                'symbol': symbol,
                                'error': f'Quantité trop faible ({quantity} < {min_qty})'
                            })
                            continue

                        # Check the exchange's minimum NOTIONAL value (e.g. 5 USDT on Binance)
                        order_value = quantity * orphan.get('price', 0)
                        effective_min_notional = min_notional if min_notional else 5.0
                        if order_value < effective_min_notional:
                            results['dust_skipped'].append({
                                'symbol': symbol,
                                'value_usdt': round(order_value, 2),
                                'reason': f'Valeur {order_value:.2f}$ < minimum {effective_min_notional}$ (dust)'
                            })
                            continue

                        order = client.order_market_sell(symbol=symbol, quantity=quantity)
                        fills = order.get('fills', [])
                        total_value = sum(float(f['price']) * float(f['qty']) for f in fills)

                        results['sold'].append({
                            'symbol': symbol,
                            'quantity': quantity,
                            'value_usdt': round(total_value, 2)
                        })
                        results['total_recovered'] += total_value

                    except BinanceAPIException as be:
                        results['sell_errors'].append({
                            'symbol': symbol,
                            'error': f'API {be.code}: {be.message}'
                        })
                    except Exception as e:
                        results['sell_errors'].append({
                            'symbol': symbol,
                            'error': str(e)
                        })

            # === ACTION 2: Remove orphan local positions (in the bot but not on Binance) ===
            if action in ('all', 'clean_local'):
                orphans_local = [sym for sym in local_syms if sym not in binance_syms]

                if orphans_local:
                    # Backup positions.json before mutating it
                    import shutil
                    backup = positions_file.replace('.json', '_pre_cleanup.json')
                    if os.path.exists(positions_file):
                        shutil.copy(positions_file, backup)

                    for sym in orphans_local:
                        if sym in local_positions:
                            del local_positions[sym]
                            results['local_removed'].append(sym)

                    with open(positions_file, 'w', encoding='utf-8') as f:
                        json.dump(local_positions, f, indent=2, ensure_ascii=False)

            # === ACTION 3: Strip invalid symbols from the watchlist ===
            if action in ('all', 'clean_watchlist'):
                import re
                symbols_list = watchlist_data.get('symbols', [])
                clean_symbols = []
                for sym in symbols_list:
                    # Reject symbols containing anything but A-Z / 0-9 (non-ASCII etc.)
                    if not re.match(r'^[A-Z0-9]+$', sym):
                        results['watchlist_cleaned'].append(sym)
                        continue
                    clean_symbols.append(sym)

                if results['watchlist_cleaned']:
                    watchlist_data['symbols'] = clean_symbols
                    with open(watchlist_file, 'w', encoding='utf-8') as f:
                        json.dump(watchlist_data, f, indent=2, ensure_ascii=False)

            results['total_recovered'] = round(results['total_recovered'], 2)
            results['timestamp'] = datetime.now().isoformat()

            logger.info(f"Binance cleanup: sold={len(results['sold'])}, local_removed={len(results['local_removed'])}, watchlist_cleaned={len(results['watchlist_cleaned'])}")
            request_handler.send_json_response(results)

        except Exception as e:
            logger.error(f"Error in binance-cleanup: {e}")
            import traceback
            traceback.print_exc()
            request_handler.send_json_response({
                'status': 'error',
                'error': str(e)
            }, 500)

    def handle_get_config(self, request_handler) -> None:
        """GET /api/get-config - Return the current trading configuration."""
        try:
            current = self.config_service.read_config()

            if current is None:
                # No configuration file on disk yet -> 404
                request_handler.send_json_response(
                    {'success': False, 'error': 'Config file not found'},
                    404
                )
                return

            request_handler.send_json_response(
                {'success': True, 'config': current}
            )

        except Exception as e:
            logger.error(f"Error in get_config: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_apply_config(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/apply-config - Validate and apply a trading configuration."""
        try:
            # Pydantic validation; raises ValueError on a bad payload
            validated = TradingConfig(**data)

            if self.config_service.apply_config(validated):
                request_handler.send_json_response(
                    {'success': True, 'message': 'Configuration updated'}
                )
            else:
                request_handler.send_json_response(
                    {'success': False, 'error': 'Failed to update configuration'},
                    500
                )

        except ValueError as e:
            # Pydantic validation error -> 400
            request_handler.send_json_response(
                {'success': False, 'error': f'Validation error: {str(e)}'},
                400
            )
        except Exception as e:
            logger.error(f"Error in apply_config: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_get_ia_criteria(self, request_handler) -> None:
        """GET /api/get-ia-criteria - Return the AI criteria (saved or defaults)."""
        try:
            criteria_path = f"{self.script_dir}/ia_criteria.json"

            if not os.path.exists(criteria_path):
                # Nothing saved yet: answer with the built-in defaults
                request_handler.send_json_response({
                    'success': True,
                    'criteria': {
                        'buy': {
                            'bb_squeeze_max': 3.0,
                            'ema_diff_min': -0.05,
                            'momentum_min': 0.05,
                            'rsi_max': 45,
                            'score_min': 65
                        },
                        'sell': {
                            'stop_loss': -1.5,
                            'death_cross': -0.15,
                            'momentum_sell': -0.5,
                            'rsi_sell': 75,
                            'profit_secure': 2.0
                        },
                        'compatibility': {
                            'volatility_ideal': 1.5,
                            'min_grade': 'C'
                        }
                    }
                })
                return

            request_handler.send_json_response({
                'success': True,
                'criteria': load_json_file(criteria_path, {})
            })

        except Exception as e:
            logger.error(f"Error in get_ia_criteria: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_save_ia_criteria(self, request_handler, data: Dict) -> None:
        """POST /api/save-ia-criteria - Persist the AI criteria to disk."""
        try:
            target = f"{self.script_dir}/ia_criteria.json"

            # Stamp the payload so consumers can tell how fresh the criteria are
            data['last_update'] = datetime.now().isoformat()

            save_json_file(target, data)
            logger.info(f"✅ Critères IA sauvegardés: {target}")

            request_handler.send_json_response(
                {'success': True, 'message': 'Critères IA sauvegardés'}
            )
        except Exception as e:
            logger.error(f"Error in save_ia_criteria: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_get_rotation_status(self, request_handler) -> None:
        """GET /api/rotation-status - Status of the smart-rotation system.

        Combines the persisted rotation history with best-effort live data
        from the rotation manager and the AI surveillance service; either
        source may be unavailable, in which case its part stays empty.
        """
        try:
            rotation_history_path = f"{self.script_dir}/rotation_history.json"
            rotation_history = load_json_file(rotation_history_path, [])

            # Try to get the real-time status from the rotation manager
            rotation_opportunities = []
            rotation_config = {}

            # except Exception (not a bare except): a bare except would also
            # swallow KeyboardInterrupt/SystemExit
            try:
                from smart_rotation import get_smart_rotation
                rotation_mgr = get_smart_rotation()
                status = rotation_mgr.get_status()
                rotation_config = status.get('config', {})
            except Exception:
                pass

            # Fetch current opportunities from the AI surveillance service
            try:
                from ai_predictor import get_surveillance_service
                service = get_surveillance_service()
                ai_status = service.get_surveillance_status()
                rotation_opportunities = ai_status.get('rotation_opportunities', [])
            except Exception:
                pass

            request_handler.send_json_response({
                'success': True,
                'rotation': {
                    'opportunities': rotation_opportunities[:10],  # cap payload size
                    'history': rotation_history[-20:],             # last 20 entries only
                    'config': rotation_config,
                    'enabled': rotation_config.get('enabled', True)
                }
            })

        except Exception as e:
            logger.error(f"Error in get_rotation_status: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_cycle_status(self, request_handler) -> None:
        """GET /api/cycle-status - Simple, human-readable AI cycle status.

        Reads the surveillance service state (if available) and renders a
        compact progress display (emoji bar + countdown) for the dashboard.
        Falls back to an idle display when the service is not running.
        """
        try:
            # 🚀 Use the pre-imported getter (no import = no per-request latency).
            # except Exception (not a bare except): don't swallow
            # KeyboardInterrupt/SystemExit here.
            try:
                service = _surveillance_service_getter() if _surveillance_service_getter else None
                is_running = bool(service.is_running) if service else False
                last_cycle_time = getattr(service, 'last_cycle_time', None) if service else None
            except Exception:
                is_running = False
                last_cycle_time = None

            # Simple cycle arithmetic
            now = datetime.now()
            cycle_duration = 5  # seconds per surveillance cycle

            if last_cycle_time and is_running:
                elapsed = (now - last_cycle_time).total_seconds()
                cycle_number = int(elapsed / cycle_duration) + 1
                time_in_cycle = elapsed % cycle_duration
                time_to_next = cycle_duration - time_in_cycle

                progress_pct = int((time_in_cycle / cycle_duration) * 100)

                # Emoji bar reflecting progress through the current cycle
                if progress_pct < 33:
                    emoji = '🟢'
                    visual = '🟢⚪⚪'
                elif progress_pct < 66:
                    emoji = '🟡'
                    visual = '🟢🟡⚪'
                else:
                    emoji = '🔴'
                    visual = '🟢🟡🔴'

                simple_display = f"{emoji} Cycle #{cycle_number} • {int(time_to_next)}s"
                message = f"Cycle #{cycle_number} en cours"
            else:
                # Service unavailable or no cycle recorded yet: idle display
                visual = '⚪⚪⚪'
                emoji = '🔵'
                simple_display = '🔵 En attente du prochain cycle'
                message = 'Surveillance en cours'
                cycle_number = 0
                progress_pct = 0

            request_handler.send_json_response({
                'success': True,
                'cycle': {
                    'simple': simple_display,
                    'detailed': {
                        'visual_bar': visual,
                        'progress': f"{progress_pct}%",
                        'status': 'Actif' if is_running else 'Inactif',
                        'cycle': cycle_number,
                        'emoji': emoji,
                        'message': message
                    }
                }
            })

        except Exception as e:
            logger.error(f"Error in cycle_status: {e}")
            import traceback
            traceback.print_exc()
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_execute_rotation(self, request_handler, data: Dict) -> None:
        """POST /api/execute-rotation - Schedule a manual position rotation."""
        try:
            sell_symbol = data.get('sell_symbol')
            buy_symbol = data.get('buy_symbol')

            if not (sell_symbol and buy_symbol):
                request_handler.send_json_response(
                    {'success': False, 'error': 'sell_symbol et buy_symbol requis'},
                    400
                )
                return

            # The bot picks this trigger file up on its next cycle and
            # performs the actual sell/buy there.
            save_json_file(f"{self.script_dir}/rotation_trigger.json", {
                'sell_symbol': sell_symbol,
                'buy_symbol': buy_symbol,
                'timestamp': datetime.now().isoformat(),
                'manual': True
            })

            request_handler.send_json_response({
                'success': True,
                'message': f'Rotation {sell_symbol} → {buy_symbol} programmée'
            })

        except Exception as e:
            logger.error(f"Error in execute_rotation: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_toggle_rotation(self, request_handler, data: Dict) -> None:
        """POST /api/toggle-rotation - Enable or disable smart rotation."""
        try:
            enabled = data.get('enabled', True)

            try:
                from smart_rotation import get_smart_rotation
                # Flip the flag directly on the shared rotation manager
                manager = get_smart_rotation()
                manager.config['enabled'] = enabled

                request_handler.send_json_response({
                    'success': True,
                    'enabled': enabled,
                    'message': f'Rotation {"activée" if enabled else "désactivée"}'
                })
            except ImportError:
                request_handler.send_json_response(
                    {'success': False, 'error': 'Module Smart Rotation non disponible'},
                    500
                )

        except Exception as e:
            logger.error(f"Error in toggle_rotation: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_get_profiles(self, request_handler) -> None:
        """GET /api/get-profiles - Return the saved trading profiles."""
        try:
            stored = load_json_file(
                f"{self.script_dir}/trading_profiles.json",
                {'profiles': []}
            )
            request_handler.send_json_response({
                'success': True,
                'profiles': stored.get('profiles', [])
            })

        except Exception as e:
            logger.error(f"Error in get_profiles: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_bot_analysis(self, request_handler) -> None:
        """GET /api/bot-analysis - Real-time analysis data written by the bot.

        Served from a 10s in-memory cache (the bot rewrites bot_analysis.json
        roughly every 30s). If the file does not exist yet, a zeroed default
        payload is returned with botRunning=False.
        """
        try:
            # 🚀 PERF: 10s cache — the bot writes bot_analysis.json every ~30s
            cached = _cache_get('bot_analysis', ttl=10.0)
            if cached is not None:
                request_handler.send_json_response(cached)
                return

            analysis_path = f"{self.script_dir}/bot_analysis.json"
            analysis_data = load_json_file(analysis_path, None)

            if analysis_data is None:
                # The bot has not created the file yet: return an empty shell
                request_handler.send_json_response({
                    'success': True,
                    'data': {
                        'timestamp': datetime.now().isoformat(),
                        'stats': {
                            'positions': 0,
                            'maxPositions': 10,
                            'winRate': 0,
                            'pnl': 0,
                            'totalTrades': 0,
                            'wins': 0,
                            'losses': 0
                        },
                        'signals': {'buy': 0, 'sell': 0},
                        'positions': [],
                        'cryptos': [],
                        'logs': [],
                        'settings': {'autoTrade': False, 'stopLoss': 2, 'takeProfit': 4, 'testnet': True}
                    },
                    'botRunning': False
                })
            else:
                # Consider the bot alive if the data is under 120 seconds old;
                # a full bot cycle over ~62 symbols can take a while.
                # except Exception (not a bare except): a bare except would also
                # swallow KeyboardInterrupt/SystemExit.
                try:
                    timestamp = datetime.fromisoformat(analysis_data.get('timestamp', ''))
                    age_seconds = (datetime.now() - timestamp).total_seconds()
                    bot_running = age_seconds < 120  # 2 minutes of tolerance
                except Exception:
                    bot_running = False

                result = {
                    'success': True,
                    'data': analysis_data,
                    'botRunning': bot_running
                }
                _cache_set('bot_analysis', result)
                request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in bot_analysis: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_crypto_data(self, request_handler) -> None:
        """GET /api/crypto-data - Full crypto market data (production feed)."""
        try:
            # Conditional import: the fetcher module may not be installed
            from crypto_data_fetcher import get_fetcher

            # Always serve PRODUCTION data (real market), never testnet
            fetcher = get_fetcher(use_testnet=False)
            market = fetcher.get_cached_data()

            if not market or not market.get('symbols'):
                # Empty cache: force a synchronous refresh
                market = asyncio.run(fetcher.fetch_all_data(force=True))

            request_handler.send_json_response({
                'success': True,
                'data': market,
                'cache_valid': fetcher.is_cache_valid(),
                'count': len(market.get('symbols', {}))
            })

        except ImportError:
            request_handler.send_json_response(
                {'success': False, 'error': 'Crypto fetcher not available'},
                503
            )
        except Exception as e:
            logger.error(f"Error in crypto_data: {e}")
            request_handler.send_json_response(
                {'success': False, 'error': str(e)},
                500
            )

    def handle_crypto_summary(self, request_handler) -> None:
        """GET /api/crypto-summary - Crypto data summary plus all tradable (bot + spy) values.

        Builds the fetcher summary from PRODUCTION data, then attaches an
        ``all_tradable`` list assembled in four passes:
          1) symbols from the fetcher cache (bot watchlist),
          2) spy score files (testnet + prod, merged),
          3) prod spy watchlist entries without trades,
          4) price enrichment of spy-only coins from the public Binance ticker.
        Responds 503 when the fetcher module is unavailable, 500 otherwise.
        """
        try:
            from crypto_data_fetcher import get_fetcher

            # Use PRODUCTION data (real market data)
            fetcher = get_fetcher(use_testnet=False)

            # Only when the cache is EMPTY and no fetch is running → quick blocking fetch.
            # If the cache is expired but still has data → use it immediately;
            # the auto-update (120s) refreshes in the background (avoids a 30s stall)
            if not fetcher.cache.get('symbols') and not fetcher.is_updating:
                try:
                    loop = asyncio.new_event_loop()
                    asyncio.set_event_loop(loop)
                    loop.run_until_complete(fetcher.fetch_all_data())
                    loop.close()
                except Exception as e:
                    logger.warning(f"Could not refresh cache: {e}")

            summary = fetcher.get_summary()

            # ── Attach ALL tradable values (bot + spy) ──
            try:
                all_tradable = {}
                cache_symbols = fetcher.cache.get('symbols', {})

                # 1) Symbols from the cache (bot watch)
                for sym, sdata in cache_symbols.items():
                    change_24h = sdata.get('priceChangePercent', 0)
                    rsi_sig = sdata.get('signals', {}).get('rsi_signal', 'neutral')
                    trend = sdata.get('signals', {}).get('trend', 'neutral')
                    price = sdata.get('price', 0)
                    volume_24h = sdata.get('quoteVolume', 0)
                    rsi_val = sdata.get('indicators', {}).get('rsi', 0) if isinstance(sdata.get('indicators'), dict) else 0
                    all_tradable[sym] = {
                        'symbol': sym,
                        'change_24h': round(change_24h, 2) if change_24h else 0,
                        'price': price,
                        'volume_24h': round(volume_24h, 0) if volume_24h else 0,
                        'rsi': round(rsi_val, 1) if rsi_val else 0,
                        'rsi_signal': rsi_sig,
                        'trend': trend,
                        'source': 'bot',
                    }

                # 2) Spy symbols — merge testnet + prod spy_coin_scores.json
                # 🔧 FIX 11/04: also read prod so the 79 USDC coins are labeled correctly
                spy_scores_paths = [
                    os.path.join(self.script_dir, 'spy_coin_scores.json'),  # testnet
                    os.path.join(self.script_dir, '..', 'crypto_trading_prod', 'data', 'spy_coin_scores.json'),  # prod
                ]
                merged_spy_scores = {}
                for spy_scores_path in spy_scores_paths:
                    spy_scores_path = os.path.normpath(spy_scores_path)
                    if os.path.exists(spy_scores_path):
                        with open(spy_scores_path, 'r', encoding='utf-8') as f:
                            partial = json.load(f)
                        for sym, sc in partial.items():
                            if sym not in merged_spy_scores:
                                merged_spy_scores[sym] = sc
                            else:
                                # Merge: sum trades/wins/pnl, keep the most recent blocked_until
                                existing = merged_spy_scores[sym]
                                existing['trades'] = existing.get('trades', 0) + sc.get('trades', 0)
                                existing['wins'] = existing.get('wins', 0) + sc.get('wins', 0)
                                existing['total_pnl_usdt'] = existing.get('total_pnl_usdt', 0) + sc.get('total_pnl_usdt', 0)
                                if sc.get('blocked_until'):
                                    existing['blocked_until'] = sc['blocked_until']

                # NOTE(review): the winrate below assumes 'wins' is present whenever
                # trades > 0; a missing 'wins' key raises KeyError, which is swallowed
                # by the outer "Could not load tradable values" warning — confirm
                for sym, sc in merged_spy_scores.items():
                    if sym in all_tradable:
                        all_tradable[sym]['source'] = 'both'
                        all_tradable[sym]['spy_trades'] = sc.get('trades', 0)
                        all_tradable[sym]['spy_pnl'] = sc.get('total_pnl_usdt', 0)
                        all_tradable[sym]['spy_winrate'] = round(sc['wins'] / sc['trades'] * 100, 1) if sc.get('trades', 0) > 0 else 0
                        all_tradable[sym]['spy_blocked'] = _is_blocked_until_active(sc.get('blocked_until'))
                    else:
                        all_tradable[sym] = {
                            'symbol': sym,
                            'change_24h': 0,
                            'price': 0,
                            'volume_24h': 0,
                            'rsi': 0,
                            'rsi_signal': 'unknown',
                            'trend': 'unknown',
                            'source': 'spy',
                            'spy_trades': sc.get('trades', 0),
                            'spy_pnl': sc.get('total_pnl_usdt', 0),
                            'spy_winrate': round(sc['wins'] / sc['trades'] * 100, 1) if sc.get('trades', 0) > 0 else 0,
                            'spy_blocked': _is_blocked_until_active(sc.get('blocked_until')),
                        }

                # 3) Coins in the prod spy watchlist but without any trade → mark source=spy
                prod_wl_path = os.path.normpath(os.path.join(self.script_dir, '..', 'crypto_trading_prod', 'watchlist.json'))
                if os.path.exists(prod_wl_path):
                    with open(prod_wl_path, 'r', encoding='utf-8') as f:
                        prod_wl_syms = json.load(f).get('symbols', [])
                    for sym in prod_wl_syms:
                        if sym in all_tradable and all_tradable[sym]['source'] == 'bot':
                            all_tradable[sym]['source'] = 'both'
                        elif sym not in all_tradable:
                            all_tradable[sym] = {
                                'symbol': sym, 'change_24h': 0, 'price': 0,
                                'volume_24h': 0, 'rsi': 0, 'rsi_signal': 'unknown',
                                'trend': 'unknown', 'source': 'spy',
                                'spy_trades': 0, 'spy_pnl': 0, 'spy_winrate': 0, 'spy_blocked': False,
                            }

                # 4) Enrich coins without a price (spy-only) from the public Binance API
                missing_syms = [
                    sym for sym, v in all_tradable.items()
                    if v.get('price', 0) == 0
                    and sym.isascii() and sym.isalnum()
                    and (sym.endswith('USDT') or sym.endswith('USDC'))
                    and len(sym) >= 5
                ]
                if missing_syms:
                    try:
                        import urllib.request as _ureq, urllib.parse as _uparse
                        # Binance accepts at most ~100 symbols per request via ?symbols=[...]
                        chunk_size = 100
                        for i in range(0, len(missing_syms), chunk_size):
                            chunk = missing_syms[i:i + chunk_size]
                            syms_json = json.dumps(chunk, separators=(',', ':'))
                            url = f'https://api.binance.com/api/v3/ticker/24hr?symbols={_uparse.quote(syms_json)}'
                            req = _ureq.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
                            with _ureq.urlopen(req, timeout=8) as resp:
                                tickers = json.loads(resp.read().decode())
                            for t in tickers:
                                sym = t.get('symbol', '')
                                price = float(t.get('lastPrice', 0))
                                if sym in all_tradable and price > 0:
                                    all_tradable[sym]['price'] = round(price, 8)
                                    all_tradable[sym]['change_24h'] = round(float(t.get('priceChangePercent', 0)), 2)
                                    all_tradable[sym]['volume_24h'] = round(float(t.get('quoteVolume', 0)), 0)
                    except Exception as e_binance:
                        logger.warning(f"Could not enrich spy coins from Binance: {e_binance}")

                # Sort by change_24h, descending
                sorted_tradable = sorted(all_tradable.values(), key=lambda x: x.get('change_24h', 0), reverse=True)

                summary['all_tradable'] = sorted_tradable
                summary['tradable_count'] = len(sorted_tradable)
                summary['bot_count'] = sum(1 for v in sorted_tradable if v['source'] in ('bot', 'both'))
                summary['spy_count'] = sum(1 for v in sorted_tradable if v['source'] in ('spy', 'both'))
                summary['both_count'] = sum(1 for v in sorted_tradable if v['source'] == 'both')
                summary['spy_blocked_count'] = sum(1 for v in sorted_tradable if v.get('spy_blocked'))
            except Exception as e:
                logger.warning(f"Could not load tradable values: {e}")

            request_handler.send_json_response({
                'success': True,
                'summary': summary
            })

        except ImportError:
            request_handler.send_json_response({
                'success': False,
                'error': 'Crypto fetcher not available'
            }, 503)
        except Exception as e:
            logger.error(f"Error in crypto_summary: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_crypto_refresh(self, request_handler) -> None:
        """GET /api/crypto-refresh - Force a refresh of the crypto data cache.

        Runs the async fetch on a dedicated event loop, then replies with the
        refreshed symbol count and update timestamp. Responds 503 when the
        fetcher module is unavailable, 500 on any other error.
        """
        try:
            from crypto_data_fetcher import get_fetcher

            # Always use PRODUCTION data (real market feed)
            fetcher = get_fetcher(use_testnet=False)

            # Use a fresh event loop to avoid conflicts with any loop the
            # surrounding server may own.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                data = loop.run_until_complete(fetcher.fetch_all_data(force=True))
            finally:
                # FIX: close the loop even when the fetch raises, and detach it
                # so a stale/closed loop is never left installed for this thread.
                asyncio.set_event_loop(None)
                loop.close()

            request_handler.send_json_response({
                'success': True,
                'message': 'Cache refreshed',
                'count': len(data.get('symbols', {})),
                'updated_at': data.get('updated_at')
            })

        except ImportError:
            request_handler.send_json_response({
                'success': False,
                'error': 'Crypto fetcher not available'
            }, 503)
        except Exception as e:
            logger.error(f"Error in crypto_refresh: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_opportunities(self, request_handler) -> None:
        """GET /api/opportunities - Current trading opportunities.

        Merges the opportunity payload into the success envelope. Responds
        503 when the fetcher module is missing, 500 on any other error.
        """
        try:
            from crypto_data_fetcher import get_opportunities

            # Always query PRODUCTION data (real market feed)
            picks = get_opportunities(use_testnet=False)

            envelope = {'success': True}
            envelope.update(picks)
            request_handler.send_json_response(envelope)

        except ImportError:
            request_handler.send_json_response({
                'success': False,
                'error': 'Crypto fetcher not available'
            }, 503)
        except Exception as e:
            logger.error(f"Error in opportunities: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_update_settings(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/update-settings - Update bot settings.

        Validates the payload, merges only the supplied fields into the
        existing settings file, and persists the result. Responds 400 on
        validation failure, 500 when saving fails or on unexpected errors.
        """
        try:
            # Validate the incoming payload
            parsed = SettingsUpdate(**data)

            settings_file = f"{self.script_dir}/bot_settings.json"
            current = load_json_file(settings_file, {})

            # Merge only the fields that were actually supplied
            current.update(parsed.dict(exclude_none=True))

            if not save_json_file(settings_file, current):
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Failed to save settings'
                }, 500)
                return

            request_handler.send_json_response({
                'success': True,
                'saved': current
            })

        except ValueError as e:
            request_handler.send_json_response({
                'success': False,
                'error': f'Validation error: {str(e)}'
            }, 400)
        except Exception as e:
            logger.error(f"Error in update_settings: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_force_close(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/force-close - Manually close an open position.

        Validates the symbol, looks up its current price and asks the trading
        service to close the position. Responds 400 on validation failure,
        404 when no price is available, 500 when closing fails.
        """
        try:
            # Validate the payload
            req = ForceCloseRequest(**data)

            # Reject symbols that fail the safety check
            if not self.validator.is_safe_symbol(req.symbol):
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Invalid symbol'
                }, 400)
                return

            # Fetch current market prices
            prices = asyncio.run(self.trading_service.get_current_prices())

            if req.symbol not in prices:
                request_handler.send_json_response({
                    'success': False,
                    'error': f'Price not found for {req.symbol}'
                }, 404)
                return

            exit_price = prices[req.symbol]

            # Close the position at the current price
            closed = self.trading_service.close_position(
                req.symbol,
                exit_price,
                reason='manual_close'
            )

            if closed:
                request_handler.send_json_response({
                    'success': True,
                    'message': f'Position {req.symbol} closed'
                })
            else:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Failed to close position'
                }, 500)

        except ValueError as e:
            request_handler.send_json_response({
                'success': False,
                'error': f'Validation error: {str(e)}'
            }, 400)
        except Exception as e:
            logger.error(f"Error in force_close: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_update_positions_sltp(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/update-positions-sltp - Apply new SL/TP to open positions.

        Requires both percentages in the payload; coerces them to float and
        delegates to the trading service. Responds 400 on missing or invalid
        values, 500 when the service reports failure or on unexpected errors.
        """
        try:
            sl_raw = data.get('stop_loss_pct')
            tp_raw = data.get('take_profit_pct')

            # Both parameters are mandatory
            if sl_raw is None or tp_raw is None:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'stop_loss_pct et take_profit_pct requis'
                }, 400)
                return

            # Coerce to float BEFORE touching the service (bad input → ValueError → 400)
            sl_pct = float(sl_raw)
            tp_pct = float(tp_raw)

            # Apply the changes
            outcome = self.trading_service.update_positions_sltp(sl_pct, tp_pct)

            request_handler.send_json_response(
                outcome, 200 if outcome.get('success') else 500
            )

        except ValueError as e:
            request_handler.send_json_response({
                'success': False,
                'error': f'Valeur invalide: {str(e)}'
            }, 400)
        except Exception as e:
            logger.error(f"Error in update_positions_sltp: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_sell_all(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/sell-all - Sell every open position IMMEDIATELY.

        Attempts direct closes via the trading service; when every close fails
        (or the whole call errors out), writes sell_all_signal.json so the bot
        performs the sale as a fallback. After a sale it writes a 5-minute
        trading_pause.json to prevent an immediate re-buy.
        """
        logger.info("🔴 SELL ALL REQUEST RECEIVED!")
        logger.info(f"   Data: {data}")
        try:
            logger.info("   Calling close_all_positions()...")
            sold_count, total_pnl, failed = asyncio.run(
                self.trading_service.close_all_positions()
            )
            logger.info(f"   Result: sold={sold_count}, pnl={total_pnl}, failed={len(failed)}")

            if sold_count > 0:
                message = f"{sold_count} position(s) vendues avec succès"
                if failed:
                    message += f" ({len(failed)} échec(s))"
                status = 'completed'
            elif failed:
                # 🔧 FIX: no sale succeeded via the API → create a signal so the bot retries (fallback)
                logger.warning(f"⚠️ 0 ventes directes réussies, {len(failed)} échecs → signal pour le bot")
                positions = load_json_file(os.path.join(self.script_dir, 'positions.json'), {})
                if positions:
                    signal_file = os.path.join(self.script_dir, 'sell_all_signal.json')
                    save_json_file(signal_file, {
                        'action': 'SELL_ALL',
                        'timestamp': datetime.now().isoformat(),
                        'symbols': list(positions.keys()),
                        'reason': 'manual_dashboard_fallback'
                    })
                    logger.info(f"📤 Signal sell_all créé pour {len(positions)} positions (fallback bot)")
                message = f"Signal envoyé au bot pour {len(positions)} position(s)"
                status = 'pending'
            else:
                message = "Aucune position à vendre"
                status = 'empty'

            # 🔧 FIX: do NOT empty positions.json here — already handled by close_all_positions()
            # (it removes only the positions actually sold, not the failed ones)

            # Create a pause file to prevent an immediate re-buy
            pause_file = os.path.join(self.script_dir, 'trading_pause.json')
            try:
                pause_until = datetime.now() + timedelta(minutes=5)
                pause_ts = pause_until.timestamp()  # 🔧 FIX: numeric timestamp, not an ISO string
                with open(pause_file, 'w', encoding='utf-8') as f:
                    json.dump({
                        'paused_until': pause_ts,  # 🔧 FIX: key 'paused_until' (with 'd') = the one the bot reads
                        'reason': 'SELL_ALL executed',
                        'timestamp': datetime.now().isoformat()
                    }, f)
                logger.info(f"⏸️ Pause créée jusqu'à {pause_until.strftime('%H:%M:%S')} (ts={pause_ts:.0f})")
            except Exception as e:
                logger.warning(f"Erreur création pause: {e}")

            request_handler.send_json_response({
                'success': sold_count > 0 or not failed,
                'sold': sold_count,
                'total_pnl': round(total_pnl, 2),
                'failed': failed,
                'message': message,
                'status': status
            })

        except Exception as e:
            logger.error(f"❌ Error in sell_all: {e}")
            # 🔧 FIX: on total failure, create a signal for the bot
            try:
                positions = load_json_file(os.path.join(self.script_dir, 'positions.json'), {})
                if positions:
                    signal_file = os.path.join(self.script_dir, 'sell_all_signal.json')
                    save_json_file(signal_file, {
                        'action': 'SELL_ALL',
                        'timestamp': datetime.now().isoformat(),
                        'symbols': list(positions.keys()),
                        'reason': 'manual_dashboard_error_fallback'
                    })
                    logger.info(f"📤 Signal sell_all d'urgence créé pour {len(positions)} positions")
                    request_handler.send_json_response({
                        'success': True,
                        'sold': 0,
                        'total_pnl': 0,
                        'failed': [str(e)],
                        'message': f"Signal envoyé au bot pour {len(positions)} position(s)",
                        'status': 'pending'
                    })
                else:
                    request_handler.send_json_response({
                        'success': False,
                        'error': str(e),
                        'message': 'Aucune position à vendre'
                    }, 500)
            except Exception as e2:
                logger.error(f"❌ Double error in sell_all: {e2}")
                request_handler.send_json_response({
                    'success': False,
                    'error': str(e)
                }, 500)

    def handle_reset_dashboard(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/reset-dashboard - Reset ALL counters and histories.

        Each counter/stat file is archived under trade_logs/archives_reset/
        before being emptied; plain-text logs are truncated too, and a
        reset_signal.json is written so the bot resynchronizes its memory.

        🔧 FIX 28/02: full reset of every counter/stat file, with archiving.
        """
        try:
            import shutil  # hoisted: used for both JSON files and text logs below

            files_cleared = []
            errors = []

            # 🔧 FIX 28/02: COMPLETE list of files to reset.
            # Dict: file name -> appropriate empty content.
            # 🔧 FIX 14/03: trade_history.json AND espion_history.json are now
            #   included in the reset (archived before being emptied); otherwise
            #   all old trades reappear on every bot/spy restart.
            files_to_clear = {
                # Positions & trades (operational)
                'positions.json': {},
                # Histories — archived then emptied ← FIX 14/03
                'trade_history.json': [],
                'espion_history.json': [],
                'rotation_history.json': [],
                # Trade logs (safe to clear, archived first)
                'trade_logs/trades_log.jsonl': '__EMPTY_FILE__',
                'trade_logs/signals_log.jsonl': '__EMPTY_FILE__',
                # Stats & analysis
                'ai_opportunities.json': [],
                'ai_training_stats.json': {},
                'ai_self_optimizer_results.json': {},
                'bot_analysis.json': {},
                'performance_stats.json': {},
                'ia_surveillance_cache.json': {},
                # Auto-updater
                'auto_updater_status.json': {},
                # Spy (market spy) - open positions only
                'espion_trades.json': {},
                'espion_opportunities.json': [],
                'spy_status.json': {},
            }

            # 📦 Archive before clearing
            archive_dir = os.path.join(self.script_dir, 'trade_logs', 'archives_reset')
            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            archive_path = os.path.join(archive_dir, f'dashboard_reset_{timestamp}')
            try:
                os.makedirs(archive_path, exist_ok=True)
                logger.info(f"📦 Archive créée: {archive_path}")
            except Exception as e:
                logger.warning(f"Impossible de créer l'archive: {e}")
                archive_path = None

            for filename, empty_content in files_to_clear.items():
                file_path = os.path.join(self.script_dir, filename)
                try:
                    # Archive the file if it exists
                    if archive_path and os.path.exists(file_path):
                        archive_file = os.path.join(archive_path, os.path.basename(filename))
                        shutil.copy2(file_path, archive_file)

                    # Empty the file with the appropriate content
                    with open(file_path, 'w', encoding='utf-8') as f:
                        if empty_content == '__EMPTY_FILE__':
                            pass  # Empty file (JSONL)
                        else:
                            json.dump(empty_content, f, indent=2)
                    files_cleared.append(filename)
                    # FIX: log the actual file name (the placeholder was lost before)
                    logger.info(f"✅ {filename} réinitialisé")
                except Exception as e:
                    # FIX: identify the failing file in the reported error
                    errors.append(f"{filename}: {str(e)}")
                    logger.error(f"❌ Erreur réinitialisation {filename}: {e}")

            # Also truncate the plain-text logs
            for log_file in ['trading_bot.log', 'bot_output.txt', 'bot_error.txt', 'bot_debug.txt']:
                log_path = os.path.join(self.script_dir, log_file)
                try:
                    if archive_path and os.path.exists(log_path):
                        shutil.copy2(log_path, os.path.join(archive_path, log_file))
                    with open(log_path, 'w', encoding='utf-8') as f:
                        f.write(f"--- Reset compteurs {timestamp} ---\n")
                    files_cleared.append(log_file)
                except Exception:
                    pass  # Non-critical

            # Drop a signal file so the bot resynchronizes its memory
            signal_file = os.path.join(self.script_dir, 'reset_signal.json')
            try:
                with open(signal_file, 'w', encoding='utf-8') as f:
                    json.dump({
                        'action': 'RESET_DASHBOARD',
                        'timestamp': datetime.now().isoformat(),
                        'files_cleared': files_cleared
                    }, f)
            except Exception as e:
                logger.warning(f"Signal reset non créé: {e}")

            if files_cleared:
                request_handler.send_json_response({
                    'success': True,
                    'cleared': files_cleared,
                    'errors': errors,
                    'archive': archive_path if archive_path else None,
                    'message': f'{len(files_cleared)} fichier(s) réinitialisé(s)'
                })
            else:
                request_handler.send_json_response({
                    'success': False,
                    'errors': errors,
                    'message': 'Aucun fichier réinitialisé'
                }, 500)

        except Exception as e:
            logger.error(f"Error in reset_dashboard: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_restart_bot(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/restart-bot - Restart the trading bot process.

        Delegates to the bot service; a truthy PID means success, anything
        else (or an exception) yields a 500 response.
        """
        try:
            new_pid = self.bot_service.restart_bot()

            if not new_pid:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Failed to restart bot'
                }, 500)
                return

            request_handler.send_json_response({
                'success': True,
                'pid': new_pid,
                'message': 'Bot restarted successfully'
            })

        except Exception as e:
            logger.error(f"Error in restart_bot: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_bot_disabled_status(self, request_handler) -> None:
        """GET /api/bot-disabled-status - Report whether the bot was manually disabled.

        The manual-disable state is simply the presence of a ``bot.disabled``
        flag file in the script directory.
        """
        try:
            flag_path = os.path.join(self.script_dir, 'bot.disabled')
            is_disabled = os.path.exists(flag_path)
            state_msg = 'Bot désactivé manuellement' if is_disabled else 'Bot activé'
            request_handler.send_json_response({
                'success': True,
                'disabled': is_disabled,
                'message': state_msg
            })
        except Exception as e:
            logger.error(f"Error in bot_disabled_status: {e}")
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    def handle_bot_toggle(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/bot-toggle - Manually enable or disable the bot.

        'disable' writes the ``bot.disabled`` flag file and SIGTERMs any
        running bot/watchdog processes; 'enable' removes the flag file (the
        watchdog is not restarted here). Any other action yields a 400.
        """
        try:
            flag_path = os.path.join(self.script_dir, 'bot.disabled')
            requested = data.get('action', '')  # expected: 'disable' or 'enable'

            if requested == 'enable':
                # Remove the flag file; the caller restarts the watchdog
                if os.path.exists(flag_path):
                    os.remove(flag_path)
                request_handler.send_json_response({
                    'success': True,
                    'disabled': False,
                    'message': 'Bot activé — relancez le watchdog manuellement si nécessaire'
                })
                return

            if requested != 'disable':
                request_handler.send_json_response({'success': False, 'error': 'action doit être "enable" ou "disable"'}, 400)
                return

            # 'disable': write the flag file, then kill bot + watchdog if running
            with open(flag_path, 'w') as f:
                f.write(f"Disabled manually at {datetime.now().isoformat()}\n")

            import signal as sig_mod
            import psutil
            killed = []
            for proc in psutil.process_iter(['pid', 'name', 'cmdline']):
                try:
                    cmd = ' '.join(proc.info.get('cmdline') or [])
                    if ('trading_bot.py' in cmd or 'bot_watchdog.py' in cmd) \
                            and proc.status() != psutil.STATUS_ZOMBIE:
                        proc.send_signal(sig_mod.SIGTERM)
                        killed.append(proc.pid)
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    pass

            request_handler.send_json_response({
                'success': True,
                'disabled': True,
                'killed_pids': killed,
                'message': f'Bot désactivé et arrêté (PIDs: {killed})'
            })

        except Exception as e:
            logger.error(f"Error in bot_toggle: {e}")
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    def handle_save_watchlist(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/save-watchlist - Save the watchlist.

        Pipeline: validate the payload, drop non-tradeable symbols
        (stablecoin pairs, wrapped assets, fiat/commodity pairs), refuse
        saves that would shrink an established list by more than half
        (safety guard), persist while preserving spy metadata, then resync
        the in-memory AI surveillance so scan counters stay coherent.
        """
        try:
            # Validate the payload via the request model
            # (validation failures surface as ValueError — handled below)
            request = WatchlistRequest(**data)

            # 🔧 FIX 02/03: filter out non-tradeable symbols BEFORE saving
            NON_EXPLOITABLE = {
                'USDTUSDT', 'USDCUSDT', 'BUSDUSDT', 'TUSDUSDT', 'DAIUSDT',
                'FDUSDUSDT', 'USDPUSDT', 'FRAXUSDT', 'LUSDUSDT', 'USTCUSDT',
                'PYUSDUSDT', 'USD1USDT', 'GUSDUSDT',
                'WBTCUSDT', 'BETHUSDT', 'CBETHUSDT', 'STETHUSDT', 'WBNBUSDT',
                'WETHUSDT', 'RETHUSDT', 'WBETHUSDT',
                'EURUSDT', 'GBPUSDT', 'BRLUSDT', 'AUDUSDT', 'TRYUSDT',
                'BIDRUSDT', 'UAHUSDT', 'BKRWUSDT', 'JPYCUSDT', 'AEURUSDT',
                'PAXGUSDT', 'XAUTUSDT',
            }
            # Keep only ASCII USDT/USDC pairs of plausible length
            filtered_symbols = [
                s for s in request.symbols
                if s.isascii() and (s.endswith('USDT') or s.endswith('USDC')) and len(s) >= 5 and s not in NON_EXPLOITABLE
            ]
            removed_count = len(request.symbols) - len(filtered_symbols)
            if removed_count > 0:
                logger.info(f"[WATCHLIST] {removed_count} symbole(s) non exploitable(s) filtré(s)")

            # Save — preserve spy metadata (auto_added, spy_injected)
            watchlist_file = f"{self.script_dir}/watchlist.json"
            existing_data = {}
            existing_count = 0
            try:
                with open(watchlist_file, 'r', encoding='utf-8') as f:
                    existing_data = json.load(f)
                existing_count = len(existing_data.get('symbols', []))
            except Exception:
                # Missing/corrupt file: treat as an empty existing watchlist
                pass

            # 🔒 SAFETY GUARD: refuse if the new list shrinks the existing one
            # by more than 50%, unless the list is small (< 20 symbols) —
            # protects against accidental wipes; explicit reset must override
            if existing_count >= 20 and len(filtered_symbols) < existing_count // 2:
                logger.warning(f"[WATCHLIST] Refus sauvegarde: {len(filtered_symbols)} < 50% de {existing_count} (sécurité)")
                request_handler.send_json_response({
                    'success': False,
                    'error': f'Safety guard: saving {len(filtered_symbols)} symbols would drop from {existing_count} (>50% reduction). Use explicit reset to override.'
                }, 400)
                return

            watchlist_data = {
                'symbols': filtered_symbols,
                'updated_at': datetime.now().isoformat(),
                'count': len(filtered_symbols),
                'auto_added': existing_data.get('auto_added', {}),
                'spy_injected': existing_data.get('spy_injected', {}),
            }

            if save_json_file(watchlist_file, watchlist_data):
                # 🔧 FIX: resync the in-memory AI surveillance so SCAN/FILTER
                # counters stay coherent — otherwise symbols_to_watch keeps the
                # old symbol count and analyzed > total_symbols
                try:
                    from ai_predictor import get_surveillance_service
                    surveillance = get_surveillance_service()
                    if surveillance and hasattr(surveillance, 'set_symbols'):
                        # Include auto_added + spy_injected so surveillance scans everything
                        all_syms = list(dict.fromkeys(
                            filtered_symbols
                            + list(existing_data.get('auto_added', {}).keys())
                            + list(existing_data.get('spy_injected', {}).keys())
                        ))
                        surveillance.set_symbols(all_syms)
                        logger.info(f"[WATCHLIST] Surveillance IA synchronisée: {len(all_syms)} symboles ({len(filtered_symbols)} manuels + {len(all_syms)-len(filtered_symbols)} auto)")
                except Exception as _e:
                    # Best-effort: a failed sync must not fail the save itself
                    logger.warning(f"[WATCHLIST] Surveillance IA non synchronisée: {_e}")

                msg = f'{len(filtered_symbols)} symbols saved'
                if removed_count > 0:
                    msg += f' ({removed_count} non-exploitable(s) filtré(s))'
                request_handler.send_json_response({
                    'success': True,
                    'count': len(filtered_symbols),
                    'message': msg
                })
            else:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Failed to save watchlist'
                }, 500)

        except ValueError as e:
            # WatchlistRequest validation failures land here → HTTP 400
            request_handler.send_json_response({
                'success': False,
                'error': f'Validation error: {str(e)}'
            }, 400)
        except Exception as e:
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)
            logger.error(f"Error in save_watchlist: {e}")

    def handle_ai_surveillance(self, request_handler) -> None:
        """GET /api/ai-surveillance - AI surveillance status with Smart Criteria.

        Primary source is the bot's on-disk cache file; if it is missing,
        empty or unreadable after 3 attempts, falls back to polling the
        AI predictor / surveillance singletons directly.
        """
        try:
            status = None

            # === READ FROM THE BOT'S CACHE FILE (with retry) ===
            cache_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), "ia_surveillance_cache.json")

            # Retry up to 3 times in case the cache file is mid-write
            for attempt in range(3):
                if os.path.exists(cache_file):
                    try:
                        with open(cache_file, 'r', encoding='utf-8') as f:
                            status = json.load(f)
                        # Accept only a non-empty cache (analyzed > 0)
                        if status and status.get('analyzed', 0) > 0:
                            status['timestamp'] = status.get('timestamp', datetime.now().isoformat())
                            break
                        else:
                            logger.warning(f"Cache IA vide ou invalide (attempt {attempt+1})")
                            status = None
                    except Exception as e:
                        logger.warning(f"Erreur lecture cache IA (attempt {attempt+1}): {e}")
                        status = None
                if attempt < 2:
                    _time.sleep(0.2)  # wait 200ms before retrying

            # === FALLBACK: DIRECT POLLING MODE ===
            if not status:
                try:
                    from ai_predictor import get_ai_predictor, get_surveillance_service

                    predictor = get_ai_predictor()
                    surveillance = get_surveillance_service()
                    watchlist = predictor.get_watchlist()

                    # === SMART ENTRY CRITERIA v3.0 ===
                    # Classify entries by Smart status.
                    # 🔴 FIX 09/02: keep ACHAT only for buyable patterns (excludes NEUTRAL, UNKNOWN)
                    BUYABLE_PATTERNS = [
                        'CREUX_REBOUND', 'PULLBACK', 'SQUEEZE_BREAKOUT', 'EARLY_BREAKOUT',
                        'CONSOLIDATION_BREAKOUT', 'EMA_BULLISH', 'CROSSOVER_IMMINENT',
                        'VOLUME_REVERSAL', 'RSI_REVERSAL', 'STRONG_UPTREND', 'HIGH_SCORE_OVERRIDE'
                    ]
                    smart_achat = [w for w in watchlist if w.get('smart_signal') == 'ACHAT' and w.get('pattern', '') in BUYABLE_PATTERNS]
                    smart_possible = [w for w in watchlist if w.get('smart_signal') == 'POSSIBLE']
                    # VENTE = entries with a sell signal OR any open position (EN_POSITION)
                    smart_vente = [w for w in watchlist if w.get('smart_signal') in ['VENTE', 'EN_POSITION']]
                    smart_abandonnee = [w for w in watchlist if w.get('smart_signal') == 'ABANDONNEE']

                    # Priority-eligible entries
                    eligible_values = [w for w in watchlist if w.get('smart_eligible') == True]

                    # Convert numpy bool to Python bool for JSON serialization
                    is_running = bool(surveillance.is_running) if surveillance else False

                    # === ROTATION DATA ===
                    rotation_opportunities = []
                    rotation_status = None
                    # 🔧 FIX: initialize to None (not []) so the smart_achat
                    # fallback below can actually trigger when surveillance data
                    # is unavailable — with the previous [] default the
                    # `is not None` test was always true (dead fallback).
                    validated_ready = None
                    validated_summary = None
                    validated_by_status = None
                    try:
                        surveillance_status = surveillance.get_surveillance_status()
                        rotation_opportunities = surveillance_status.get('rotation_opportunities', [])
                        rotation_status = surveillance_status.get('rotation_status')
                        # 🔴 FIX 09/02: use signals VALIDATED by surveillance (regime + momentum)
                        validated_ready = surveillance_status.get('ready_signals')
                        validated_summary = surveillance_status.get('smart_summary')
                        validated_by_status = surveillance_status.get('by_status')
                    except Exception as rot_error:
                        logger.debug(f"Error getting rotation data: {rot_error}")

                    # Prefer surveillance-validated data (regime + momentum filters)
                    final_achat = validated_ready if validated_ready is not None else smart_achat
                    final_summary = validated_summary if validated_summary else {
                        'achat': int(len(final_achat)),
                        'possible': int(len(smart_possible)),
                        'vente': int(len(smart_vente)),
                        'abandonnee': int(len(smart_abandonnee)),
                        'eligible': int(len(eligible_values))
                    }
                    final_by_status = validated_by_status if validated_by_status else {
                        'achat': final_achat[:5],
                        'en_surveillance': smart_possible[:10],
                        'vente': smart_vente[:25],
                        'abandonnee': smart_abandonnee[:5]
                    }

                    status = {
                        'is_running': is_running,
                        'mode': 'smart_criteria_v3',
                        'update_interval': int(surveillance.update_interval) if surveillance else 5,
                        'ai_available': True,
                        'smart_criteria_available': True,
                        # 🔧 FIX 16/04: symbols_to_watch includes manual + auto_added + spy_injected
                        'total_symbols': int(len(surveillance.symbols_to_watch)) if surveillance and hasattr(surveillance, 'symbols_to_watch') else 0,
                        'analyzed': int(len(surveillance.symbols_to_watch)) if surveillance and hasattr(surveillance, 'symbols_to_watch') else int(len(watchlist)),
                        'ready_to_buy': int(len(final_achat)),
                        'watching': int(len(smart_possible)),
                        'top_opportunities': eligible_values[:10],  # top 10 eligible
                        'ready_signals': final_achat,
                        # Smart rotation
                        'rotation_opportunities': rotation_opportunities[:5],
                        'rotation_status': rotation_status,
                        # Smart Criteria states (VALIDATED by surveillance)
                        'smart_summary': final_summary,
                        # Per-state lists (VALIDATED by surveillance)
                        'by_status': final_by_status
                    }

                except ImportError as e:
                    logger.warning(f"AI module not available: {e}")
                    status = {
                        'is_running': False,
                        'mode': 'none',
                        'ai_available': False,
                        'total_symbols': 0,
                        'analyzed': 0,
                        'ready_to_buy': 0,
                        'watching': 0,
                        'top_opportunities': [],
                        'ready_signals': []
                    }

            request_handler.send_json_response({
                'success': True,
                'data': status,
                'timestamp': datetime.now().isoformat()
            })

        except Exception as e:
            logger.error(f"Error in ai_surveillance: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_market_regime(self, request_handler) -> None:
        """GET /api/market-regime - Global market regime status.

        Responses are cached for 60s because regime detection hits the
        Binance API directly. Detection runs in a daemon thread with a
        25s join timeout; on timeout the detector's last cached regime
        is served instead.
        """
        try:
            # 🚀 PERF: 60s cache — detect_regime makes live Binance calls
            cached = _cache_get('market_regime', ttl=60.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            # Defaults served when the regime module is unavailable or fails
            regime_data = {
                'regime': 'NEUTRAL',
                'available': False,
                'max_positions': 20,
                'min_score': 65,
                'position_size_pct': 100,
                'description': 'Régime non disponible',
                'global_score': 50,
                'btc': {},
                'altcoins': {},
                'last_update': None
            }

            try:
                from market_regime import get_market_regime_detector, REGIME_CONFIG

                # Singleton detector (recreated on next server restart)
                detector = get_market_regime_detector()

                # Refresh only when cached market data is missing or older
                # than 5 minutes (limits Binance API load)
                market_data = detector.market_data
                needs_fresh_data = True
                if market_data and market_data.get('timestamp'):
                    try:
                        timestamp_str = market_data['timestamp']
                        # Support both formats (with/without timezone)
                        if '+' in timestamp_str or timestamp_str.endswith('Z'):
                            last_update = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
                            if last_update.tzinfo:
                                last_update = last_update.replace(tzinfo=None)
                        else:
                            last_update = datetime.fromisoformat(timestamp_str)

                        age_minutes = (datetime.now() - last_update).total_seconds() / 60
                        needs_fresh_data = age_minutes > 5
                    except Exception:
                        needs_fresh_data = True

                # signal.alarm is unavailable on Windows, so run detection in
                # a daemon thread and enforce the timeout via join()
                import threading
                result_holder = {'regime_name': None, 'regime_config': None, 'error': None}

                def detect_with_timeout():
                    try:
                        r_name, r_config = detector.detect_regime(force_update=needs_fresh_data)
                        result_holder['regime_name'] = r_name
                        result_holder['regime_config'] = r_config
                    except Exception as e:
                        result_holder['error'] = str(e)

                detection_thread = threading.Thread(target=detect_with_timeout)
                detection_thread.daemon = True
                detection_thread.start()
                detection_thread.join(timeout=25)  # 25s max (first analysis can be slow)

                if detection_thread.is_alive():
                    # Timeout — fall back to the detector's cached state
                    logger.warning("Market regime detection timeout - using cached data")
                    regime_name = detector.current_regime or 'NEUTRAL'
                    regime_config = detector.regime_config or REGIME_CONFIG.get('NEUTRAL', {})
                elif result_holder['error']:
                    # Re-raised here so the generic handler below logs it and
                    # keeps serving the default regime_data
                    raise Exception(result_holder['error'])
                else:
                    regime_name = result_holder['regime_name']
                    regime_config = result_holder['regime_config']

                market_data = detector.market_data

                # Real SL/TP from config.py (hard defaults if missing)
                try:
                    from config import STOP_LOSS_PERCENT, TAKE_PROFIT_PERCENT
                    actual_sl = STOP_LOSS_PERCENT
                    actual_tp = TAKE_PROFIT_PERCENT
                except ImportError:
                    actual_sl = 2.5
                    actual_tp = 4.0

                # 🔧 FIX 27/02: effective SL/TP mirror the hardcoded values in
                # trading_bot.py (L3843-3871). The old take_profit_multiplier
                # approach produced wrong values (BEAR=2.0% instead of 3.0%).
                tp_mult = regime_config.get('take_profit_multiplier', 1.0)
                if regime_name in ('BEAR', 'CORRECTION'):
                    effective_sl = 1.5   # tight SL
                    effective_tp = 3.0   # short TP (R/R = 2:1)
                elif regime_name == 'NEUTRAL':
                    effective_sl = 1.5
                    effective_tp = 2.5   # R/R = 1.67:1
                else:
                    effective_sl = actual_sl
                    effective_tp = actual_tp
                effective_rr = round(effective_tp / effective_sl, 1) if effective_sl > 0 else 1.0

                regime_data = {
                    'regime': regime_name,
                    'available': True,
                    'max_positions': regime_config['max_positions'],
                    'min_score': regime_config['min_score'],
                    'position_size_pct': regime_config['position_size_pct'],
                    'tp_multiplier': tp_mult,
                    'description': regime_config['description'],
                    'global_score': market_data.get('global_score', 50),
                    'dynamic_sl': effective_sl,
                    'dynamic_tp': effective_tp,
                    'dynamic_rr': effective_rr,
                    'sltp_mode': 'ADAPTATIF',
                    'btc': market_data.get('btc', {}),
                    'altcoins': market_data.get('altcoins', {}),
                    'last_update': market_data.get('timestamp'),
                    'history': detector.regime_history[-5:]  # last 5 changes
                }

            except ImportError as e:
                logger.warning(f"Market Regime module not available: {e}")
            except Exception as e:
                logger.error(f"Error getting market regime: {e}")

            result = {
                'success': True,
                'data': regime_data,
                'timestamp': datetime.now().isoformat()
            }
            _cache_set('market_regime', result)
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in market_regime: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_ai_watchlist(self, request_handler, query_params: Dict = None) -> None:
        """GET /api/ai-watchlist - Cryptos currently under AI surveillance.

        Query params:
            - interval: Binance interval (1m, 5m, 15m, 1h, 4h, 1d) - optional
            - symbol: specific symbol to analyze on demand - optional
        """
        try:
            # Pull optional query parameters (values arrive as lists)
            interval = query_params.get('interval', [None])[0] if query_params else None
            symbol = query_params.get('symbol', [None])[0] if query_params else None

            # Unknown intervals silently fall back to the 5m default
            if interval and interval not in ('1m', '5m', '15m', '1h', '4h', '1d'):
                interval = '5m'

            try:
                from ai_predictor import get_ai_predictor
                predictor = get_ai_predictor()

                if interval and symbol:
                    # On-demand analysis of one symbol/interval pair.
                    # Install a temporary klines fetcher if none is configured.
                    if predictor.klines_fetcher is None:
                        def _fetch_binance_klines(sym: str, intv: str, limit: int):
                            """Fetch raw klines straight from the Binance REST API."""
                            try:
                                url = f"https://api.binance.com/api/v3/klines?symbol={sym}&interval={intv}&limit={limit}"
                                resp = requests.get(url, timeout=10)
                                if resp.status_code == 200:
                                    return resp.json()
                            except Exception as e:
                                logger.warning(f"Erreur fetch klines {sym}: {e}")
                            return None

                        predictor.set_klines_fetcher(_fetch_binance_klines)

                    analysis = predictor.analyze_symbol_for_interval(symbol, interval)
                    if analysis:
                        request_handler.send_json_response({
                            'success': True,
                            'mode': 'realtime_analysis',
                            'interval': interval,
                            'watchlist': [analysis],
                            'count': 1,
                            'timestamp': datetime.now().isoformat()
                        })
                        return

                # Default: the standard watchlist (5m-based surveillance)
                current_watchlist = predictor.get_watchlist()
                response_data = {
                    'success': True,
                    'mode': f'requested_{interval}' if interval else 'cached_5m',
                    'base_interval': '5m',  # base interval of the surveillance loop
                    'watchlist': current_watchlist,
                    'count': len(current_watchlist),
                    'timestamp': datetime.now().isoformat()
                }

            except ImportError:
                # AI module not installed — serve an empty but successful payload
                response_data = {
                    'success': True,
                    'mode': 'unavailable',
                    'watchlist': [],
                    'count': 0,
                    'timestamp': datetime.now().isoformat()
                }

            request_handler.send_json_response(response_data)

        except Exception as e:
            logger.error(f"Error in ai_watchlist: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_ai_training_info(self, request_handler) -> None:
        """GET /api/ai-training-info - Training details of the AI model."""
        try:
            try:
                # Normal path: query the predictor singleton
                from ai_predictor import get_ai_predictor
                training_info = get_ai_predictor().get_training_info()
            except ImportError as e:
                # AI stack missing — serve a zeroed-out placeholder payload
                training_info = {
                    'level': 0,
                    'status': 'unavailable',
                    'status_label': '❌ Module IA non disponible',
                    'samples_count': 0,
                    'epochs': 0,
                    'accuracy': 0,
                    'loss': 0,
                    'validation_accuracy': 0,
                    'predictions_made': 0,
                    'correct_predictions': 0,
                    'last_training': None,
                    'gpu_name': 'N/A',
                    'gpu_available': False,
                    'model_loaded': False,
                    'error': str(e)
                }

            request_handler.send_json_response({
                'success': True,
                'data': training_info,
                'timestamp': datetime.now().isoformat()
            })

        except Exception as e:
            logger.error(f"Error in ai_training_info: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_auto_updater_status(self, request_handler) -> None:
        """GET /api/auto-updater-status - Status of the auto-update service.

        Combines a PID liveness check with the on-disk status file and
        derives human-readable "last run" / "next run" labels for each
        scheduled task (historical data, AI training, quick sync).
        """
        try:
            status_file = os.path.join(self.script_dir, 'auto_updater_status.json')
            pid_file = os.path.join(self.script_dir, 'auto_updater.pid')

            # Service counts as running only if the PID file points to a live process
            is_running = False
            if os.path.exists(pid_file):
                try:
                    with open(pid_file, 'r') as f:
                        pid = int(f.read().strip())
                    import psutil
                    is_running = psutil.pid_exists(pid)
                except Exception:
                    # Stale/corrupt PID file or psutil failure → assume stopped
                    is_running = False

            # Last-known status written by the service (best-effort)
            status_data = {}
            if os.path.exists(status_file):
                try:
                    with open(status_file, 'r') as f:
                        status_data = json.load(f)
                except Exception:
                    pass

            # Scheduling intervals per task, in seconds
            intervals = {
                'historical_data': 6 * 3600,  # 6h
                'ai_training': 24 * 3600,      # 24h
                'quick_sync': 1 * 3600,        # 1h
            }

            def calculate_next_run(last_update_str, interval_seconds):
                """Human-readable ETA of the next run of one task."""
                if not last_update_str:
                    if not is_running:
                        return "Service arrêté"
                    return "Immédiat"
                try:
                    last_dt = datetime.fromisoformat(last_update_str)
                    next_dt = last_dt + timedelta(seconds=interval_seconds)
                    now = datetime.now()

                    # Hide countdowns when the service is stopped
                    if not is_running:
                        return "Service arrêté"

                    if next_dt <= now:
                        return "Immédiat"

                    remaining = next_dt - now
                    hours = int(remaining.total_seconds() // 3600)
                    minutes = int((remaining.total_seconds() % 3600) // 60)

                    if hours > 0:
                        return f"Prévu dans ~{hours}h{minutes}m"
                    else:
                        return f"Prévu dans ~{minutes}m"
                except Exception:
                    return "Inconnu"

            def format_last_update(last_update_str):
                """Human-readable age of the last run of one task."""
                if not last_update_str:
                    return "Jamais"
                try:
                    last_dt = datetime.fromisoformat(last_update_str)
                    now = datetime.now()
                    diff = now - last_dt

                    hours = int(diff.total_seconds() // 3600)
                    minutes = int((diff.total_seconds() % 3600) // 60)

                    if hours > 24:
                        days = hours // 24
                        return f"Il y a {days}j {hours % 24}h"
                    elif hours > 0:
                        return f"Il y a {hours}h {minutes}min"
                    elif minutes > 0:
                        return f"Il y a {minutes}min"
                    else:
                        return "À l'instant"
                except Exception:
                    # Unparseable timestamp: show it raw rather than lying
                    return last_update_str

            response_data = {
                'is_running': is_running,
                'started_at': status_data.get('started_at'),
                'last_historical_update': status_data.get('last_historical_update'),
                'last_historical_update_formatted': format_last_update(status_data.get('last_historical_update')),
                'last_ai_training': status_data.get('last_ai_training'),
                'last_ai_training_formatted': format_last_update(status_data.get('last_ai_training')),
                'last_quick_sync': status_data.get('last_quick_sync'),
                'last_quick_sync_formatted': format_last_update(status_data.get('last_quick_sync')),
                'next_historical_update': calculate_next_run(status_data.get('last_historical_update'), intervals['historical_data']),
                'next_ai_training': calculate_next_run(status_data.get('last_ai_training'), intervals['ai_training']),
                'next_quick_sync': calculate_next_run(status_data.get('last_quick_sync'), intervals['quick_sync']),
                'total_updates': status_data.get('total_updates', 0),
                'total_trainings': status_data.get('total_trainings', 0),
                'errors': status_data.get('errors', [])[-5:],  # last 5 errors
            }

            request_handler.send_json_response({
                'success': True,
                'data': response_data,
                'timestamp': datetime.now().isoformat()
            })

        except Exception as e:
            logger.error(f"Error in auto_updater_status: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)
    
    def handle_trade_analysis(self, request_handler, query_params: Dict = None) -> None:
        """GET /api/trade-analysis - Trade analysis built from logs + spy data.

        Dynamically loads ``analyze_trade_logs.py`` from the script
        directory, generates a report for the requested window (``hours``
        query param, default 24h), then enriches it with SPY and BOT
        performance stats. Returns 404 when the analyzer module is absent.
        """
        try:
            # Requested window in hours (default 24; malformed values fall back)
            hours = 24
            if query_params and 'hours' in query_params:
                try:
                    hours = int(query_params['hours'][0])
                except (ValueError, TypeError, IndexError):
                    hours = 24

            analyzer_path = os.path.join(self.script_dir, 'analyze_trade_logs.py')

            if not os.path.exists(analyzer_path):
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Analyze module not found'
                }, 404)
                return

            # Import the analyzer module dynamically from its file path
            import importlib.util
            spec = importlib.util.spec_from_file_location("analyzer", analyzer_path)
            analyzer_module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(analyzer_module)

            # Build the analyzer and generate the BOT report
            analyzer = analyzer_module.TradeAnalyzer(log_dir=os.path.join(self.script_dir, 'trade_logs'))
            report = analyzer.generate_report(hours)

            report['generated_at'] = datetime.now().isoformat()
            report['period_hours'] = hours

            # ── Add SPY performance from espion_history.json ──
            try:
                cutoff = datetime.now() - timedelta(hours=hours)
                spy_stats = self._compute_spy_stats(cutoff)
                report['spy'] = spy_stats

                # ── Add BOT performance from trade_history.json ──
                bot_stats = self._compute_bot_stats(cutoff)
                report['bot'] = bot_stats
            except Exception as e:
                # Best-effort enrichment: missing stats must not fail the report
                logger.warning(f"Could not compute spy/bot stats: {e}")
                report['spy'] = None
                report['bot'] = None

            request_handler.send_json_response({
                'success': True,
                'data': report
            })

        except Exception as e:
            logger.error(f"Error in trade_analysis: {e}")
            import traceback
            traceback.print_exc()
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)
    
    def handle_get_sltp_config(self, request_handler) -> None:
        """GET /api/get-sltp-config - Return the current SL/TP configuration.

        Responds with the stop-loss / take-profit percentages, their
        risk/reward ratio and SL/TP usage statistics over the last 7 days.
        404 when the config file is missing, 500 on unexpected errors.
        """
        try:
            config = self.config_service.read_config()

            if config is None:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Config file not found'
                }, 404)
                return

            # Current SL/TP values (fall back to defaults when absent)
            stop_loss = config.get('STOP_LOSS_PERCENT', 2.5)
            take_profit = config.get('TAKE_PROFIT_PERCENT', 1.5)

            # Usage statistics over the last 7 days (best-effort)
            try:
                stats = self._calculate_sltp_usage_stats()
            except Exception as e:
                logger.warning(f"Could not calculate SL/TP stats: {e}")
                stats = {
                    'total': 0,
                    'ia_dynamic': 0,
                    'config_fallback': 0,
                    'hybrid': 0
                }

            # Guard against a zero stop-loss in a hand-edited config file
            # (would otherwise raise ZeroDivisionError)
            risk_reward = round(take_profit / stop_loss, 2) if stop_loss else 0

            request_handler.send_json_response({
                'success': True,
                'config': {
                    'stop_loss': stop_loss,
                    'take_profit': take_profit,
                    'risk_reward_ratio': risk_reward
                },
                'stats': stats
            })

        except Exception as e:
            logger.error(f"Error in get_sltp_config: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)
    
    def handle_analysis_logs(self, request_handler, query_params: Dict) -> None:
        """GET /api/analysis-logs - Return recent AI analysis log entries.

        Parses dashboard_log.txt for AIPredictor lines, classifies each one
        (bonus / penalty / final / warning / alert / info) and returns the
        last 50 entries. Results are cached server-side for 15 seconds.
        """
        try:
            # PERF: 15s cache — avoids re-reading and re-parsing the log file
            cached = _cache_get('analysis_logs', ttl=15.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            import re
            from collections import deque

            # dashboard_log.txt holds the AI logs with timestamps
            log_file = os.path.join(self.script_dir, 'dashboard_log.txt')
            logs = []

            if os.path.exists(log_file):
                # Keep only the last 200 lines; tolerate encoding issues
                try:
                    with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
                        lines = deque(f, maxlen=200)
                except OSError as e:
                    logger.warning(f"analysis_logs: cannot read log file: {e}")
                    lines = []

                # Compile once, outside the parsing loop
                # Pattern: HH:MM:SS [INFO] AIPredictor: emoji SYMBOL: message
                line_re = re.compile(r'(\d{2}:\d{2}:\d{2}).*AIPredictor:\s+(.+)')
                symbol_re = re.compile(r'([A-Z]{3,10}USDT)')

                for line in lines:
                    if '[INFO] AIPredictor:' not in line:
                        continue
                    match = line_re.search(line)
                    if not match:
                        continue

                    time_str = match.group(1)
                    content = match.group(2).strip()

                    symbol_match = symbol_re.search(content)
                    symbol = symbol_match.group(1) if symbol_match else ''

                    # Classify the entry by the markers the AI writes into the message
                    if '⭐' in content or 'TOP 20' in content or 'WHITELIST' in content or 'BONUS' in content:
                        log_type = 'bonus'
                    elif '⛔' in content or 'BLOQUÉ' in content:
                        log_type = 'penalty'
                    elif '🎯' in content or 'Score final' in content:
                        log_type = 'final'
                    elif '⚠️' in content or 'penalty' in content:
                        log_type = 'warning'
                    elif '🚨' in content or 'CRASH' in content or 'ALERTE' in content:
                        log_type = 'alert'
                    else:
                        log_type = 'info'

                    logs.append({
                        'timestamp': time_str,
                        'symbol': symbol,
                        'type': log_type,
                        'message': content
                    })

            # Keep only the 50 most recent entries
            logs = logs[-50:]

            result = {
                'success': True,
                'logs': logs,
                'count': len(logs)
            }
            _cache_set('analysis_logs', result)
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in analysis_logs: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e),
                'logs': []
            }, 500)
    
    def handle_positions_live(self, request_handler, query_params: Dict) -> None:
        """GET /api/positions-live - Positions merged with live Binance prices.

        Loads the main bot's positions, merges in spy positions (strictly
        read-only — see the FIX note below), then refreshes current_price and
        pnl_percent from the Binance ticker (best-effort: stale values are
        kept when the price API is unavailable).
        """
        try:
            positions_file = os.path.join(self.script_dir, 'positions.json')
            spy_trades_file = os.path.join(self.script_dir, 'espion_trades.json')

            positions = {}

            # 1. Load the main bot's positions
            if os.path.exists(positions_file):
                with open(positions_file, 'r', encoding='utf-8-sig') as f:
                    positions = json.load(f)

            # 2. Merge spy positions (espion_trades.json)
            if os.path.exists(spy_trades_file):
                try:
                    with open(spy_trades_file, 'r', encoding='utf-8') as f:
                        spy_trades = json.load(f)
                    for symbol, trade in spy_trades.items():
                        if symbol not in positions and isinstance(trade, dict) and 'entry_price' in trade:
                            positions[symbol] = trade
                            # Tag as a spy position for the dashboard
                            positions[symbol]['source'] = 'MARKET_SPY'
                        # 🔴 FIX 08/03: do NOT write positions.json back here!
                        # That write-back created orphan positions:
                        #   Spy sells WIF → removed from positions.json + memory
                        #   → espion_trades.json not yet updated (race)
                        #   → API re-reads espion_trades.json → re-inserts into positions.json
                        #   → WIF comes back as a ghost even after the sale!
                        # The dashboard must stay READ-ONLY; writes belong to the spy.
                except Exception as e:
                    logger.warning(f"positions_live: spy merge error: {e}")

            if not positions:
                request_handler.send_json_response({
                    'success': True,
                    'positions': {},
                    'summary': 'Aucune position'
                })
                return

            # 🔴 FIX: fetch current prices DIRECTLY from Binance
            try:
                symbols = list(positions.keys())
                # Compact JSON (no whitespace) — Binance rejects spaced/encoded params
                symbols_param = json.dumps(symbols, separators=(',', ':'))
                url = f'https://api.binance.com/api/v3/ticker/price?symbols={symbols_param}'
                resp = requests.get(url, timeout=5)
                if resp.status_code == 200:
                    prices_map = {p['symbol']: float(p['price']) for p in resp.json()}

                    for symbol, pos in positions.items():
                        current_price = prices_map.get(symbol, 0)
                        if current_price > 0:
                            pos['current_price'] = current_price
                            # Recompute PnL from entry_price (not buy_price)
                            entry_price = pos.get('entry_price', pos.get('buy_price', 0))
                            if entry_price > 0:
                                pos['pnl_percent'] = ((current_price - entry_price) / entry_price) * 100
                else:
                    logger.warning(f"positions_live: Binance API {resp.status_code}")
            except Exception as e:
                logger.warning(f"positions_live: prix Binance indisponibles: {e}")

            count = len(positions)
            summary = f"{count} position{'s' if count > 1 else ''}"

            request_handler.send_json_response({
                'success': True,
                'positions': positions,
                'summary': summary,
                'count': count
            })

        except Exception as e:
            logger.error(f"Error in positions_live: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e),
                'positions': {}
            }, 500)
    
    def handle_update_sltp_config(self, request_handler, data: Dict[str, Any]) -> None:
        """POST /api/update-sltp-config - Update the SL/TP configuration.

        Validates stop_loss (0.5-10) and take_profit (0.5-20), writes them
        into bot_settings.json and echoes the saved configuration back.
        Responds 400 on missing/invalid values, 404 when the config file is
        absent, 500 when saving fails.
        """
        try:
            raw_sl = data.get('stop_loss')
            raw_tp = data.get('take_profit')

            # Both fields are mandatory
            if raw_sl is None or raw_tp is None:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Missing stop_loss or take_profit'
                }, 400)
                return

            # May raise ValueError -> reported below as a 400
            stop_loss = float(raw_sl)
            take_profit = float(raw_tp)

            # Range checks (guard clauses)
            if not (0.5 <= stop_loss <= 10):
                request_handler.send_json_response({
                    'success': False,
                    'error': 'stop_loss must be between 0.5 and 10'
                }, 400)
                return

            if not (0.5 <= take_profit <= 20):
                request_handler.send_json_response({
                    'success': False,
                    'error': 'take_profit must be between 0.5 and 20'
                }, 400)
                return

            # Load the current configuration
            config = self.config_service.read_config()
            if config is None:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Config file not found'
                }, 404)
                return

            # Apply the new values and persist them
            config['STOP_LOSS_PERCENT'] = stop_loss
            config['TAKE_PROFIT_PERCENT'] = take_profit

            config_path = os.path.join(self.script_dir, 'bot_settings.json')
            if save_json_file(config_path, config):
                logger.info(f"✅ SL/TP config updated: SL={stop_loss}%, TP={take_profit}%")
                request_handler.send_json_response({
                    'success': True,
                    'message': f'Configuration updated: SL={stop_loss}%, TP={take_profit}%',
                    'config': {
                        'stop_loss': stop_loss,
                        'take_profit': take_profit,
                        'risk_reward_ratio': round(take_profit / stop_loss, 2)
                    }
                })
            else:
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Failed to save configuration'
                }, 500)

        except ValueError as e:
            request_handler.send_json_response({
                'success': False,
                'error': f'Invalid values: {str(e)}'
            }, 400)
        except Exception as e:
            logger.error(f"Error in update_sltp_config: {e}")
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)
    
    def _compute_spy_stats(self, cutoff: datetime) -> Dict[str, Any]:
        """Compute spy performance stats from espion_history.json.

        Args:
            cutoff: naive datetime; only trades whose exit_time is at or
                after this instant are included.

        Returns:
            Aggregate stats (wins/losses, win rate, total PnL, average PnL %,
            average hold duration, top 5 cryptos, patterns by avg profit).
            {'total_trades': 0} when the history file is missing.
        """
        from collections import defaultdict

        spy_file = os.path.join(self.script_dir, 'espion_history.json')
        if not os.path.exists(spy_file):
            return {'total_trades': 0}

        with open(spy_file, 'r', encoding='utf-8') as f:
            all_trades = json.load(f)

        def parse_naive(ts: str):
            """Parse an ISO-ish timestamp into a NAIVE datetime (None on failure).

            Normalizes a trailing 'Z' and drops any UTC offset so the result
            always compares safely against the naive cutoff (an aware datetime
            here would raise TypeError on comparison and kill the whole stats).
            """
            try:
                return datetime.fromisoformat(ts.replace('Z', '+00:00')).replace(tzinfo=None)
            except ValueError:
                try:
                    return datetime.strptime(ts[:19], '%Y-%m-%dT%H:%M:%S')
                except ValueError:
                    return None

        # Keep only trades closed within the requested period
        trades = []
        for t in all_trades:
            exit_dt = parse_naive(str(t.get('exit_time') or ''))
            if exit_dt is not None and exit_dt >= cutoff:
                trades.append(t)

        if not trades:
            return {'total_trades': 0, 'wins': 0, 'losses': 0, 'win_rate': 0, 'total_pnl_usdt': 0, 'avg_pnl_pct': 0, 'avg_duration_min': 0, 'top_cryptos': [], 'top_patterns': []}

        # A win is strictly positive PnL; missing/None PnL counts as a loss
        wins = [t for t in trades if (t.get('pnl_usdt', 0) or 0) > 0]
        losses = [t for t in trades if (t.get('pnl_usdt', 0) or 0) <= 0]
        total_pnl = sum(t.get('pnl_usdt', 0) or 0 for t in trades)
        avg_pnl_pct = sum(t.get('pnl_pct', 0) or 0 for t in trades) / len(trades)
        avg_dur = sum(t.get('hold_minutes', 0) or 0 for t in trades) / len(trades)

        # Per-symbol aggregates, sorted by total PnL (USDT)
        by_symbol = defaultdict(list)
        for t in trades:
            by_symbol[t['symbol']].append(t)
        crypto_stats = []
        for sym, strades in by_symbol.items():
            w = sum(1 for t in strades if (t.get('pnl_usdt', 0) or 0) > 0)
            pnl = sum(t.get('pnl_usdt', 0) or 0 for t in strades)
            pnl_pct = sum(t.get('pnl_pct', 0) or 0 for t in strades)
            crypto_stats.append({
                'symbol': sym,
                'count': len(strades),
                'win_rate': round(w / len(strades) * 100, 1),
                'total_pnl_usdt': round(pnl, 2),
                'total_pnl_pct': round(pnl_pct, 2),
            })
        crypto_stats.sort(key=lambda x: x['total_pnl_usdt'], reverse=True)

        # Per-pattern (surge_type) aggregates, sorted by average profit
        by_pattern = defaultdict(list)
        for t in trades:
            by_pattern[t.get('surge_type', 'UNKNOWN')].append(t)
        pattern_stats = []
        for pat, ptrades in by_pattern.items():
            w = sum(1 for t in ptrades if (t.get('pnl_usdt', 0) or 0) > 0)
            avg_p = sum(t.get('pnl_pct', 0) or 0 for t in ptrades) / len(ptrades)
            pattern_stats.append({
                'pattern': pat,
                'count': len(ptrades),
                'win_rate': round(w / len(ptrades) * 100, 1),
                'avg_profit_pct': round(avg_p, 2),
            })
        pattern_stats.sort(key=lambda x: x['avg_profit_pct'], reverse=True)

        return {
            'total_trades': len(trades),
            'wins': len(wins),
            'losses': len(losses),
            'win_rate': round(len(wins) / len(trades) * 100, 1),
            'total_pnl_usdt': round(total_pnl, 2),
            'avg_pnl_pct': round(avg_pnl_pct, 2),
            'avg_duration_min': round(avg_dur, 1),
            'top_cryptos': crypto_stats[:5],
            'top_patterns': pattern_stats,
        }

    def _compute_bot_stats(self, cutoff: datetime) -> Dict[str, Any]:
        """Compute bot performance stats from trade_history.json.

        Args:
            cutoff: naive datetime; only trades whose exit_time is at or
                after this instant are included.

        Returns:
            Aggregate stats (wins/losses, win rate, total PnL, average PnL %,
            average hold duration, top 5 cryptos, patterns by avg profit).
            {'total_trades': 0} when the history file is missing.
        """
        from collections import defaultdict

        bot_file = os.path.join(self.script_dir, 'trade_history.json')
        if not os.path.exists(bot_file):
            return {'total_trades': 0}

        with open(bot_file, 'r', encoding='utf-8') as f:
            all_trades = json.load(f)

        def parse_naive(ts: str):
            """Parse an ISO-ish timestamp into a NAIVE datetime (None on failure).

            Normalizes a trailing 'Z' and drops any UTC offset so the result
            always compares safely against the naive cutoff (an aware datetime
            here would raise TypeError on comparison and kill the whole stats).
            """
            try:
                return datetime.fromisoformat(ts.replace('Z', '+00:00')).replace(tzinfo=None)
            except ValueError:
                try:
                    return datetime.strptime(ts[:19], '%Y-%m-%dT%H:%M:%S')
                except ValueError:
                    return None

        # Keep only trades closed within the requested period
        trades = []
        for t in all_trades:
            exit_dt = parse_naive(str(t.get('exit_time') or ''))
            if exit_dt is not None and exit_dt >= cutoff:
                trades.append(t)

        if not trades:
            return {'total_trades': 0, 'wins': 0, 'losses': 0, 'win_rate': 0, 'total_pnl_usdt': 0, 'avg_pnl_pct': 0, 'avg_duration_min': 0, 'top_cryptos': [], 'top_patterns': []}

        # A win is strictly positive PnL; missing/None PnL counts as a loss
        wins = [t for t in trades if (t.get('pnl', 0) or 0) > 0]
        losses = [t for t in trades if (t.get('pnl', 0) or 0) <= 0]
        total_pnl = sum(t.get('pnl', 0) or 0 for t in trades)
        avg_pnl_pct = sum(t.get('pnl_pct', 0) or 0 for t in trades) / len(trades)

        # Average hold duration from entry_time/exit_time pairs; trades with
        # unparsable timestamps are simply skipped from the average
        total_dur = 0
        dur_count = 0
        for t in trades:
            entry = parse_naive(str(t.get('entry_time') or ''))
            exit_ = parse_naive(str(t.get('exit_time') or ''))
            if entry is not None and exit_ is not None:
                total_dur += (exit_ - entry).total_seconds() / 60
                dur_count += 1
        avg_dur = total_dur / dur_count if dur_count > 0 else 0

        # Per-symbol aggregates, sorted by total PnL (USDT)
        by_symbol = defaultdict(list)
        for t in trades:
            by_symbol[t['symbol']].append(t)
        crypto_stats = []
        for sym, strades in by_symbol.items():
            w = sum(1 for t in strades if (t.get('pnl', 0) or 0) > 0)
            pnl = sum(t.get('pnl', 0) or 0 for t in strades)
            pnl_pct = sum(t.get('pnl_pct', 0) or 0 for t in strades)
            crypto_stats.append({
                'symbol': sym,
                'count': len(strades),
                'win_rate': round(w / len(strades) * 100, 1),
                'total_pnl_usdt': round(pnl, 2),
                'total_pnl_pct': round(pnl_pct, 2),
            })
        crypto_stats.sort(key=lambda x: x['total_pnl_usdt'], reverse=True)

        # Per-pattern aggregates, sorted by average profit
        by_pattern = defaultdict(list)
        for t in trades:
            by_pattern[t.get('pattern', 'UNKNOWN')].append(t)
        pattern_stats = []
        for pat, ptrades in by_pattern.items():
            w = sum(1 for t in ptrades if (t.get('pnl', 0) or 0) > 0)
            avg_p = sum(t.get('pnl_pct', 0) or 0 for t in ptrades) / len(ptrades)
            pattern_stats.append({
                'pattern': pat,
                'count': len(ptrades),
                'win_rate': round(w / len(ptrades) * 100, 1),
                'avg_profit_pct': round(avg_p, 2),
            })
        pattern_stats.sort(key=lambda x: x['avg_profit_pct'], reverse=True)

        return {
            'total_trades': len(trades),
            'wins': len(wins),
            'losses': len(losses),
            'win_rate': round(len(wins) / len(trades) * 100, 1),
            'total_pnl_usdt': round(total_pnl, 2),
            'avg_pnl_pct': round(avg_pnl_pct, 2),
            'avg_duration_min': round(avg_dur, 1),
            'top_cryptos': crypto_stats[:5],
            'top_patterns': pattern_stats,
        }

    def _calculate_sltp_usage_stats(self) -> Dict[str, int]:
        """Compute SL/TP source statistics over the last 7 days.

        Scans trade_logs/trades_log.jsonl and classifies each trade's SL/TP
        origin: fully AI-driven ('ia_dynamic'), fully config fallback
        ('config_fallback'), or mixed ('hybrid'). Malformed lines are skipped.

        Returns:
            Dict with keys 'total', 'ia_dynamic', 'config_fallback', 'hybrid'
            (all zero when the log file is missing or on error).
        """
        empty = {'total': 0, 'ia_dynamic': 0, 'config_fallback': 0, 'hybrid': 0}
        try:
            trades_log_path = os.path.join(self.script_dir, 'trade_logs', 'trades_log.jsonl')

            if not os.path.exists(trades_log_path):
                return dict(empty)

            # Only consider trades from the last 7 days
            cutoff_time = datetime.now() - timedelta(days=7)

            total = 0
            ia_dynamic = 0
            config_fallback = 0
            hybrid = 0

            with open(trades_log_path, 'r', encoding='utf-8') as f:
                for line in f:
                    try:
                        trade = json.loads(line.strip())
                        # Tolerate a trailing 'Z' (UTC marker) and drop any
                        # offset so naive comparison below cannot raise
                        ts = str(trade.get('timestamp', '2000-01-01')).replace('Z', '+00:00')
                        trade_time = datetime.fromisoformat(ts).replace(tzinfo=None)

                        if trade_time < cutoff_time:
                            continue

                        total += 1

                        # Classify the SL/TP source for this trade
                        has_ia_sl = trade.get('has_ia_stop_loss', False)
                        has_ia_tp = trade.get('has_ia_take_profit', False)

                        if has_ia_sl and has_ia_tp:
                            ia_dynamic += 1
                        elif not has_ia_sl and not has_ia_tp:
                            config_fallback += 1
                        else:
                            hybrid += 1

                    except Exception as e:
                        # Skip malformed lines but keep scanning the file
                        logger.warning(f"Error parsing trade: {e}")
                        continue

            return {
                'total': total,
                'ia_dynamic': ia_dynamic,
                'config_fallback': config_fallback,
                'hybrid': hybrid
            }

        except Exception as e:
            logger.error(f"Error calculating SL/TP stats: {e}")
            return dict(empty)
    
    def handle_ai_self_optimizer(self, request_handler, query_params: Dict = None) -> None:
        """GET /api/ai-self-optimizer - Run the AI self-optimization analysis.

        Query params:
            hours: lookback window in hours (default 24).
            format: 'json' (default) for structured results, 'text' for a
                plain-text report.
        """
        try:
            from ai_self_optimizer import AISelfOptimizer

            # Optional query parameters
            if query_params:
                lookback_hours = int(query_params.get('hours', [24])[0])
                format_type = query_params.get('format', ['json'])[0]
            else:
                lookback_hours = 24
                format_type = 'json'

            logger.info(f"🧠 AI Self-Optimizer request: {lookback_hours}h, format={format_type}")

            optimizer = AISelfOptimizer(lookback_hours=lookback_hours)

            if format_type == 'text':
                # Plain-text report, served as-is
                report = optimizer.generate_report()
                request_handler.send_response(200)
                request_handler.send_header('Content-type', 'text/plain; charset=utf-8')
                request_handler.end_headers()
                request_handler.wfile.write(report.encode('utf-8'))
            else:
                # Structured JSON results
                results = optimizer.analyze_performance()
                request_handler.send_json_response({
                    'success': True,
                    'data': results
                })

            logger.info(f"✅ AI Self-Optimizer analysis completed")

        except ImportError as e:
            logger.error(f"AI Self-Optimizer module not found: {e}")
            request_handler.send_json_response({
                'error': 'AI Self-Optimizer module not available',
                'details': str(e)
            }, 500)
        except Exception as e:
            logger.error(f"Error in AI Self-Optimizer: {e}", exc_info=True)
            request_handler.send_json_response({
                'error': 'Failed to analyze performance',
                'details': str(e)
            }, 500)
    
    def handle_technical_analysis(self, request_handler, query_params: Dict = None) -> None:
        """GET /api/technical-analysis - Technical analysis for one symbol.

        Requires a ?symbol=XXXUSDT query parameter. Responds 400 when the
        symbol is missing or the analyzer reports a failure, 500 when the
        analyzer module is unavailable or an unexpected error occurs.
        """
        try:
            from technical_analyzer import TechnicalAnalyzer

            # Extract the requested symbol, if provided
            symbol = ''
            if query_params and 'symbol' in query_params:
                symbol = query_params.get('symbol', [''])[0]

            if not symbol:
                request_handler.send_json_response({
                    'error': 'Symbole requis (?symbol=BTCUSDT)',
                    'success': False
                }, 400)
                return

            logger.info(f"📊 Technical Analysis request: {symbol}")

            # Run the analysis
            result = TechnicalAnalyzer().analyze_symbol(symbol)

            if not result['success']:
                request_handler.send_json_response({
                    'error': result.get('error', 'Erreur inconnue'),
                    'success': False
                }, 400)
                return

            request_handler.send_json_response({
                'success': True,
                'data': result
            })
            logger.info(f"✅ Technical Analysis completed for {symbol}")

        except ImportError as e:
            logger.error(f"Technical Analyzer module not found: {e}")
            request_handler.send_json_response({
                'error': 'Technical Analyzer module not available',
                'details': str(e),
                'success': False
            }, 500)
        except Exception as e:
            logger.error(f"Error in Technical Analysis: {e}", exc_info=True)
            request_handler.send_json_response({
                'error': 'Failed to analyze symbol',
                'details': str(e),
                'success': False
            }, 500)

    def handle_market_analysis(self, request_handler, query_params: Dict = None) -> None:
        """GET /api/market-analysis - Analyse complète du marché et efficacité du bot"""
        try:
            import requests as req
            from datetime import datetime, timedelta
            
            result = {
                'success': True,
                'timestamp': datetime.now().isoformat(),
                'positions': {},
                'performance': {},
                'missed_opportunities': [],
                'market_overview': {},
                'perspectives': {}
            }
            
            # 1. Positions actuelles avec P&L
            positions_file = os.path.join(self.script_dir, 'positions.json')
            positions = {}
            if os.path.exists(positions_file):
                with open(positions_file, 'r') as f:
                    positions = json.load(f)
            
            # Récupérer prix actuels
            try:
                prices_resp = req.get('https://api.binance.com/api/v3/ticker/price', timeout=10)
                prices_map = {p['symbol']: float(p['price']) for p in prices_resp.json()}
            except:
                prices_map = {}
            
            total_invested = 0
            total_pnl = 0
            positions_data = []
            
            for symbol, pos in positions.items():
                entry = pos.get('entry_price', 0)
                qty = pos.get('quantity', 0)
                current = prices_map.get(symbol, entry)
                pnl_pct = ((current - entry) / entry * 100) if entry > 0 else 0
                pnl_usd = (current - entry) * qty
                invested = entry * qty
                
                total_invested += invested
                total_pnl += pnl_usd
                
                # Calculer la durée de la position
                duration_str = '--'
                timestamp = pos.get('timestamp')
                if timestamp:
                    try:
                        # Parser le timestamp (sans fuseau horaire)
                        if 'T' in timestamp:
                            entry_time = datetime.fromisoformat(timestamp.split('+')[0].split('Z')[0])
                        else:
                            entry_time = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')
                        
                        now = datetime.now()
                        diff = now - entry_time
                        total_minutes = int(diff.total_seconds() // 60)
                        hours = total_minutes // 60
                        minutes = total_minutes % 60
                        
                        if hours > 0:
                            duration_str = f"{hours}h {minutes}min"
                        else:
                            duration_str = f"{minutes}min"
                    except Exception as e:
                        print(f"Warning: Erreur calcul durée pour {symbol}: {e}")
                        duration_str = '--'
                
                positions_data.append({
                    'symbol': symbol,
                    'entry': entry,
                    'current': current,
                    'quantity': qty,
                    'pnl_pct': round(pnl_pct, 2),
                    'pnl_usd': round(pnl_usd, 4),
                    'invested': round(invested, 2),
                    'duration': duration_str
                })
            
            # Trier par P&L
            positions_data.sort(key=lambda x: x['pnl_pct'], reverse=True)
            
            result['positions'] = {
                'list': positions_data,
                'total_invested': round(total_invested, 2),
                'total_pnl_usd': round(total_pnl, 2),
                'total_pnl_pct': round((total_pnl / total_invested * 100) if total_invested > 0 else 0, 2),
                'count': len(positions_data)
            }
            
            # 2. Performance récente (trades fermés)
            trades_file = os.path.join(self.script_dir, 'trade_logs', 'trades_log.jsonl')
            closed_trades = []
            if os.path.exists(trades_file):
                with open(trades_file, 'r', encoding='utf-8') as f:
                    for line in f:
                        if line.strip():
                            try:
                                trade = json.loads(line)
                                if trade.get('type') == 'TRADE_CLOSE':
                                    closed_trades.append(trade)
                            except:
                                pass
            
            # Derniers 50 trades
            recent_trades = closed_trades[-50:]
            wins = [t for t in recent_trades if t.get('pnl_pct', 0) > 0]
            losses = [t for t in recent_trades if t.get('pnl_pct', 0) < 0]
            tp_count = len([t for t in recent_trades if t.get('reason') == 'take-profit'])
            qe_count = len([t for t in recent_trades if t.get('reason') == 'quick-exit'])
            sl_count = len([t for t in recent_trades if t.get('reason') == 'stop-loss'])
            
            total_trade_pnl = sum(t.get('pnl_pct', 0) for t in recent_trades)
            avg_pnl = total_trade_pnl / len(recent_trades) if recent_trades else 0
            
            result['performance'] = {
                'total_trades': len(recent_trades),
                'wins': len(wins),
                'losses': len(losses),
                'win_rate': round(len(wins) / len(recent_trades) * 100, 1) if recent_trades else 0,
                'total_pnl_pct': round(total_trade_pnl, 2),
                'avg_pnl_pct': round(avg_pnl, 2),
                'take_profit': tp_count,
                'quick_exit': qe_count,
                'stop_loss': sl_count
            }
            
            # 3. Signaux manqués
            signals_file = os.path.join(self.script_dir, 'trade_logs', 'signals_log.jsonl')
            missed = []
            if os.path.exists(signals_file):
                with open(signals_file, 'r', encoding='utf-8') as f:
                    for line in f:
                        if line.strip():
                            try:
                                sig = json.loads(line)
                                if not sig.get('executed') and 'MAX_POSITIONS' in sig.get('rejection_reason', ''):
                                    missed.append(sig)
                            except:
                                pass
            
            # Analyser les derniers signaux manqués
            missed_analysis = []
            checked_symbols = set()
            for sig in reversed(missed[-30:]):
                sym = sig.get('symbol', '')
                if sym in checked_symbols:
                    continue
                checked_symbols.add(sym)
                
                signal_price = sig.get('features', {}).get('price_current', 0)
                current_price = prices_map.get(sym, signal_price)
                
                if signal_price > 0:
                    variation = ((current_price - signal_price) / signal_price) * 100
                    missed_analysis.append({
                        'symbol': sym,
                        'signal_price': signal_price,
                        'current_price': current_price,
                        'variation_pct': round(variation, 2),
                        'pattern': sig.get('pattern', '?'),
                        'score': sig.get('ai_score', 0),
                        'would_profit': variation > 0
                    })
            
            result['missed_opportunities'] = missed_analysis[:15]
            
            # 4. Aperçu du marché
            try:
                tickers = req.get('https://api.binance.com/api/v3/ticker/24hr', timeout=10).json()
                usdt_pairs = [t for t in tickers if t['symbol'].endswith('USDT') and float(t['quoteVolume']) > 10000000]
                
                gainers = sorted(usdt_pairs, key=lambda x: float(x['priceChangePercent']), reverse=True)[:5]
                losers = sorted(usdt_pairs, key=lambda x: float(x['priceChangePercent']))[:5]
                
                avg_change = sum(float(t['priceChangePercent']) for t in usdt_pairs) / len(usdt_pairs) if usdt_pairs else 0
                positive_count = len([t for t in usdt_pairs if float(t['priceChangePercent']) > 0])
                negative_count = len(usdt_pairs) - positive_count
                
                result['market_overview'] = {
                    'avg_change_24h': round(avg_change, 2),
                    'positive_count': positive_count,
                    'negative_count': negative_count,
                    'trend': 'BULLISH' if avg_change > 1 else ('BEARISH' if avg_change < -1 else 'NEUTRAL'),
                    'top_gainers': [{'symbol': t['symbol'], 'change': round(float(t['priceChangePercent']), 2)} for t in gainers],
                    'top_losers': [{'symbol': t['symbol'], 'change': round(float(t['priceChangePercent']), 2)} for t in losers]
                }
            except Exception as e:
                result['market_overview'] = {'error': str(e)}
            
            # 5. Perspectives
            missed_would_profit = len([m for m in missed_analysis if m['would_profit']])
            missed_would_lose = len(missed_analysis) - missed_would_profit
            
            perspectives = []
            
            # Analyse contexte
            market_trend = result['market_overview'].get('trend', 'NEUTRAL')
            if market_trend == 'BEARISH':
                perspectives.append({
                    'type': 'warning',
                    'title': 'Marché Baissier',
                    'message': 'Le marché global est en baisse. Privilégiez la prudence et surveillez vos stop-loss.'
                })
            elif market_trend == 'BULLISH':
                perspectives.append({
                    'type': 'success',
                    'title': 'Marché Haussier',
                    'message': 'Conditions favorables pour les achats. Le momentum est positif.'
                })
            
            # Efficacité
            win_rate = result['performance'].get('win_rate', 0)
            if win_rate < 45:
                perspectives.append({
                    'type': 'warning',
                    'title': 'Taux de réussite faible',
                    'message': f'Win rate de {win_rate}%. Envisagez d\'augmenter le score minimum pour filtrer les signaux.'
                })
            elif win_rate > 55:
                perspectives.append({
                    'type': 'success',
                    'title': 'Bonne efficacité',
                    'message': f'Win rate de {win_rate}%. La stratégie fonctionne bien.'
                })
            
            # Quick exits
            qe_rate = (qe_count / len(recent_trades) * 100) if recent_trades else 0
            if qe_rate > 60:
                perspectives.append({
                    'type': 'info',
                    'title': 'Beaucoup de Quick-Exits',
                    'message': f'{qe_rate:.0f}% de sorties rapides. Le système pourrait être trop agressif.'
                })
            
            # Opportunités manquées
            if missed_would_lose > missed_would_profit and len(missed_analysis) > 3:
                perspectives.append({
                    'type': 'success',
                    'title': 'Signaux bien filtrés',
                    'message': f'{missed_would_lose}/{len(missed_analysis)} signaux rejetés auraient été perdants. Bonne sélection.'
                })
            elif missed_would_profit > missed_would_lose and len(missed_analysis) > 3:
                perspectives.append({
                    'type': 'warning',
                    'title': 'Opportunités manquées',
                    'message': f'{missed_would_profit}/{len(missed_analysis)} signaux rejetés auraient été gagnants. Augmentez max_positions?'
                })
            
            result['perspectives'] = perspectives
            
            request_handler.send_json_response(result)
            logger.info("✅ Market Analysis completed")
            
        except Exception as e:
            logger.error(f"Error in Market Analysis: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_watchdog_status(self, request_handler) -> None:
        """GET /api/watchdog-status - Watchdog and bot supervision status.

        Aggregates into one JSON payload: watchdog liveness (log freshness
        and/or a running process), trading-bot liveness (via bot.pid),
        auxiliary service processes, and an overall health verdict.
        The result is cached for 30s because the full process scan is
        expensive under load.
        """
        try:
            # 🚀 PERF: 30s cache — psutil.process_iter is costly under load
            cached = _cache_get('watchdog_status', ttl=30.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            import psutil
            from pathlib import Path

            status = {
                'watchdog_active': False,
                'watchdog_uptime': None,
                'last_check': None,
                'bot_active': False,
                'bot_uptime': None,
                'bot_pid': None,
                'auto_restart_enabled': False,
                'services': {}
            }

            # 1. Check watchdog.log freshness: a recent entry (< 2 min old)
            #    means the watchdog loop is alive even if we miss its process.
            watchdog_log = Path(self.script_dir) / 'watchdog.log'
            if watchdog_log.exists():
                try:
                    # Read only the tail of the file (seek from the end)
                    with open(watchdog_log, 'rb') as f:
                        f.seek(0, 2)  # end of file
                        fsize = f.tell()
                        if fsize > 0:
                            # Read the last 512 bytes to find the last line
                            f.seek(max(0, fsize - 512))
                            chunk = f.read().decode('utf-8', errors='ignore')
                            lines = chunk.strip().split('\n')
                            last_line = lines[-1].strip() if lines else ''
                            if last_line and '[' in last_line:
                                timestamp_str = last_line.split(']')[0].replace('[', '')
                                try:
                                    last_check = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
                                    status['last_check'] = last_check.isoformat()
                                    time_diff = (datetime.now() - last_check).total_seconds()
                                    status['watchdog_active'] = time_diff < 120
                                except ValueError:
                                    # Malformed timestamp: leave defaults in place
                                    pass
                except Exception as e:
                    logger.warning(f"Erreur lecture watchdog.log: {e}")

            # 2. Scan ALL processes in ONE pass (instead of 3 separate scans)
            scripts_to_find = {
                'bot_watchdog.py': None,
                'dashboard_api_server.py': None,
                'auto_updater_service.py': None,
            }
            for proc in psutil.process_iter(['pid', 'name', 'cmdline', 'create_time']):
                try:
                    cmdline = ' '.join(proc.info['cmdline'] or [])
                    for script_name in scripts_to_find:
                        if scripts_to_find[script_name] is None and script_name in cmdline:
                            scripts_to_find[script_name] = proc.info
                            break
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    continue

            # Watchdog process found → definitely active, with uptime
            wdinfo = scripts_to_find['bot_watchdog.py']
            if wdinfo:
                create_time = datetime.fromtimestamp(wdinfo['create_time'])
                uptime = (datetime.now() - create_time).total_seconds()
                status['watchdog_active'] = True
                status['watchdog_uptime'] = int(uptime)
                status['auto_restart_enabled'] = True

            # 3. Check the trading bot via its PID file
            bot_pid_file = Path(self.script_dir) / 'bot.pid'
            if bot_pid_file.exists():
                try:
                    with open(bot_pid_file, 'r') as f:
                        pid = int(f.read().strip())
                    status['bot_pid'] = pid

                    # The PID must exist AND belong to a python process
                    # (guards against PID reuse by an unrelated process)
                    if psutil.pid_exists(pid):
                        proc = psutil.Process(pid)
                        if 'python' in proc.name().lower():
                            create_time = datetime.fromtimestamp(proc.create_time())
                            uptime = (datetime.now() - create_time).total_seconds()
                            status['bot_active'] = True
                            status['bot_uptime'] = int(uptime)
                except Exception as e:
                    # Covers a bad/stale PID file (ValueError) and races where
                    # the process vanishes between pid_exists() and Process()
                    logger.warning(f"Erreur vérification bot: {e}")

            # 4. Other services (reuses the single scan from step 2)
            services_to_check = {
                'dashboard_api': 'dashboard_api_server.py',
                'auto_updater': 'auto_updater_service.py'
            }

            for service_name, script_name in services_to_check.items():
                svc_info = scripts_to_find.get(script_name)
                if svc_info:
                    create_time = datetime.fromtimestamp(svc_info['create_time'])
                    uptime = (datetime.now() - create_time).total_seconds()
                    status['services'][service_name] = {
                        'active': True,
                        'uptime': int(uptime),
                        'pid': svc_info['pid']
                    }
                else:
                    status['services'][service_name] = {'active': False}

            # 5. Overall verdict: bot down = error; bot up but unsupervised = warning
            status['overall_status'] = 'ok' if status['bot_active'] else 'warning'
            if status['bot_active'] and not status['watchdog_active']:
                status['overall_status'] = 'warning'
                status['warning_message'] = '⚠️ Bot actif mais SANS surveillance! Risque d\'arrêt sans redémarrage.'
            elif not status['bot_active']:
                status['overall_status'] = 'error'
                status['error_message'] = '❌ Bot de trading NON ACTIF!'
            elif status['watchdog_active'] and status['bot_active']:
                status['success_message'] = '✅ Bot actif et surveillé 24/7'

            _cache_set('watchdog_status', status)
            request_handler.send_json_response(status)

        except Exception as e:
            logger.error(f"Error in watchdog status: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)
    
    def handle_bot_logs(self, request_handler) -> None:
        """GET /api/bot-logs - Récupère les derniers logs du bot de trading.

        Query params:
            lines: number of trailing log lines to return (default 50,
                   clamped to 1..1000; an invalid value falls back to 50
                   instead of producing a 500 error).

        Each entry carries an ISO timestamp, a coarse type
        (error/warning/success/analysis/reject/info) inferred from log
        markers, and the raw message line.
        """
        try:
            # Parse query parameters with the stdlib (URL-decoded, unlike
            # naive '&'/'=' splitting)
            from urllib.parse import parse_qs
            params = {}
            if '?' in request_handler.path:
                query_string = request_handler.path.split('?', 1)[1]
                params = {k: v[0] for k, v in parse_qs(query_string).items()}

            # Robust 'lines' parsing: bad input must not crash the endpoint
            try:
                max_lines = int(params.get('lines', 50))
            except (ValueError, TypeError):
                max_lines = 50
            max_lines = max(1, min(max_lines, 1000))

            # Trading-bot log file (trading_bot.log holds all the logs)
            log_file = os.path.join(self.script_dir, 'trading_bot.log')

            if not os.path.exists(log_file):
                request_handler.send_json_response({
                    'success': True,
                    'logs': [],
                    'message': 'Fichier de logs non trouvé'
                })
                return

            # Read the trailing lines
            logs = []
            try:
                import re
                from collections import deque

                # Compiled once: leading "HH:MM:SS" timestamp
                ts_pattern = re.compile(r'^(\d{2}:\d{2}:\d{2})')
                today = datetime.now().date()

                with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
                    # deque(maxlen=...) keeps only the tail without loading
                    # the whole file into memory
                    for line in deque(f, maxlen=max_lines):
                        line = line.strip()
                        if not line:
                            continue

                        # Rebuild a full datetime: the log stores only the
                        # time of day, so combine it with today's date
                        timestamp_match = ts_pattern.match(line)
                        if timestamp_match:
                            time_obj = datetime.strptime(timestamp_match.group(1), '%H:%M:%S').time()
                            timestamp = datetime.combine(today, time_obj).isoformat()
                        else:
                            timestamp = datetime.now().isoformat()

                        # Coarse classification from log markers
                        # (first match wins: errors take precedence)
                        log_type = 'info'
                        if '[ERROR]' in line or '❌' in line or 'ERROR:' in line:
                            log_type = 'error'
                        elif '[WARNING]' in line or '⚠️' in line or 'WARNING:' in line:
                            log_type = 'warning'
                        elif '[SUCCESS]' in line or '✅' in line or '💰' in line:
                            log_type = 'success'
                        elif 'Score=' in line or 'Pattern=' in line or '@' in line:
                            log_type = 'analysis'
                        elif '🚫' in line or 'REJET' in line:
                            log_type = 'reject'

                        logs.append({
                            'timestamp': timestamp,
                            'type': log_type,
                            'message': line
                        })

            except Exception as e:
                logger.error(f"Error reading log file: {e}")
                request_handler.send_json_response({
                    'success': False,
                    'error': f'Erreur de lecture du fichier: {str(e)}'
                }, 500)
                return

            # Per-type statistics for the dashboard widgets
            stats = {
                'total': len(logs),
                'errors': sum(1 for l in logs if l['type'] == 'error'),
                'warnings': sum(1 for l in logs if l['type'] == 'warning'),
                'success': sum(1 for l in logs if l['type'] == 'success'),
                'analysis': sum(1 for l in logs if l['type'] == 'analysis')
            }

            request_handler.send_json_response({
                'success': True,
                'logs': logs,
                'stats': stats,
                'file_size': os.path.getsize(log_file),
                'last_modified': datetime.fromtimestamp(os.path.getmtime(log_file)).isoformat()
            })

        except Exception as e:
            logger.error(f"Error in bot logs: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    @staticmethod
    def handle_monitor_status(request_handler):
        """Endpoint: /api/monitor-status - Statut de la surveillance silencieuse"""
        try:
            script_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            status_file = os.path.join(script_dir, 'monitor_status.json')
            log_file = os.path.join(script_dir, 'silent_monitor.log')
            
            # Lire le fichier de statut
            if os.path.exists(status_file):
                with open(status_file, 'r', encoding='utf-8') as f:
                    status = json.load(f)
                if not isinstance(status, dict):
                    status = {'all_ok': False, 'message': 'Fichier statut corrompu'}
            else:
                status = {
                    'all_ok': False,
                    'message': 'Silent monitor non démarré'
                }
            
            # Ajouter les dernières lignes du log
            recent_logs = []
            if os.path.exists(log_file):
                try:
                    with open(log_file, 'r', encoding='utf-8') as f:
                        lines = f.readlines()
                        recent_logs = [line.strip() for line in lines[-10:]]
                except:
                    pass
            
            status['recent_logs'] = recent_logs
            status['monitor_active'] = os.path.exists(status_file)
            
            request_handler.send_json_response({
                'success': True,
                'status': status
            })
            
        except Exception as e:
            logger.error(f"Error in monitor status: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    @staticmethod
    def handle_patterns_performance(request_handler, query_params):
        """Endpoint: /api/patterns-performance - Stats détaillées de tous les patterns"""
        try:
            script_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            trade_history_file = os.path.join(script_dir, 'trade_history.json')
            
            # Liste complète des patterns configurés dans le système
            ALL_AVAILABLE_PATTERNS = [
                'MANUAL_CLOSE', 'STABLECOIN_REJECTED', 'DEAD_CAT_BOUNCE', 'PROLONGED_DOWNTREND',
                'END_OF_CYCLE', 'CREUX_REBOUND', 'STRONG_UPTREND', 'EARLY_BREAKOUT',
                'CONSOLIDATION_BREAKOUT', 'EMA_BULLISH', 'CROSSOVER_IMMINENT', 
                'SQUEEZE_BREAKOUT', 'VOLUME_REVERSAL', 'RSI_REVERSAL', 'ACTIVE_CRASH',
                'RSI_TRAP', 'STRONG_DOWNTREND', 'HIGH_SCORE_OVERRIDE', 'ACHAT', 'HOLD', 'POSSIBLE'
            ]
            
            # Charger l'historique des trades
            if os.path.exists(trade_history_file):
                with open(trade_history_file, 'r', encoding='utf-8-sig') as f:
                    trades = json.load(f)
            else:
                trades = []
            
            # Analyser les performances par pattern
            from collections import defaultdict
            
            pattern_stats = defaultdict(lambda: {
                'count': 0,
                'wins': 0,
                'losses': 0,
                'total_pnl': 0.0,
                'best_trade': None,
                'worst_trade': None,
                'total_duration_minutes': 0,
                'trades_with_duration': 0
            })
            
            for trade in trades:
                pattern = trade.get('pattern', '') or 'UNKNOWN'
                # Renommer UNKNOWN en MANUAL_CLOSE pour plus de clarté
                if pattern == 'UNKNOWN':
                    pattern = 'MANUAL_CLOSE'
                pnl_pct = trade.get('pnl_pct', 0)
                
                stats = pattern_stats[pattern]
                stats['count'] += 1
                stats['total_pnl'] += pnl_pct
                
                if pnl_pct > 0:
                    stats['wins'] += 1
                else:
                    stats['losses'] += 1
                
                # Meilleur/Pire trade
                if stats['best_trade'] is None or pnl_pct > stats['best_trade']['pnl_pct']:
                    stats['best_trade'] = {
                        'symbol': trade.get('symbol', 'N/A'),
                        'pnl_pct': pnl_pct,
                        'exit_time': trade.get('exit_time', 'N/A')
                    }
                
                if stats['worst_trade'] is None or pnl_pct < stats['worst_trade']['pnl_pct']:
                    stats['worst_trade'] = {
                        'symbol': trade.get('symbol', 'N/A'),
                        'pnl_pct': pnl_pct,
                        'exit_time': trade.get('exit_time', 'N/A')
                    }
                
                # Durée moyenne
                if 'entry_time' in trade and 'exit_time' in trade:
                    try:
                        entry = datetime.fromisoformat(trade['entry_time'])
                        exit = datetime.fromisoformat(trade['exit_time'])
                        duration = (exit - entry).total_seconds() / 60
                        stats['total_duration_minutes'] += duration
                        stats['trades_with_duration'] += 1
                    except:
                        pass
            
            # Calculer les moyennes et ajouter les recommandations
            patterns_data = []
            for pattern, stats in pattern_stats.items():
                win_rate = (stats['wins'] / stats['count']) * 100 if stats['count'] > 0 else 0
                avg_pnl = stats['total_pnl'] / stats['count'] if stats['count'] > 0 else 0
                avg_duration = stats['total_duration_minutes'] / stats['trades_with_duration'] if stats['trades_with_duration'] > 0 else 0
                
                # Déterminer le statut et la recommandation
                if win_rate >= 60:
                    status = 'excellent'
                    recommendation = 'Maintenir - Performance excellente'
                    action = 'keep'
                elif win_rate >= 50:
                    status = 'good'
                    recommendation = 'Bon - Continuer la surveillance'
                    action = 'keep'
                elif win_rate >= 40:
                    status = 'medium'
                    recommendation = 'Moyen - Optimisation recommandée'
                    action = 'optimize'
                elif win_rate >= 30:
                    status = 'weak'
                    recommendation = 'Faible - Renforcer conditions ou désactiver'
                    action = 'review'
                else:
                    status = 'critical'
                    recommendation = 'Critique - Désactivation recommandée'
                    action = 'disable'
                
                # Déterminer les optimisations potentielles
                optimizations = []
                if win_rate < 50:
                    if avg_duration > 120:  # Plus de 2h
                        optimizations.append('Réduire timeout - Sorties trop tardives')
                    if avg_pnl < 0:
                        optimizations.append('Renforcer confirmations - Faux signaux')
                    if stats['count'] > 50:
                        optimizations.append('Critères trop permissifs - Trop de trades')
                    elif stats['count'] < 5:
                        optimizations.append('Critères trop stricts - Pas assez de trades')
                
                # 🔧 LOGIQUE CORRECTE pour meilleur/pire trade
                best_trade = stats['best_trade']
                worst_trade = stats['worst_trade']
                
                # CAS 1: Un seul trade = même valeur pour best et worst
                # → Afficher seulement dans meilleur SI c'est un gain, sinon dans pire SI c'est une perte
                if (best_trade and worst_trade and 
                    best_trade['symbol'] == worst_trade['symbol'] and 
                    abs(best_trade['pnl_pct'] - worst_trade['pnl_pct']) < 0.001 and
                    best_trade.get('exit_time') == worst_trade.get('exit_time')):
                    # C'est le même trade unique
                    if best_trade['pnl_pct'] >= 0:
                        # C'est un gain → garder dans meilleur, masquer pire
                        worst_trade = None
                    else:
                        # C'est une perte → garder dans pire, masquer meilleur
                        best_trade = None
                
                # CAS 2: Plusieurs trades
                else:
                    # Meilleur trade : afficher SEULEMENT si c'est un GAIN réel (> 0%)
                    if best_trade and best_trade['pnl_pct'] <= 0:
                        best_trade = None
                    
                    # Pire trade : TOUJOURS afficher s'il existe (montrer la pire performance)
                    # Pas de condition supplémentaire - le pire trade doit être visible !
                
                patterns_data.append({
                    'pattern': pattern,
                    'count': stats['count'],
                    'wins': stats['wins'],
                    'losses': stats['losses'],
                    'win_rate': round(win_rate, 1),
                    'total_pnl': round(stats['total_pnl'], 2),
                    'avg_pnl': round(avg_pnl, 3),
                    'avg_duration_hours': round(avg_duration / 60, 1) if avg_duration > 0 else 0,
                    'best_trade': best_trade,
                    'worst_trade': worst_trade,
                    'status': status,
                    'recommendation': recommendation,
                    'action': action,
                    'optimizations': optimizations
                })
            
            # 🆕 Ajouter les patterns configurés sans historique (0 trades)
            existing_patterns = set(p['pattern'] for p in patterns_data)
            for pattern_name in ALL_AVAILABLE_PATTERNS:
                if pattern_name not in existing_patterns:
                    patterns_data.append({
                        'pattern': pattern_name,
                        'count': 0,
                        'wins': 0,
                        'losses': 0,
                        'win_rate': 0.0,
                        'total_pnl': 0.0,
                        'avg_pnl': 0.0,
                        'avg_duration_hours': 0.0,
                        'best_trade': None,
                        'worst_trade': None,
                        'status': 'inactive',
                        'recommendation': 'Aucune donnée - Pattern jamais utilisé',
                        'action': 'monitor',
                        'optimizations': ['Pattern disponible mais jamais déclenché', 'Vérifier les conditions de détection']
                    })
            
            # Trier par fréquence d'utilisation
            patterns_data.sort(key=lambda x: x['count'], reverse=True)
            
            # Calculer stats globales
            total_trades = len(trades)
            total_wins = sum(1 for t in trades if t.get('pnl_pct', 0) > 0)
            global_win_rate = (total_wins / total_trades) * 100 if total_trades > 0 else 0
            total_pnl = sum(t.get('pnl_pct', 0) for t in trades)
            avg_pnl = total_pnl / total_trades if total_trades > 0 else 0
            
            # Compter patterns par statut
            status_counts = {
                'excellent': sum(1 for p in patterns_data if p['status'] == 'excellent'),
                'good': sum(1 for p in patterns_data if p['status'] == 'good'),
                'medium': sum(1 for p in patterns_data if p['status'] == 'medium'),
                'weak': sum(1 for p in patterns_data if p['status'] == 'weak'),
                'critical': sum(1 for p in patterns_data if p['status'] == 'critical'),
                'inactive': sum(1 for p in patterns_data if p['status'] == 'inactive')
            }
            
            request_handler.send_json_response({
                'success': True,
                'patterns': patterns_data,
                'global_stats': {
                    'total_trades': total_trades,
                    'total_patterns': len(patterns_data),
                    'global_win_rate': round(global_win_rate, 1),
                    'total_pnl': round(total_pnl, 2),
                    'avg_pnl': round(avg_pnl, 3),
                    'status_counts': status_counts
                },
                'timestamp': datetime.now().isoformat()
            })
            
        except Exception as e:
            logger.error(f"Error in patterns performance: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    # ═══════════════════════════════════════════════════════════════════════
    # MARKET SPY - Données du scanner espion
    # ═══════════════════════════════════════════════════════════════════════

    def handle_spy_status(self, request_handler) -> None:
        """GET handler: real-time Market Spy status read from spy_status.json.

        Enriches the file contents with two liveness checks (recorded PID
        still a running market_spy process, timestamp freshness) and caches
        the response for 8s.

        Args:
            request_handler: HTTP handler providing send_json_response().
        """
        try:
            # PERF: 8s cache — spy_status.json is rewritten every ~12s
            cached = _cache_get('spy_status', ttl=8.0)
            if cached is not None:
                request_handler.send_json_response(cached)
                return

            import psutil  # third-party; imported lazily to keep module import fast
            status_file = os.path.join(self.script_dir, 'spy_status.json')

            if not os.path.exists(status_file):
                request_handler.send_json_response({
                    'success': True,
                    'running': False,
                    'phase': 'NOT_STARTED',
                    'message': 'Market Spy non démarré'
                })
                return

            with open(status_file, 'r', encoding='utf-8') as f:
                status = json.load(f)

            # Liveness check 1: is the recorded PID really a market_spy process?
            pid = status.get('pid')
            process_alive = False
            if pid:
                try:
                    proc = psutil.Process(pid)
                    cmdline = ' '.join(proc.cmdline())
                    process_alive = proc.is_running() and 'market_spy' in cmdline
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    process_alive = False

            # Liveness check 2: timestamp freshness (>60s without an update
            # means the writer is probably dead)
            ts = status.get('timestamp', '')
            stale = False
            if ts:
                try:
                    last_update = datetime.fromisoformat(ts)
                    age = (datetime.now() - last_update).total_seconds()
                    stale = age > 60
                    status['status_age_seconds'] = round(age, 1)
                except Exception:
                    pass  # malformed timestamp → keep stale=False

            if not process_alive or stale:
                status['running'] = False
                status['phase'] = 'OFFLINE'

            status['success'] = True
            status['process_alive'] = process_alive
            _cache_set('spy_status', status)
            request_handler.send_json_response(status)

        except Exception as e:
            logger.error(f"Error in spy status: {e}", exc_info=True)
            # success=True on purpose: the UI renders errors as "spy offline"
            request_handler.send_json_response({
                'success': True,
                'running': False,
                'phase': 'ERROR',
                'error': str(e)
            })

    def handle_ia_status(self, request_handler) -> None:
        """GET handler: real-time AI surveillance status from ia_status.json.

        Marks the service OFFLINE when the status file has not been updated
        for more than 300s, then enriches the payload with the top "ready"
        signals from ia_surveillance_cache.json (best-effort).

        Args:
            request_handler: HTTP handler providing send_json_response().
        """
        try:
            status_file = os.path.join(self.script_dir, 'ia_status.json')

            if not os.path.exists(status_file):
                request_handler.send_json_response({
                    'success': True,
                    'running': False,
                    'phase': 'NOT_STARTED',
                    'message': 'Service IA non d\u00e9marr\u00e9'
                })
                return

            with open(status_file, 'r', encoding='utf-8') as f:
                status = json.load(f)

            # Freshness check on the status timestamp
            ts = status.get('timestamp', '')
            if ts:
                try:
                    last_update = datetime.fromisoformat(ts)
                    age = (datetime.now() - last_update).total_seconds()
                    status['status_age_seconds'] = round(age, 1)
                    # Threshold rationale: AI cycle ~15s + LSTM training up to
                    # 60s (in a thread) + 5s interval + margin.
                    # FIX 23/03: raised 120s → 300s (full cycle can reach ~80s)
                    if age > 300:  # no update for 300s = probably stuck
                        status['running'] = False
                        status['phase'] = 'OFFLINE'
                except Exception:
                    pass  # malformed timestamp → leave status untouched

            # Enrich with up to 6 "ready" signals from the surveillance cache
            try:
                cache_file = os.path.join(self.script_dir, 'ia_surveillance_cache.json')
                if os.path.exists(cache_file):
                    with open(cache_file, 'r', encoding='utf-8') as f:
                        cache = json.load(f)
                    status['ready_signals_detail'] = [
                        {'symbol': s.get('symbol', ''), 'score': s.get('score', 0), 'pattern': s.get('pattern', '')}
                        for s in cache.get('ready_signals', [])[:6]
                    ]
            except Exception:
                pass  # enrichment is optional

            status['success'] = True
            request_handler.send_json_response(status)

        except Exception as e:
            logger.error(f"Error in IA status: {e}", exc_info=True)
            # success=True on purpose: the UI renders errors as "service offline"
            request_handler.send_json_response({
                'success': True,
                'running': False,
                'phase': 'ERROR',
                'error': str(e)
            })

    def handle_crypto_news(self, request_handler) -> None:
        """Server-side RSS → JSON proxy for French crypto news feeds.

        Fetches the feeds directly, parses the XML and returns JSON, which
        avoids browser CORS issues and rss2json outages. Responses are
        cached for 5 minutes to limit external calls. Every feed AND every
        item is best-effort: a broken one is skipped, never fatal.

        Args:
            request_handler: HTTP handler providing send_json_response().
        """
        import re
        import time
        import urllib.request
        import xml.etree.ElementTree as ET

        now = time.time()
        cache = getattr(self, '_news_cache', None)
        if cache and now - cache.get('ts', 0) < 300:
            request_handler.send_json_response(cache['data'])
            return

        feeds = [
            {'url': 'https://journalducoin.com/feed/', 'name': 'Journal du Coin'},
            {'url': 'https://cryptoast.fr/feed/', 'name': 'Cryptoast'},
        ]

        def sentiment(title):
            """Naive keyword-based sentiment for a (French) headline."""
            t = title.lower()
            pos = ['hausse', 'bullish', 'monte', 'record', 'adoption', 'positif', 'croissance', 'rebond', 'rally']
            neg = ['baisse', 'chute', 'crash', 'plonge', 'recul', 'effondre', 'bearish', 'panique', 'liquidation']
            if any(w in t for w in pos): return 'positive'
            if any(w in t for w in neg): return 'negative'
            return 'neutral'

        def time_ago(pub_str):
            """Human-readable age from an RFC-2822 pubDate string."""
            try:
                from email.utils import parsedate_to_datetime
                dt = parsedate_to_datetime(pub_str)
                mins = int((datetime.now(timezone.utc) - dt).total_seconds() / 60)
                if mins < 60: return f'Il y a {mins} min'
                if mins < 1440: return f'Il y a {mins//60}h'
                return f'Il y a {mins//1440}j'
            except Exception:
                return 'Récent'

        def strip_html(text):
            """Remove markup tags from an RSS description.

            🔧 FIX: the previous ET.fromstring() round-trip crashed on HTML
            entities (&nbsp;…) and multi-tag descriptions, aborting ALL
            remaining items of the feed.
            """
            return re.sub(r'<[^>]+>', ' ', text)

        news = []
        for feed in feeds:
            try:
                req = urllib.request.Request(feed['url'],
                    headers={'User-Agent': 'Mozilla/5.0 CryptoBot/1.0'})
                with urllib.request.urlopen(req, timeout=8) as resp:
                    raw = resp.read()
                root = ET.fromstring(raw)
                for item in root.findall('.//item')[:8]:
                    try:
                        title = (item.findtext('title') or '').strip()
                        link = (item.findtext('link') or '').strip()
                        desc = strip_html(item.findtext('description') or '').strip()
                        if len(desc) > 120:
                            desc = desc[:120].strip() + '...'
                        pub = item.findtext('pubDate') or ''
                        if title:
                            news.append({
                                'title': title,
                                'summary': desc,
                                'url': link,
                                'source': feed['name'],
                                'time': time_ago(pub),
                                'sentiment': sentiment(title)
                            })
                    except Exception:
                        continue  # one malformed item must not drop the rest
            except Exception as e:
                logger.debug(f"RSS fetch error {feed['name']}: {e}")

        result = {'success': True, 'news': news, 'count': len(news)}
        self._news_cache = {'ts': now, 'data': result}
        request_handler.send_json_response(result)

    @staticmethod
    def _spy_load_json(path, default):
        """Best-effort JSON read: return `default` when missing or corrupt."""
        if os.path.exists(path):
            try:
                with open(path, 'r', encoding='utf-8') as f:
                    return json.load(f)
            except Exception:
                pass
        return default

    def handle_spy_data(self, request_handler) -> None:
        """GET handler: aggregate all Market Spy data for the dashboard.

        Combines active spy positions (with live PnL), the opportunity log,
        the completed-trade history/stats and the tail of the spy log file.
        Cached for 5s because each call reads several files and may hit the
        Binance price API.

        Args:
            request_handler: HTTP handler providing send_json_response().
        """
        try:
            # PERF: 5s cache — multiple file reads + per-symbol price fetches
            cached = _cache_get('spy_data', ttl=5.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            sd = self.script_dir
            # 1-3. Spy state files (each read is best-effort via the helper)
            spy_trades = self._spy_load_json(os.path.join(sd, 'espion_trades.json'), {})
            opportunities = self._spy_load_json(os.path.join(sd, 'espion_opportunities.json'), [])
            spy_history = self._spy_load_json(os.path.join(sd, 'espion_history.json'), [])

            # 4. Tail of the spy log (last 80 non-empty lines)
            log_lines = []
            spy_log_file = os.path.join(sd, 'market_spy.log')
            if os.path.exists(spy_log_file):
                try:
                    with open(spy_log_file, 'r', encoding='utf-8') as f:
                        log_lines = [l.strip() for l in f.readlines()[-80:] if l.strip()]
                except Exception:
                    log_lines = []

            # 5. Main-bot positions, keeping only dict entries that look like
            # real positions (guards against lists/garbage in the file)
            raw_positions = self._spy_load_json(os.path.join(sd, 'positions.json'), {})
            bot_positions = {}
            if isinstance(raw_positions, dict):
                bot_positions = {k: v for k, v in raw_positions.items()
                                 if isinstance(v, dict) and 'entry_price' in v}

            # Live prices from Binance for active spy positions (best-effort,
            # one short-timeout request per symbol)
            spy_current_prices = {}
            if spy_trades:
                try:
                    import requests as req
                    for sym in list(spy_trades.keys()):
                        try:
                            r = req.get("https://api.binance.com/api/v3/ticker/price",
                                        params={"symbol": sym}, timeout=3)
                            if r.status_code == 200:
                                spy_current_prices[sym] = float(r.json().get('price', 0))
                        except Exception:
                            pass
                except Exception:
                    pass

            # Enrich each spy position with a live PnL estimate
            spy_positions_enriched = []
            for symbol, trade in spy_trades.items():
                entry = trade.get('entry_price', 0)
                # Current-price fallback chain:
                # Binance API > positions.json > max_price > entry
                current_price = spy_current_prices.get(symbol, 0)
                if current_price <= 0 and symbol in bot_positions:
                    current_price = bot_positions[symbol].get('current_price', 0)
                if current_price <= 0:
                    current_price = trade.get('max_price', entry)
                pnl_pct = ((current_price - entry) / entry * 100) if entry > 0 else 0
                spy_positions_enriched.append({
                    'symbol': symbol,
                    'entry_price': entry,
                    'quantity': trade.get('quantity', 0),
                    'current_price': current_price,
                    'pnl_pct': round(pnl_pct, 2),
                    'stop_loss': trade.get('stop_loss', 0),
                    'take_profit': trade.get('take_profit', 0),
                    'pattern': trade.get('surge_type', trade.get('pattern', 'UNKNOWN')),
                    'score': trade.get('surge_strength', trade.get('score', 0)) * 20 if trade.get('surge_strength') else trade.get('score', 0),
                    'timestamp': trade.get('timestamp', ''),
                    'max_pnl': trade.get('max_pnl', 0),
                    'indicators': trade.get('indicators', {})
                })

            # Global opportunity stats
            executed_opps = [o for o in opportunities if o.get('executed')]
            patterns_count = {}
            for o in opportunities:
                p = o.get('pattern', 'UNKNOWN')
                patterns_count[p] = patterns_count.get(p, 0) + 1

            # 30 most recent opportunities, newest first
            recent_opps = opportunities[-30:] if opportunities else []
            recent_opps.reverse()

            # Completed-trade statistics over the FULL history
            history_stats = {}
            if spy_history:
                n = len(spy_history)
                wins = sum(1 for t in spy_history if t.get('pnl_pct', 0) > 0)
                history_stats = {
                    'total_trades': n,
                    'wins': wins,
                    'losses': n - wins,
                    'win_rate': round(wins / n * 100, 1),
                    'total_pnl_usdt': round(sum(t.get('pnl_usdt', 0) for t in spy_history), 4),
                    'avg_pnl_pct': round(sum(t.get('pnl_pct', 0) for t in spy_history) / n, 2),
                    'avg_hold_min': round(sum(t.get('hold_minutes', 0) for t in spy_history) / n, 1),
                }

            # Last 20 trades for display, newest first (PnL above uses ALL history)
            recent_history = spy_history[-20:] if spy_history else []
            recent_history.reverse()

            result = {
                'success': True,
                'spy_positions': spy_positions_enriched,
                'spy_positions_count': len(spy_trades),
                'bot_positions_count': len(bot_positions),
                'total_positions': len(spy_trades) + len(bot_positions),
                'recent_opportunities': recent_opps,
                'total_opportunities': len(opportunities),
                'total_executed': len(executed_opps),
                'patterns_distribution': patterns_count,
                'spy_history': recent_history,
                'history_stats': history_stats,
                'log_lines': log_lines,
                'timestamp': datetime.now().isoformat()
            }
            _cache_set('spy_data', result)
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in spy data: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_spy_scan(self, request_handler, data: Dict = None) -> None:
        """POST handler: run one Market Spy scan cycle from the dashboard.

        Executes market_spy.py --once in a subprocess (2 min hard timeout)
        and returns parsed stats (candidates analysed, trades executed).

        Args:
            request_handler: HTTP handler providing send_json_response().
            data: unused request payload, kept for handler-signature symmetry.
        """
        # Imports hoisted out of the try: subprocess.TimeoutExpired is
        # referenced in an except clause, so the name must exist regardless
        import subprocess
        import sys
        try:
            # Prefer the project's venv interpreter, fall back to current one
            python_exe = os.path.join(self.script_dir, '.venv', 'Scripts', 'python.exe')
            if not os.path.exists(python_exe):
                python_exe = sys.executable

            spy_script = os.path.join(self.script_dir, 'market_spy.py')
            if not os.path.exists(spy_script):
                request_handler.send_json_response({
                    'success': False,
                    'error': 'market_spy.py introuvable'
                }, 404)
                return

            # One-shot scan with a hard timeout
            result = subprocess.run(
                [python_exe, spy_script, '--once'],
                capture_output=True,
                text=True,
                timeout=120,
                cwd=self.script_dir
            )

            # Scrape stats out of the combined stdout/stderr
            output = result.stdout + result.stderr
            candidates = 0
            trades = 0
            for line in output.split('\n'):
                if 'Candidats analysés:' in line:
                    try:
                        candidates = int(line.split('Candidats analysés:')[1].split('|')[0].strip())
                    except (ValueError, IndexError):
                        pass  # unparseable line → keep previous value
                if 'Trades ce cycle:' in line:
                    try:
                        trades = int(line.split('Trades ce cycle:')[1].split('|')[0].strip())
                    except (ValueError, IndexError):
                        pass

            request_handler.send_json_response({
                'success': True,
                'candidates_analyzed': candidates,
                'trades_executed': trades,
                'exit_code': result.returncode,
                'output_preview': output[-500:] if output else ''
            })

        except subprocess.TimeoutExpired:
            request_handler.send_json_response({
                'success': False,
                'error': 'Timeout du scan (>120s)'
            }, 500)
        except Exception as e:
            logger.error(f"Error in spy scan: {e}", exc_info=True)
            request_handler.send_json_response({
                'success': False,
                'error': str(e)
            }, 500)

    def handle_mobile_summary(self, request_handler) -> None:
        """GET /api/mobile-summary - Données agrégées pour le dashboard mobile (1 seul appel)"""
        try:
            # 🚀 PERF: cache 12s — agrège 6 fichiers + market regime, coûteux à calculer
            cached = _cache_get('mobile_summary', ttl=12.0)
            if cached is not None:
                request_handler.send_json_response(cached)
                return

            from datetime import datetime

            # --- Bot analysis ---
            analysis_path = os.path.join(self.script_dir, 'bot_analysis.json')
            analysis = load_json_file(analysis_path, {})
            data = analysis if analysis else {}
            stats = data.get('stats', {})

            bot_running = False
            ts = data.get('timestamp', '')
            if ts:
                try:
                    age = (datetime.now() - datetime.fromisoformat(ts)).total_seconds()
                    bot_running = age < 120
                except Exception:
                    pass

            # Lire les positions depuis positions.json (source de vérité, même que le desktop)
            # bot_analysis.json peut être périmé si le bot est arrêté → positions fantômes
            positions = []
            try:
                pos_file = os.path.join(self.script_dir, 'positions.json')
                if os.path.exists(pos_file):
                    with open(pos_file, 'r', encoding='utf-8') as _f:
                        _pos_raw = json.load(_f)
                    if isinstance(_pos_raw, list):
                        positions = _pos_raw
                    elif isinstance(_pos_raw, dict) and _pos_raw:
                        # Injecter le symbol (clé du dict) dans chaque entrée
                        positions = [{**v, 'symbol': k} for k, v in _pos_raw.items()]
            except Exception:
                pass
            market = data.get('marketAnalysis', {})

            # --- IA status ---
            ia_running = False
            ia_cycle = 0
            ia_signals_buy = 0
            ia_signals_sell = 0
            ia_ready = []
            ia_eligible = 0
            ia_surveillance = 0
            try:
                status_file = os.path.join(self.script_dir, 'ia_status.json')
                if os.path.exists(status_file):
                    with open(status_file, 'r', encoding='utf-8') as f:
                        ia_status = json.load(f)
                    ts_ia = ia_status.get('timestamp', '')
                    ia_age = 999
                    if ts_ia:
                        try:
                            ia_age = (datetime.now() - datetime.fromisoformat(ts_ia)).total_seconds()
                        except Exception:
                            pass
                    ia_running = ia_status.get('running', False) and ia_age < 120
                    ia_cycle = ia_status.get('cycle_count', 0)
                    ia_signals_buy = ia_status.get('signals_achat', 0)
                    ia_signals_sell = ia_status.get('signals_vente', 0)
                    ia_eligible = ia_status.get('eligible_count', 0)
                    ia_surveillance = ia_status.get('signals_possible', 0)

                cache_file = os.path.join(self.script_dir, 'ia_surveillance_cache.json')
                if os.path.exists(cache_file):
                    with open(cache_file, 'r', encoding='utf-8') as f:
                        cache = json.load(f)
                    ia_ready = [
                        {'symbol': s.get('symbol', ''), 'score': s.get('score', 0), 'pattern': s.get('pattern', '')}
                        for s in cache.get('ready_signals', [])[:5]
                    ]
            except Exception:
                pass

            # --- Market regime (via détecteur en cache) ---
            regime = 'NEUTRAL'
            fear_greed_score = 50
            fear_greed_label = 'Neutre'
            try:
                from market_regime import get_market_regime_detector
                detector = get_market_regime_detector()
                # Utiliser les données en cache uniquement (force_update=False) — pas de délai réseau
                try:
                    _rname, _rcfg = detector.detect_regime(force_update=False)
                    regime = _rname or 'NEUTRAL'
                except Exception:
                    regime = detector.current_regime or 'NEUTRAL'
                md = detector.market_data or {}
                _global_score = md.get('global_score', 50)
                _btc_rsi = md.get('btc', {}).get('rsi', 50)
                _bull_pct = md.get('altcoins', {}).get('bullish_pct', 50)
                fear_greed_score = round(_global_score * 0.5 + min(100, max(0, _btc_rsi)) * 0.3 + _bull_pct * 0.2)
                if fear_greed_score <= 20:
                    fear_greed_label = 'Peur extrême'
                elif fear_greed_score <= 40:
                    fear_greed_label = 'Peur'
                elif fear_greed_score <= 60:
                    fear_greed_label = 'Neutre'
                elif fear_greed_score <= 80:
                    fear_greed_label = 'Avidité'
                else:
                    fear_greed_label = 'Avidité extrême'
            except Exception:
                pass

            # --- Courbe PnL SPY (espion_history.json) — depuis 01/04/2026 ---
            STATS_CUTOFF = '2026-04-01'
            bot_curve = []      # gardé pour compatibilité mais vide côté mobile
            pnl_bot_realized = 0.0
            pnl_bot_all_time = 0.0
            bot_wins = 0
            bot_losses = 0
            bot_total_trades = 0
            last_trades = []  # 10 derniers trades pour l'histogramme mobile
            # Bot: stats depuis 01/04 (inclus dans pnl_total mais non affiché sur mobile)
            try:
                hist_file = os.path.join(self.script_dir, 'trade_history.json')
                if os.path.exists(hist_file):
                    with open(hist_file, 'r', encoding='utf-8') as f:
                        trades = json.load(f)
                    trades_sorted = sorted(trades, key=lambda x: x.get('exit_time', ''))
                    pnl_bot_all_time = sum(t.get('pnl', 0) or 0 for t in trades_sorted)
                    trades_recent = [t for t in trades_sorted if t.get('exit_time', '')[:10] >= STATS_CUTOFF]
                    for t in trades_recent:
                        p = t.get('pnl', 0) or 0
                        pnl_bot_realized += p
                        if p > 0: bot_wins += 1
                        else: bot_losses += 1
                    bot_total_trades = len(trades_recent)
            except Exception:
                pass

            # --- Courbe PnL SPY (espion_history.json) — depuis 01/04/2026 uniquement ---
            spy_curve = []
            spy_pnl_total = 0.0
            spy_pnl_all_time = 0.0
            spy_wins = 0
            spy_losses = 0
            spy_total_trades = 0
            spy_activity = {}
            tr_ratio = {'ratio': 0, 'zone': 'NEUTRE', 'zone_color': '#f59e0b', 'label': 'Neutre', 'recommendation': 'Prudence recommandée'}
            try:
                spy_file = os.path.join(self.script_dir, 'espion_history.json')
                if os.path.exists(spy_file):
                    with open(spy_file, 'r', encoding='utf-8') as f:
                        spy_trades = json.load(f)
                    spy_trades_sorted = sorted(spy_trades, key=lambda x: x.get('exit_time', ''))
                    spy_pnl_all_time = sum(t.get('pnl_usdt', 0) for t in spy_trades_sorted)
                    spy_recent = [t for t in spy_trades_sorted if t.get('exit_time', '')[:10] >= STATS_CUTOFF]
                    cum = 0.0
                    for t in spy_recent:
                        p = t.get('pnl_usdt', 0)
                        cum += p
                        spy_pnl_total += p
                        if p > 0:
                            spy_wins += 1
                        else:
                            spy_losses += 1
                        spy_curve.append({'t': t.get('exit_time', '')[:16], 'v': round(cum, 2)})
                    spy_total_trades = len(spy_recent)
                    for t in spy_recent[-10:]:
                        p = t.get('pnl_usdt', 0)
                        et = t.get('exit_time', '')[:19]
                        last_trades.append({'symbol': t.get('symbol', ''), 'pnl': round(p, 2), 'pnl_pct': round(t.get('pnl_pct', 0) or 0, 2), 'source': 'spy', 'exit_time': et})
                    last_trades = sorted(last_trades, key=lambda x: x.get('exit_time', '')[:19])[-10:]
                    # Activité SPY : aujourd'hui et 7 derniers jours
                    from datetime import timedelta
                    today_str = datetime.now().strftime('%Y-%m-%d')
                    week_cutoff = (datetime.now() - timedelta(days=7)).strftime('%Y-%m-%d')
                    spy_today_list = [t for t in spy_trades_sorted if t.get('exit_time', '')[:10] == today_str]
                    spy_7d_list = [t for t in spy_trades_sorted if t.get('exit_time', '')[:10] >= week_cutoff]
                    last_spy = spy_trades_sorted[-1] if spy_trades_sorted else {}
                    spy_activity = {
                        'today_trades': len(spy_today_list),
                        'today_pnl_usdt': round(sum(t.get('pnl_usdt', 0) for t in spy_today_list), 2),
                        'week_trades': len(spy_7d_list),
                        'week_pnl_usdt': round(sum(t.get('pnl_usdt', 0) for t in spy_7d_list), 2),
                        'last_symbol': last_spy.get('symbol', ''),
                        'last_exit_time': last_spy.get('exit_time', '')[:16],
                    }
                    # 🔧 FIX 13/04: Ratio Tendance/Risque — utilise le cache du endpoint dédié
                    # (calcul correct jour par jour) au lieu du Sharpe 7j simplifié
                    try:
                        _tr_cached = _cache_get('trend_risk_ratio', ttl=300.0)
                        if _tr_cached and _tr_cached.get('current'):
                            _cur = _tr_cached['current']
                            tr_ratio = {
                                'ratio':          _cur.get('ratio', 0),
                                'zone':           _cur.get('zone', 'NEUTRE'),
                                'zone_color':     _cur.get('zone_color', '#f59e0b'),
                                'label':          _cur.get('zone_label', 'Neutre'),
                                'recommendation': _cur.get('recommendation', ''),
                            }
                        else:
                            # Fallback: calcul simplifié si cache absent
                            import statistics as _stats_mod
                            spy_7d_pcts = [t.get('pnl_pct', 0) for t in spy_7d_list if t.get('pnl_pct') is not None]
                            if len(spy_7d_pcts) >= 5:
                                mean_p = _stats_mod.mean(spy_7d_pcts)
                                std_p = _stats_mod.stdev(spy_7d_pcts)
                                ratio = round(mean_p / std_p, 3) if std_p > 0.001 else 0
                                if ratio > 0.5:     zone, color, label, rec = 'FORT_POSITIF', '#10b981', 'Fort Positif', 'Positions normales ✅'
                                elif ratio > 0.1:   zone, color, label, rec = 'POSITIF', '#10b981', 'Positif', 'Positions normales ✅'
                                elif ratio > -0.1:  zone, color, label, rec = 'NEUTRE', '#f59e0b', 'Neutre', 'Réduire positions -30%'
                                elif ratio > -0.5:  zone, color, label, rec = 'NEGATIF', '#ef4444', 'Négatif', 'Réduire positions -50%'
                                else:               zone, color, label, rec = 'FORT_NEGATIF', '#7f1d1d', 'Fort Négatif', 'PAUSE trading'
                                tr_ratio = {'ratio': ratio, 'zone': zone, 'zone_color': color, 'label': label, 'recommendation': rec}
                    except Exception:
                        pass
            except Exception:
                pass

            # --- Market overview (gainers/losers/sentiment depuis crypto_cache) ---
            market_overview = {}
            try:
                cache_path = os.path.join(self.script_dir, 'crypto_cache', 'crypto_data.json')
                if os.path.exists(cache_path):
                    with open(cache_path, 'r', encoding='utf-8') as f:
                        cdata = json.load(f)
                    syms = cdata.get('symbols', {})
                    changes = [
                        (s.replace('USDT', ''), v.get('priceChangePercent', 0))
                        for s, v in syms.items()
                        if isinstance(v.get('priceChangePercent'), (int, float))
                    ]
                    if changes:
                        changes_sorted = sorted(changes, key=lambda x: x[1])
                        losers  = [{'symbol': s, 'change': round(v, 2)} for s, v in changes_sorted[:3] if v < 0]
                        gainers = [{'symbol': s, 'change': round(v, 2)} for s, v in reversed(changes_sorted[-3:]) if v > 0]
                        pos_count = sum(1 for _, v in changes if v > 0)
                        neg_count = sum(1 for _, v in changes if v <= 0)
                        avg = round(sum(v for _, v in changes) / len(changes), 2)
                        pct_bull = pos_count / len(changes) * 100
                        if pct_bull >= 60:   sent = 'HAUSSIER'
                        elif pct_bull >= 45: sent = 'NEUTRE'
                        else:                sent = 'BAISSIER'
                        market_overview = {
                            'sentiment': sent,
                            'pct_positive': round(pct_bull, 1),
                            'avg_change_24h': avg,
                            'positive_count': pos_count,
                            'negative_count': neg_count,
                            'gainers': gainers,
                            'losers': losers,
                        }
            except Exception:
                pass

            # --- PnL non-réalisé (positions ouvertes) avec PRIX BINANCE TEMPS RÉEL ---
            pnl_unrealized = 0.0
            # 🔧 FIX: Récupérer les prix temps réel comme /api/positions-live
            pos_symbols = [p.get('symbol', '') for p in positions if p.get('symbol')]
            live_prices = {}
            if pos_symbols:
                try:
                    symbols_param = json.dumps(pos_symbols).replace(' ', '')
                    url = f'https://api.binance.com/api/v3/ticker/price?symbols={symbols_param}'
                    resp = requests.get(url, timeout=5)
                    if resp.status_code == 200:
                        live_prices = {p['symbol']: float(p['price']) for p in resp.json()}
                except Exception:
                    pass  # Fallback sur currentPrice de bot_analysis.json

            for pos_item in positions:
                qty = pos_item.get('quantity', 0)
                entry = pos_item.get('entryPrice') or pos_item.get('entry_price', 0)
                sym = pos_item.get('symbol', '')
                current = live_prices.get(sym, pos_item.get('currentPrice') or pos_item.get('current_price', 0))
                if entry > 0 and qty > 0:
                    pnl_unrealized += (current - entry) * qty
                # Mettre à jour pour la réponse
                if sym in live_prices:
                    pos_item['currentPrice'] = live_prices[sym]
                    if entry > 0:
                        pos_item['pnlPct'] = round(((live_prices[sym] - entry) / entry) * 100, 2)

            # pnl_total = depuis 01/04 (spy + bot) — cohérent avec dashboard desktop et bloc Gestion
            pnl_total = round(spy_pnl_total + pnl_bot_realized + pnl_unrealized, 2)

            # --- Taux EUR depuis Binance (même source que le dashboard desktop) ---
            eur_rate = None
            try:
                import urllib.request as _ureq
                _r = _ureq.urlopen('https://api.binance.com/api/v3/ticker/price?symbol=EURUSDT', timeout=3)
                _price = json.loads(_r.read()).get('price')
                if _price:
                    eur_rate = round(1 / float(_price), 6)
            except Exception:
                pass

            response = {
                'success': True,
                'bot_running': bot_running,
                'ia_running': ia_running,
                'regime': regime,
                'fear_greed': {'score': fear_greed_score, 'label': fear_greed_label},
                'pnl_total': pnl_total,
                'pnl_total_eur': round(pnl_total * eur_rate, 2) if eur_rate else None,
                'eur_rate': eur_rate,
                'pnl_unrealized': round(pnl_unrealized, 2),
                'stats': {
                    'pnl': pnl_bot_realized,
                    'win_rate': round(bot_wins / bot_total_trades * 100, 1) if bot_total_trades > 0 else stats.get('winRate', 0),
                    'total_trades': bot_total_trades or stats.get('totalTrades', 0),
                    'wins': bot_wins or stats.get('wins', 0),
                    'losses': bot_losses or stats.get('losses', 0),
                    'positions': stats.get('positions', 0),
                    'max_positions': stats.get('maxPositions', 10),
                },
                'spy': {
                    'pnl': round(spy_pnl_total, 2),
                    'wins': spy_wins,
                    'losses': spy_losses,
                    'total_trades': spy_total_trades,
                    'win_rate': round(spy_wins / spy_total_trades * 100, 1) if spy_total_trades > 0 else 0,
                },
                'bot_curve': bot_curve,
                'spy_curve': spy_curve,
                'last_trades': last_trades,
                'positions': [
                    {
                        'symbol': p.get('symbol', ''),
                        'entry': p.get('entryPrice') or p.get('entry_price', 0),
                        'current': p.get('currentPrice') or p.get('current_price', 0),
                        'pnl_pct': p.get('pnlPct') or p.get('pnl_pct', 0),
                        'qty': p.get('quantity', 0),
                    }
                    for p in positions
                ],
                'market': {
                    'bearish': market.get('bearishCount', 0),
                    'bullish': market.get('bullishCount', 0),
                },
                'market_overview': market_overview,
                'ia': {
                    'cycle': ia_cycle,
                    'signals_buy': ia_signals_buy,
                    'signals_sell': ia_signals_sell,
                    'eligible': ia_eligible,
                    'surveillance': ia_surveillance,
                    'ready': ia_ready,
                },
                'spy_activity': spy_activity,
                'tr_ratio': tr_ratio,
                'timestamp': datetime.now().isoformat(),
            }
            _cache_set('mobile_summary', response)
            request_handler.send_json_response(response)

        except Exception as e:
            logger.error(f"Error in mobile_summary: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    # ═══════════════════════════════════════════════════════════════════════
    # GESTION — Projections & Plan Stratégique (temps réel spy depuis 01/04)
    # ═══════════════════════════════════════════════════════════════════════

    def handle_gestion_projections(self, request_handler) -> None:
        """Compute real-time capital projections from actual spy performance.

        Reads closed spy trades (espion_history.json) since 01/04/2026, derives
        fee-adjusted daily PnL converted to EUR, then simulates 1/3/6-month
        capital projections under the 4-phase strategic plan (accumulation,
        securing, diversification, annuity). The JSON result is sent through
        ``request_handler`` and cached for 60 s in the module-level cache.

        Args:
            request_handler: HTTP handler exposing ``send_json_response``.
        """
        try:
            # Serve from the server-side cache when fresh (avoids re-reading
            # the history file and re-hitting Binance on every request).
            cached = _cache_get('gestion_projections', ttl=60.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            spy_file = os.path.join(self.script_dir, 'espion_history.json')
            if not os.path.exists(spy_file):
                request_handler.send_json_response({'success': False, 'error': 'Pas de données spy'}, 404)
                return

            with open(spy_file, 'r', encoding='utf-8') as f:
                all_trades = json.load(f)

            # --- Keep only trades closed on/after 01/04/2026 ---
            cutoff = datetime(2026, 4, 1)
            trades = []
            for t in all_trades:
                exit_time_str = t.get('exit_time', '')
                if not exit_time_str:
                    continue
                try:
                    # Strip 'Z' / '+00:00' so the parsed datetime is naive and
                    # comparable to the naive `cutoff`.
                    exit_dt = datetime.fromisoformat(exit_time_str.replace('Z', '+00:00').replace('+00:00', ''))
                except Exception:
                    try:
                        exit_dt = datetime.strptime(exit_time_str[:19], '%Y-%m-%dT%H:%M:%S')
                    except Exception:
                        continue  # unparseable exit date: skip this trade
                if exit_dt >= cutoff:
                    trades.append({**t, '_exit_dt': exit_dt})

            if not trades:
                request_handler.send_json_response({
                    'success': True,
                    'message': 'Pas encore de trades depuis le 01/04',
                    'projections': None
                })
                return

            # --- Real performance metrics (gross, USDT) ---
            now = datetime.now()
            first_trade_dt = min(t['_exit_dt'] for t in trades)
            # Floor at half a day so a very recent start doesn't explode the
            # daily average.
            days_active = max((now - first_trade_dt).total_seconds() / 86400, 0.5)

            total_pnl_brut = sum(t.get('pnl_usdt', 0) or 0 for t in trades)
            total_trades = len(trades)
            wins = sum(1 for t in trades if (t.get('pnl_usdt', 0) or 0) > 0)
            win_rate = round(wins / total_trades * 100, 1) if total_trades else 0

            # Deduct fees: 0.2% round-trip (maker+taker) + 0.05% slippage per trade
            fee_rate = 0.0025  # 0.25% total per trade
            position_size = 900  # base position size in USDT
            total_fees_usdt = total_trades * position_size * fee_rate
            total_pnl_net_usdt = total_pnl_brut - total_fees_usdt
            daily_pnl_net_usdt = total_pnl_net_usdt / days_active

            # --- Convert USDT -> EUR (Binance rate, same source as the desktop dashboard) ---
            eur_rate = 0.88  # fallback rate if the Binance call fails
            try:
                import urllib.request as _ureq
                _r = _ureq.urlopen('https://api.binance.com/api/v3/ticker/price?symbol=EURUSDT', timeout=3)
                _price = json.loads(_r.read()).get('price')
                if _price:
                    eur_rate = 1 / float(_price)
            except Exception:
                pass  # best-effort: keep the fallback rate

            total_pnl_brut_eur = round(total_pnl_brut * eur_rate, 2)
            total_fees_eur = round(total_fees_usdt * eur_rate, 2)
            total_pnl_net_eur = round(total_pnl_net_usdt * eur_rate, 2)
            daily_pnl_net = round(daily_pnl_net_usdt * eur_rate, 2)  # in EUR, drives the projections

            # --- Capital & balance ---
            capital_initial = 16000
            try:
                settings_file = os.path.join(self.script_dir, 'bot_settings.json')
                if os.path.exists(settings_file):
                    with open(settings_file, 'r') as sf:
                        settings = json.load(sf)
                        capital_initial = settings.get('capital_initial', 16000)
            except Exception:
                pass  # keep the default capital on any read error

            capital_actuel = capital_initial + total_pnl_net_eur

            # --- ROI model (EUR, decaying in tiers as capital grows) ---
            def get_roi_daily(cap):
                """Daily ROI fraction for a given trading capital tier."""
                if cap <= 20000: return daily_pnl_net / max(capital_initial, 1)
                elif cap <= 50000: return (daily_pnl_net / max(capital_initial, 1)) * 0.70
                elif cap <= 100000: return (daily_pnl_net / max(capital_initial, 1)) * 0.47
                elif cap <= 250000: return (daily_pnl_net / max(capital_initial, 1)) * 0.23
                else: return (daily_pnl_net / max(capital_initial, 1)) * 0.14

            CAP_TRADING = 80000  # hard ceiling on trading capital; any excess is withdrawn

            # --- Simulate projections (30-day months, ROI frozen per month) ---
            projections = {}
            for label, months in [('1_mois', 1), ('3_mois', 3), ('6_mois', 6)]:
                cap_trading = float(capital_actuel)
                cap_securise = 0.0
                cap_etf = 0.0
                monthly_detail = []

                for m in range(1, months + 1):
                    roi_d = get_roi_daily(cap_trading)
                    month_gain = 0
                    for _d in range(30):
                        day_gain = cap_trading * roi_d
                        month_gain += day_gain
                        cap_trading += day_gain

                    # Withdrawal rate depends on the strategic phase.
                    if m <= 3:
                        retrait = 0
                        phase = "ACCUMULATION"
                    elif m <= 12:
                        retrait = month_gain * 0.30
                        phase = "SÉCURISATION"
                    else:
                        retrait = month_gain * 0.50
                        phase = "DIVERSIFICATION"

                    # Enforce the trading-capital ceiling: the overflow is withdrawn too.
                    if cap_trading > CAP_TRADING:
                        retrait += cap_trading - CAP_TRADING
                        cap_trading = CAP_TRADING

                    cap_trading -= retrait

                    # Allocate withdrawals: 100% secured savings in year 1,
                    # then 40% savings / 60% ETF.
                    if m <= 12:
                        cap_securise += retrait
                    else:
                        cap_securise += retrait * 0.40
                        cap_etf += retrait * 0.60

                    # Monthly compounding: 3.5%/yr savings, 8%/yr ETF.
                    cap_securise *= (1 + 0.035 / 12)
                    cap_etf *= (1 + 0.08 / 12)

                    monthly_detail.append({
                        'month': m,
                        'phase': phase,
                        'gain': round(month_gain, 2),
                        'trading': round(cap_trading, 2),
                        'securise': round(cap_securise, 2),
                        'etf': round(cap_etf, 2),
                        'total': round(cap_trading + cap_securise + cap_etf, 2),
                    })

                final = monthly_detail[-1]
                projections[label] = {
                    'months': months,
                    'detail': monthly_detail,
                    'total_final': final['total'],
                    'gain_total': round(final['total'] - capital_actuel, 2),
                    'trading': final['trading'],
                    'securise': final['securise'],
                    'etf': final['etf'],
                }

            # --- Strategic plan summary ---
            plan = [
                {'phase': 1, 'nom': 'ACCUMULATION', 'periode': 'Mois 1-3',
                 'action': 'Réinvestir 100% des gains', 'objectif': 'Atteindre 50K€ de capital trading'},
                {'phase': 2, 'nom': 'SÉCURISATION', 'periode': 'Mois 4-12',
                 'action': 'Retirer 30% → épargne sécurisée (3.5%/an)',
                 'objectif': 'Matelas de sécurité 30-50K€'},
                {'phase': 3, 'nom': 'DIVERSIFICATION', 'periode': 'Année 2-3',
                 'action': 'Retirer 50% → 60% ETF + 40% épargne',
                 'objectif': 'Patrimoine hors crypto > capital trading'},
                {'phase': 4, 'nom': 'RENTE', 'periode': 'Année 4-5',
                 'action': 'Retirer 60% → 70% ETF + 30% épargne',
                 'objectif': 'Revenus passifs autonomes'},
            ]

            # Current phase detection (later checks deliberately override earlier ones)
            current_phase = 1
            if days_active > 90 and capital_actuel > 50000:
                current_phase = 2
            if days_active > 365:
                current_phase = 3
            if days_active > 3 * 365:
                current_phase = 4

            # --- Daily breakdown (gross/net PnL in EUR, trade count per day) ---
            from collections import defaultdict
            daily_pnl = defaultdict(float)
            daily_count = defaultdict(int)
            for t in trades:
                day_key = t['_exit_dt'].strftime('%Y-%m-%d')
                daily_pnl[day_key] += (t.get('pnl_usdt', 0) or 0)
                daily_count[day_key] += 1

            daily_series = []
            for day in sorted(daily_pnl.keys()):
                fee_day = daily_count[day] * position_size * fee_rate
                net = daily_pnl[day] - fee_day
                daily_series.append({'date': day, 'pnl_brut': round(daily_pnl[day] * eur_rate, 2),
                                     'pnl_net': round(net * eur_rate, 2), 'trades': daily_count[day]})

            result = {
                'success': True,
                'performances': {
                    'days_active': round(days_active, 1),
                    'total_trades': total_trades,
                    'wins': wins,
                    'win_rate': win_rate,
                    'total_pnl_brut': total_pnl_brut_eur,
                    'total_fees': total_fees_eur,
                    'total_pnl_net': total_pnl_net_eur,
                    'daily_pnl_net': daily_pnl_net,
                    'daily_series': daily_series,
                },
                'capital': {
                    'initial': capital_initial,
                    'actuel': round(capital_actuel, 2),
                    # Guard against a zero capital_initial loaded from settings
                    # (same guard as get_roi_daily) — avoids a 500 on this endpoint.
                    'gain_pct': round((capital_actuel - capital_initial) / max(capital_initial, 1) * 100, 2),
                },
                'projections': projections,
                'plan_strategique': plan,
                'phase_actuelle': current_phase,
                'regles_securite': {
                    'cap_trading': CAP_TRADING,
                    'fee_rate': fee_rate,
                    'position_size': position_size,
                    'drawdown_max': '20% mensuel → PAUSE',
                },
                'timestamp': now.isoformat(),
            }

            _cache_set('gestion_projections', result)
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in gestion_projections: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    # ═══════════════════════════════════════════════════════════════════════
    # DEEP ANALYSIS — Corrélation Spy / Marché (résultats ML)
    # ═══════════════════════════════════════════════════════════════════════

    def handle_deep_spy_analysis(self, request_handler) -> None:
        """Serve deep spy/market correlation analysis results.

        Loads precomputed ML/statistical results from
        deep_spy_market_analysis_results.json, extracts the subset the
        dashboard needs (correlations, Granger causality, regimes, ML model
        performance, hourly/surge breakdowns, ...) and sends it as JSON.
        Results are cached for one hour in the module-level cache.

        Args:
            request_handler: HTTP handler exposing ``send_json_response``.
        """
        try:
            cached = _cache_get('deep_spy_analysis', ttl=3600.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            analysis_file = os.path.join(self.script_dir, 'deep_spy_market_analysis_results.json')
            if not os.path.exists(analysis_file):
                request_handler.send_json_response({
                    'success': False,
                    'error': 'Analyse non disponible. Lancez deep_spy_market_analysis.py'
                }, 404)
                return

            with open(analysis_file, 'r', encoding='utf-8') as f:
                raw = json.load(f)

            # Extract key sections for the dashboard (all defensively, the file
            # is produced by an external script and may be partial).
            stat = raw.get('statistical_analysis', {})
            ml = raw.get('ml_analysis', {})
            adv = raw.get('advanced_analysis', {})
            meta = raw.get('metadata', {})
            conclusions = raw.get('conclusions', {})
            summary = raw.get('data_summary', {})

            # Top correlations (|r| > 0.3), capped at the first 30 pairs.
            # Use .get throughout: a missing key in one entry must not 500 the
            # whole endpoint.
            top_corrs = []
            for k, v in list(stat.get('correlations', {}).items())[:30]:
                if abs(v.get('pearson_r', 0)) >= 0.3:
                    top_corrs.append({
                        'pair': k,
                        'r': v.get('pearson_r', 0),
                        'p': v.get('pearson_p'),
                        'significant': v.get('significant_1pct', False)
                    })

            # Granger causality results (defensive .get, same rationale)
            granger = []
            for k, v in stat.get('granger_causality', {}).items():
                granger.append({
                    'pair': k,
                    'p_value': v.get('min_p_value'),
                    'lag': v.get('best_lag'),
                    'significant': v.get('significant', False)
                })

            # Regime analysis
            regimes = stat.get('regime_analysis', {})

            # ML performance per target/model (only entries that carry an r2_score)
            ml_perf = {}
            for target, models in ml.items():
                if not isinstance(models, dict):
                    continue
                ml_perf[target] = {}
                for model_name, metrics in models.items():
                    if isinstance(metrics, dict) and 'r2_score' in metrics:
                        ml_perf[target][model_name] = {
                            'r2': metrics['r2_score'],
                            'mae': metrics.get('mae'),
                            'direction_accuracy': metrics.get('prediction_direction_accuracy'),
                            'top_features': metrics.get('feature_importance_top10', [])[:5]
                        }

            # Binary classification
            binary_cls = ml.get('binary_classification', {})

            # BTC trend conditions
            btc_conditions = adv.get('btc_trend_conditions', {})

            # Hourly analysis
            hourly = adv.get('hourly_analysis', {})

            # Surge types
            surge_types = adv.get('surge_type_analysis', {})

            # Rolling correlation, subsampled to ~60 points for the chart
            rolling = stat.get('rolling_correlation_btc_vs_pnl', [])
            if len(rolling) > 60:
                step = len(rolling) // 60
                rolling = rolling[::step]

            # Day of week
            dow = stat.get('day_of_week', {})

            # Spy quality score
            quality_score = adv.get('spy_quality_score', {})

            result = {
                'success': True,
                'metadata': {
                    'analysis_date': meta.get('analysis_date', ''),
                    'symbols_analyzed': meta.get('symbols_analyzed', 0),
                    'total_surges': meta.get('total_surges_simulated', 0),
                    'data_days': meta.get('data_days', 0),
                    'date_range': meta.get('date_range', {}),
                    'computation_time': meta.get('computation_time_seconds', 0),
                },
                'conclusions': {
                    'validated': conclusions.get('hypothesis_validated', False),
                    'confidence': conclusions.get('confidence_level', 'LOW'),
                    'signals': conclusions.get('exploitable_signals', []),
                    'recommendations': conclusions.get('recommendations', []),
                },
                'top_correlations': top_corrs,
                'granger_causality': granger,
                'regime_analysis': regimes,
                'ml_performance': ml_perf,
                'binary_classification': binary_cls,
                'btc_conditions': btc_conditions,
                'hourly_analysis': hourly,
                'surge_types': surge_types,
                'rolling_correlation': rolling,
                'day_of_week': dow,
                'quality_score': quality_score,
                'surge_stats': summary.get('surge_stats', {}),
            }

            _cache_set('deep_spy_analysis', result)
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in deep_spy_analysis: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    # ═══════════════════════════════════════════════════════════════════════
    # TREND/RISK RATIO — Analyse corrélation spy/marché en temps réel
    # ═══════════════════════════════════════════════════════════════════════

    def handle_trend_risk_ratio(self, request_handler) -> None:
        """Compute live trend/risk ratio from spy trades + BTC benchmark."""
        try:
            cached = _cache_get('trend_risk_ratio', ttl=300.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            import numpy as np

            spy_file = os.path.join(self.script_dir, 'espion_history.json')
            if not os.path.exists(spy_file):
                request_handler.send_json_response({'success': False, 'error': 'Pas de données spy'}, 404)
                return

            with open(spy_file, 'r', encoding='utf-8') as f:
                all_trades = json.load(f)

            # --- Parse all trades with exit dates ---
            trades_by_day = {}
            symbol_counts = {}
            for t in all_trades:
                exit_str = t.get('exit_time', '')
                if not exit_str:
                    continue
                try:
                    exit_dt = datetime.fromisoformat(exit_str.replace('Z', '+00:00').replace('+00:00', ''))
                except Exception:
                    try:
                        exit_dt = datetime.strptime(exit_str[:19], '%Y-%m-%dT%H:%M:%S')
                    except Exception:
                        continue

                day = exit_dt.strftime('%Y-%m-%d')
                if day not in trades_by_day:
                    trades_by_day[day] = []
                trades_by_day[day].append(t)

                sym = t.get('symbol', '')
                symbol_counts[sym] = symbol_counts.get(sym, 0) + 1

            if len(trades_by_day) < 3:
                request_handler.send_json_response({
                    'success': True,
                    'message': 'Pas assez de données (< 3 jours)',
                    'ratio': None
                })
                return

            # --- Daily PnL series ---
            sorted_days = sorted(trades_by_day.keys())
            daily_pnl = []
            daily_trades_count = []
            daily_avg_pnl_pct = []
            for day in sorted_days:
                day_trades = trades_by_day[day]
                pnl = sum(t.get('pnl_usdt', 0) or 0 for t in day_trades)
                avg_pct = np.mean([t.get('pnl_pct', 0) or 0 for t in day_trades])
                daily_pnl.append(pnl)
                daily_trades_count.append(len(day_trades))
                daily_avg_pnl_pct.append(avg_pct)

            pnl_arr = np.array(daily_pnl)
            pct_arr = np.array(daily_avg_pnl_pct)

            # --- Fetch BTC daily close for same period ---
            btc_daily_returns = {}
            try:
                from binance.client import Client
                import config as bot_config
                client = Client(bot_config.BINANCE_API_KEY, bot_config.BINANCE_API_SECRET)
                first_day = datetime.strptime(sorted_days[0], '%Y-%m-%d')
                klines = client.get_klines(
                    symbol='BTCUSDT', interval='1d',
                    startTime=int((first_day - timedelta(days=1)).timestamp() * 1000),
                    limit=len(sorted_days) + 5
                )
                btc_closes = {}
                for k in klines:
                    dt = datetime.fromtimestamp(k[0] / 1000).strftime('%Y-%m-%d')
                    btc_closes[dt] = float(k[4])

                btc_dates = sorted(btc_closes.keys())
                for i in range(1, len(btc_dates)):
                    d = btc_dates[i]
                    prev = btc_dates[i - 1]
                    btc_daily_returns[d] = (btc_closes[d] - btc_closes[prev]) / btc_closes[prev] * 100
            except Exception as e:
                logger.warning(f"Could not fetch BTC data for T/R: {e}")

            # --- Compute rolling T/R ratio (7-day) ---
            window = min(7, len(pnl_arr))
            rolling_mean = []
            rolling_vol = []
            rolling_tr = []
            for i in range(window - 1, len(pnl_arr)):
                chunk = pct_arr[i - window + 1:i + 1]
                m = float(np.mean(chunk))
                v = float(np.std(chunk))
                rolling_mean.append(m)
                rolling_vol.append(v)
                rolling_tr.append(m / v if v > 0.001 else 0)

            current_tr = rolling_tr[-1] if rolling_tr else 0
            avg_tr = float(np.mean(rolling_tr)) if rolling_tr else 0

            # Zone classification
            if current_tr > 0.5:
                zone = 'FORT_POSITIF'
                zone_label = 'Fort Positif'
                zone_color = '#10b981'
                zone_icon = '🟢🟢'
                recommendation = 'Positions normales — marché très favorable'
            elif current_tr > 0.1:
                zone = 'POSITIF'
                zone_label = 'Positif'
                zone_color = '#10b981'
                zone_icon = '🟢'
                recommendation = 'Positions normales — tendance favorable'
            elif current_tr > -0.1:
                zone = 'NEUTRE'
                zone_label = 'Neutre'
                zone_color = '#f59e0b'
                zone_icon = '🟡'
                recommendation = 'Prudence — réduire positions de 30%'
            elif current_tr > -0.5:
                zone = 'NEGATIF'
                zone_label = 'Négatif'
                zone_color = '#ef4444'
                zone_icon = '🔴'
                recommendation = 'Risque élevé — réduire positions de 50%'
            else:
                zone = 'FORT_NEGATIF'
                zone_label = 'Fort Négatif'
                zone_color = '#7f1d1d'
                zone_icon = '🔴🔴'
                recommendation = 'DANGER — envisager PAUSE trading'

            # --- Correlations with BTC ---
            btc_corr = None
            btc_series = []
            if btc_daily_returns:
                spy_vals = []
                btc_vals = []
                for i, day in enumerate(sorted_days):
                    if day in btc_daily_returns:
                        spy_vals.append(daily_avg_pnl_pct[i])
                        btc_vals.append(btc_daily_returns[day])
                        btc_series.append({'date': day, 'btc_return': round(btc_daily_returns[day], 3)})

                if len(spy_vals) >= 5:
                    btc_corr = round(float(np.corrcoef(spy_vals, btc_vals)[0, 1]), 4)

            # --- Top spy symbols stats ---
            top_symbols = sorted(symbol_counts.items(), key=lambda x: x[1], reverse=True)[:10]
            top_sym_stats = []
            for sym, cnt in top_symbols:
                sym_trades = [t for t in all_trades if t.get('symbol') == sym]
                pnl = sum(t.get('pnl_usdt', 0) or 0 for t in sym_trades)
                wins = sum(1 for t in sym_trades if (t.get('pnl_usdt', 0) or 0) > 0)
                wr = round(wins / cnt * 100, 1) if cnt else 0
                top_sym_stats.append({
                    'symbol': sym, 'trades': cnt,
                    'pnl': round(pnl, 2), 'win_rate': wr
                })

            # --- Build T/R history for chart ---
            tr_history = []
            for i in range(len(rolling_tr)):
                day_idx = window - 1 + i
                tr_history.append({
                    'date': sorted_days[day_idx],
                    'ratio': round(rolling_tr[i], 4),
                    'mean_pnl_pct': round(rolling_mean[i], 3),
                    'volatility': round(rolling_vol[i], 3),
                    'spy_pnl_usdt': round(daily_pnl[day_idx], 2),
                    'trades': daily_trades_count[day_idx],
                })

            # --- Contexte horaire (saisonnalité intraday, 10 jours analysés) ---
            # Sources: analyse backtestée du 03/04→11/04 sur 480 trades testnet
            _HQ = {
                3:  {'wr': 81, 'avg': 1.50, 'label': 'Excellent', 'color': '#10b981'},
                9:  {'wr': 71, 'avg': 2.17, 'label': 'Excellent', 'color': '#10b981'},
                13: {'wr': 75, 'avg': 1.66, 'label': 'Excellent', 'color': '#10b981'},
                15: {'wr': 71, 'avg': 2.05, 'label': 'Excellent', 'color': '#10b981'},
                23: {'wr': 60, 'avg': 1.95, 'label': 'Favorable', 'color': '#34d399'},
                4:  {'wr': 59, 'avg': 0.71, 'label': 'Favorable', 'color': '#34d399'},
                5:  {'wr': 61, 'avg': 1.23, 'label': 'Favorable', 'color': '#34d399'},
                6:  {'wr': 56, 'avg': 0.66, 'label': 'Favorable', 'color': '#34d399'},
                7:  {'wr': 54, 'avg': 0.84, 'label': 'Favorable', 'color': '#34d399'},
                8:  {'wr': 62, 'avg': 1.00, 'label': 'Favorable', 'color': '#34d399'},
                14: {'wr': 50, 'avg': 0.86, 'label': 'Neutre',    'color': '#f59e0b'},
                16: {'wr': 56, 'avg': 0.97, 'label': 'Neutre',    'color': '#f59e0b'},
                17: {'wr': 63, 'avg': 1.16, 'label': 'Favorable', 'color': '#34d399'},
                18: {'wr': 53, 'avg': 0.78, 'label': 'Neutre',    'color': '#f59e0b'},
                19: {'wr': 57, 'avg': 0.60, 'label': 'Neutre',    'color': '#f59e0b'},
                20: {'wr': 62, 'avg': 0.73, 'label': 'Neutre',    'color': '#f59e0b'},
                21: {'wr': 57, 'avg': 0.74, 'label': 'Neutre',    'color': '#f59e0b'},
                22: {'wr': 62, 'avg': 0.30, 'label': 'Neutre',    'color': '#f59e0b'},
                10: {'wr': 38, 'avg': 0.79, 'label': 'Creux',     'color': '#ef4444'},
                11: {'wr': 42, 'avg': 0.36, 'label': 'Creux',     'color': '#ef4444'},
                12: {'wr': 44, 'avg': 0.35, 'label': 'Creux',     'color': '#ef4444'},
                0:  {'wr': 57, 'avg': 0.75, 'label': 'Favorable', 'color': '#34d399'},
                1:  {'wr': 47, 'avg': 0.60, 'label': 'Neutre',    'color': '#f59e0b'},
                2:  {'wr': 56, 'avg': 0.37, 'label': 'Neutre',    'color': '#f59e0b'},
            }
            _cur_h = datetime.utcnow().hour
            _hq = _HQ.get(_cur_h, {'wr': 55, 'avg': 0.75, 'label': 'Neutre', 'color': '#f59e0b'})
            hour_context = {
                'hour_utc': _cur_h,
                'hour_paris': (_cur_h + 2) % 24,
                'wr_pct': _hq['wr'],
                'avg_pnl_pct': _hq['avg'],
                'label': _hq['label'],
                'color': _hq['color'],
            }

            # --- Diversité alts (breadth) : nb de coins distincts par jour, rolling 7j ---
            _r_days = sorted_days[-7:]
            _b_vals = [len(set(t.get('symbol', '') for t in trades_by_day[d])) for d in _r_days]
            alt_breadth_avg7 = round(float(np.mean(_b_vals)), 1) if _b_vals else 0
            _today_key = datetime.utcnow().strftime('%Y-%m-%d')
            alt_breadth_today = len(set(t.get('symbol', '') for t in trades_by_day.get(_today_key, [])))
            # Seuils issus de l'analyse: ≥25 = marché distribué (actif), <15 = concentré (calme)
            if alt_breadth_avg7 >= 25:
                _b_sig, _b_color, _b_label = 'élevée', '#10b981', 'Marché distribué'
            elif alt_breadth_avg7 >= 15:
                _b_sig, _b_color, _b_label = 'modérée', '#f59e0b', 'Activité modérée'
            else:
                _b_sig, _b_color, _b_label = 'faible', '#ef4444', 'Marché concentré'
            alt_breadth = {
                'avg_7d': alt_breadth_avg7,
                'today': alt_breadth_today,
                'signal': _b_sig,
                'label': _b_label,
                'color': _b_color,
            }

            # --- Régime BTC (dernier jour disponible) ---
            _btc_chg = None
            if btc_daily_returns:
                _btc_chg = btc_daily_returns.get(sorted(btc_daily_returns.keys())[-1])
            if _btc_chg is None:
                _btc_regime = {'change': None, 'label': 'Inconnu', 'color': '#6b7280', 'spy_hint': '—'}
            elif _btc_chg >= 2.5:
                # Forte hausse BTC → alts se corrèlent → moins de micro-pumps indépendants
                _btc_regime = {'change': round(_btc_chg, 2), 'label': 'Fort ▲', 'color': '#f59e0b',
                               'spy_hint': '⚠ Alts corrélés'}
            elif _btc_chg >= 1.0:
                _btc_regime = {'change': round(_btc_chg, 2), 'label': 'Modéré ▲', 'color': '#34d399',
                               'spy_hint': 'Rotation probable'}
            elif _btc_chg > -1.0:
                # BTC plat → alts micro-spikent indépendamment → maximum spy activity
                _btc_regime = {'change': round(_btc_chg, 2), 'label': 'Plat ↔', 'color': '#10b981',
                               'spy_hint': '↑ Alts indépendants'}
            elif _btc_chg > -2.5:
                _btc_regime = {'change': round(_btc_chg, 2), 'label': 'Recul ▼', 'color': '#f59e0b',
                               'spy_hint': 'Rotation sélective'}
            else:
                _btc_regime = {'change': round(_btc_chg, 2), 'label': 'Chute ▼▼', 'color': '#ef4444',
                               'spy_hint': '⚠ Risk-off'}

            # Enrichir la recommandation avec le contexte horaire
            if _hq['label'] == 'Creux' and current_tr > 0.5:
                recommendation = f"Positions normales — créneau {_cur_h:02d}h UTC historiquement creux (WR {_hq['wr']}%)"
            elif _hq['label'] == 'Excellent' and current_tr > 0.1:
                recommendation = recommendation + f" | Créneau favorable (WR hist. {_hq['wr']}%)"

            result = {
                'success': True,
                'current': {
                    'ratio': round(current_tr, 4),
                    'zone': zone,
                    'zone_label': zone_label,
                    'zone_color': zone_color,
                    'zone_icon': zone_icon,
                    'recommendation': recommendation,
                },
                'stats': {
                    'avg_ratio': round(avg_tr, 4),
                    'days_analyzed': len(sorted_days),
                    'total_trades': len(all_trades),
                    'pct_days_positive': round(float((pnl_arr > 0).mean()) * 100, 1),
                    'btc_correlation': btc_corr,
                },
                'thresholds': {
                    'fort_positif': 0.5,
                    'positif': 0.1,
                    'neutre': -0.1,
                    'negatif': -0.5,
                },
                'tr_history': tr_history,
                'btc_series': btc_series[-30:],
                'top_symbols': top_sym_stats,
                'hour_context': hour_context,
                'alt_breadth': alt_breadth,
                'btc_regime': _btc_regime,
                'timestamp': datetime.now().isoformat(),
            }

            _cache_set('trend_risk_ratio', result)
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in trend_risk_ratio: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    def handle_market_movers(self, request_handler) -> None:
        """GET /api/market-movers — Top Binance gainers/losers (all USDC tickers, real time).

        Fetches the public 24h ticker endpoint (no auth required), keeps only
        *USDC pairs with a positive last price, and responds with the full
        list sorted by 24h change (descending) plus the top-50 gainers and
        top-20 losers. Any failure is reported as a 500 JSON error payload.
        """
        try:
            import urllib.request as ureq
            # Public Binance endpoint — no API key needed.
            url = 'https://api.binance.com/api/v3/ticker/24hr'
            req = ureq.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
            with ureq.urlopen(req, timeout=10) as resp:
                all_tickers = json.loads(resp.read().decode())

            # Keep only USDC pairs with a valid (positive) last price.
            usdc = []
            for t in all_tickers:
                sym = t.get('symbol', '')
                if not sym.endswith('USDC'):
                    continue
                change = float(t.get('priceChangePercent', 0))
                price = float(t.get('lastPrice', 0))
                volume = float(t.get('quoteVolume', 0))
                if price <= 0:
                    continue
                usdc.append({
                    'symbol': sym,
                    'price': round(price, 8),
                    'change_24h': round(change, 2),
                    'volume_24h': round(volume, 0),
                    'high_24h': round(float(t.get('highPrice', 0)), 8),
                    'low_24h': round(float(t.get('lowPrice', 0)), 8),
                    'trades_count': int(t.get('count', 0)),
                })

            # Sort by 24h change, best performers first.
            usdc.sort(key=lambda x: x['change_24h'], reverse=True)

            request_handler.send_json_response({
                'success': True,
                'count': len(usdc),
                'top_gainers': usdc[:50],
                # Last 20 of the desc-sorted list flipped so the worst
                # performer comes first — same result as the previous
                # list(reversed(usdc))[:20] without copying the whole list.
                'top_losers': usdc[-20:][::-1],
                'all': usdc,
            })
        except Exception as e:
            logger.error(f"Error in market_movers: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    # ═══════════════════════════════════════════════════════════════════════
    # SPY PROD — Endpoints dedicated to the production spy
    # ═══════════════════════════════════════════════════════════════════════

    # Root directory of the production deployment; the spy-prod handlers below
    # resolve status files, data/ JSON state, logs and config.py against it.
    PROD_DIR = '/home/ubuntu/crypto_trading_prod'

    def handle_spy_prod_status(self, request_handler) -> None:
        """GET /api/spy-prod-status — Real-time status of the production spy.

        Serves an 8-second in-memory cache when fresh; otherwise reads the spy
        status file, checks that the recorded PID still belongs to a live
        market_spy process, and marks the payload OFFLINE when the process is
        dead or the status file has not been updated for over a minute.
        """
        try:
            # Fast path: serve the cached payload while it is still fresh.
            hit = _cache_get('spy_prod_status', ttl=8.0)
            if hit is not None:
                request_handler.send_json_response(hit)
                return

            import psutil

            # Prefer the file at the prod root (current layout); fall back to
            # data/ (legacy location).
            candidates = (
                os.path.join(self.PROD_DIR, 'spy_status.json'),
                os.path.join(self.PROD_DIR, 'data', 'spy_status.json'),
            )
            status_file = next((p for p in candidates if os.path.exists(p)), None)
            if status_file is None:
                request_handler.send_json_response({
                    'success': True, 'running': False,
                    'phase': 'NOT_STARTED', 'message': 'Spy Prod non démarré'
                })
                return

            with open(status_file, 'r', encoding='utf-8') as fh:
                status = json.load(fh)

            # The PID is trusted only when the process is alive AND its command
            # line actually mentions market_spy (guards against PID reuse).
            process_alive = False
            pid = status.get('pid')
            if pid:
                try:
                    proc = psutil.Process(pid)
                    process_alive = (
                        proc.is_running() and 'market_spy' in ' '.join(proc.cmdline())
                    )
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    pass

            # A status file older than 60s means the daemon stopped writing.
            stale = False
            raw_ts = status.get('timestamp', '')
            if raw_ts:
                try:
                    written_at = datetime.fromisoformat(raw_ts)
                    ref_now = (datetime.now(written_at.tzinfo)
                               if written_at.tzinfo else datetime.now())
                    age_s = (ref_now - written_at).total_seconds()
                    stale = age_s > 60
                    status['status_age_seconds'] = round(age_s, 1)
                except Exception:
                    pass

            if stale or not process_alive:
                status['running'] = False
                status['phase'] = 'OFFLINE'

            status['success'] = True
            status['process_alive'] = process_alive
            _cache_set('spy_prod_status', status)
            request_handler.send_json_response(status)

        except Exception as e:
            logger.error(f"Error in spy prod status: {e}", exc_info=True)
            # Deliberately success=True so the frontend renders an offline
            # state instead of a hard error.
            request_handler.send_json_response({
                'success': True, 'running': False,
                'phase': 'ERROR', 'error': str(e)
            })

    def handle_spy_prod_data(self, request_handler) -> None:
        """GET /api/spy-prod-data — Positions, history and opportunities of the prod spy.

        Aggregates on-disk JSON state from the prod deployment (active trades,
        opportunities, history, log tail) with live Binance prices and the
        account USDC balance, then caches the combined payload for 5 seconds.
        Every source is read best-effort: a missing or unreadable file simply
        leaves its section empty instead of failing the whole request.
        """
        try:
            # Serve the 5s in-memory cache when present (truthy check: a
            # stored payload is always a non-empty dict).
            cached = _cache_get('spy_prod_data', ttl=5.0)
            if cached:
                request_handler.send_json_response(cached)
                return

            data_dir = os.path.join(self.PROD_DIR, 'data')
            spy_trades_file = os.path.join(data_dir, 'espion_trades.json')
            spy_opportunities_file = os.path.join(data_dir, 'espion_opportunities.json')
            spy_history_file = os.path.join(data_dir, 'espion_history.json')
            spy_log_file = os.path.join(self.PROD_DIR, 'logs', 'market_spy_prod.log')

            # 1. Active positions (mapping: symbol -> trade dict)
            spy_trades = {}
            if os.path.exists(spy_trades_file):
                try:
                    with open(spy_trades_file, 'r', encoding='utf-8') as f:
                        spy_trades = json.load(f)
                except Exception:
                    pass

            # 2. Detected opportunities
            opportunities = []
            if os.path.exists(spy_opportunities_file):
                try:
                    with open(spy_opportunities_file, 'r', encoding='utf-8') as f:
                        opportunities = json.load(f)
                except Exception:
                    pass

            # 3. Trade history
            spy_history = []
            if os.path.exists(spy_history_file):
                try:
                    with open(spy_history_file, 'r', encoding='utf-8') as f:
                        spy_history = json.load(f)
                except Exception:
                    pass

            # 4. Log tail (last 80 non-blank lines)
            log_lines = []
            if os.path.exists(spy_log_file):
                try:
                    with open(spy_log_file, 'r', encoding='utf-8') as f:
                        all_lines = f.readlines()
                        log_lines = [l.strip() for l in all_lines[-80:] if l.strip()]
                except Exception:
                    pass

            # 5. Enrich positions with live prices — one sequential request
            # per symbol (3s timeout each); failed lookups fall back below.
            spy_positions_enriched = []
            spy_current_prices = {}
            if spy_trades:
                try:
                    spy_symbols = list(spy_trades.keys())
                    for sym in spy_symbols:
                        try:
                            r = requests.get("https://api.binance.com/api/v3/ticker/price",
                                            params={"symbol": sym}, timeout=3)
                            if r.status_code == 200:
                                spy_current_prices[sym] = float(r.json().get('price', 0))
                        except Exception:
                            pass
                except Exception:
                    pass

            for symbol, trade in spy_trades.items():
                entry = trade.get('entry_price', 0)
                current_price = spy_current_prices.get(symbol, 0)
                if current_price <= 0:
                    # No live price — fall back to the best price seen so far.
                    current_price = trade.get('max_price', entry)
                pnl_pct = ((current_price - entry) / entry * 100) if entry > 0 else 0
                qty = trade.get('quantity', 0)
                pnl_usdt = (current_price - entry) * qty if entry > 0 else 0
                spy_positions_enriched.append({
                    'symbol': symbol,
                    'entry_price': entry,
                    'quantity': qty,
                    'current_price': current_price,
                    'pnl_pct': round(pnl_pct, 2),
                    'pnl_usdt': round(pnl_usdt, 4),
                    'stop_loss': trade.get('stop_loss', 0),
                    'take_profit': trade.get('take_profit', 0),
                    'pattern': trade.get('surge_type', trade.get('pattern', 'UNKNOWN')),
                    # Ternary binds loosely: when 'surge_strength' is truthy
                    # the value is surge_strength (or the 'score' fallback)
                    # scaled x20; otherwise the raw 'score' is used as-is.
                    'score': trade.get('surge_strength', trade.get('score', 0)) * 20 if trade.get('surge_strength') else trade.get('score', 0),
                    'timestamp': trade.get('timestamp', ''),
                    'max_pnl': trade.get('max_pnl', 0),
                    'indicators': trade.get('indicators', {})
                })

            # 6. Prod USDC balance (signed Binance API call) — the prod spy
            # trades in USDC; the variable/key keeps the legacy name
            # 'balance_usdt' but holds the USDC free+locked total.
            balance_usdt = None
            try:
                import hmac, hashlib, time as _t
                # Credentials are scraped from the prod config.py, which is
                # expected to contain lines like: BINANCE_API_KEY = "..."
                prod_config_file = os.path.join(self.PROD_DIR, 'config.py')
                cfg = {}
                with open(prod_config_file, 'r', encoding='utf-8') as f:
                    for line in f:
                        if line.startswith('BINANCE_API_KEY') and '=' in line:
                            cfg['key'] = line.split('"')[1]
                        elif line.startswith('BINANCE_API_SECRET') and '=' in line:
                            cfg['secret'] = line.split('"')[1]
                if cfg.get('key') and cfg.get('secret'):
                    ts = int(_t.time() * 1000)
                    qs = f"timestamp={ts}&recvWindow=5000"
                    sig = hmac.new(cfg['secret'].encode(), qs.encode(), hashlib.sha256).hexdigest()
                    r = requests.get(f"https://api.binance.com/api/v3/account?{qs}&signature={sig}",
                                    headers={'X-MBX-APIKEY': cfg['key']}, timeout=5)
                    if r.status_code == 200:
                        for b in r.json().get('balances', []):
                            if b['asset'] == 'USDC':
                                balance_usdt = round(float(b['free']) + float(b['locked']), 2)
                                break
            except Exception as e:
                logger.warning(f"Prod balance fetch failed: {e}")

            # Stats over detected opportunities
            executed_opps = [o for o in opportunities if o.get('executed')]
            patterns_count = {}
            for o in opportunities:
                p = o.get('pattern', 'UNKNOWN')
                patterns_count[p] = patterns_count.get(p, 0) + 1

            # Last 30 opportunities, most recent first.
            recent_opps = opportunities[-30:] if opportunities else []
            recent_opps.reverse()

            history_stats = {}
            if spy_history:
                wins = sum(1 for t in spy_history if t.get('pnl_pct', 0) > 0)
                total_pnl = sum(t.get('pnl_usdt', 0) for t in spy_history)
                avg_pnl = sum(t.get('pnl_pct', 0) for t in spy_history) / len(spy_history)
                avg_hold = sum(t.get('hold_minutes', 0) for t in spy_history) / len(spy_history)
                # W/L breakdown by pattern
                by_pattern = {}
                for t in spy_history:
                    pat = t.get('surge_type', t.get('pattern', 'UNKNOWN'))
                    p = t.get('pnl_usdt', 0)
                    if pat not in by_pattern:
                        by_pattern[pat] = {'wins': 0, 'losses': 0, 'pnl': 0.0}
                    if p > 0:
                        by_pattern[pat]['wins'] += 1
                    else:
                        # Zero-PnL trades are counted as losses here.
                        by_pattern[pat]['losses'] += 1
                    by_pattern[pat]['pnl'] += p
                wl_by_pattern = []
                for pat, s in sorted(by_pattern.items()):
                    tot = s['wins'] + s['losses']
                    wl_by_pattern.append({
                        'pattern': pat,
                        'wins': s['wins'],
                        'losses': s['losses'],
                        'total': tot,
                        'win_rate': round(s['wins'] / tot * 100, 1) if tot else 0,
                        'pnl': round(s['pnl'], 3),
                    })
                # Average win / average loss (payoff) ratio
                win_pnls = [t.get('pnl_usdt', 0) for t in spy_history if t.get('pnl_usdt', 0) > 0]
                loss_pnls = [t.get('pnl_usdt', 0) for t in spy_history if t.get('pnl_usdt', 0) <= 0]
                avg_win = round(sum(win_pnls) / len(win_pnls), 4) if win_pnls else 0
                avg_loss = round(sum(loss_pnls) / len(loss_pnls), 4) if loss_pnls else 0
                payoff_ratio = round(abs(avg_win / avg_loss), 2) if avg_loss != 0 else 0
                history_stats = {
                    'total_trades': len(spy_history),
                    'wins': wins,
                    'losses': len(spy_history) - wins,
                    'win_rate': round(wins / len(spy_history) * 100, 1),
                    'total_pnl_usdt': round(total_pnl, 4),
                    'avg_pnl_pct': round(avg_pnl, 2),
                    'avg_hold_min': round(avg_hold, 1),
                    'avg_win_usdt': avg_win,
                    'avg_loss_usdt': avg_loss,
                    'payoff_ratio': payoff_ratio,
                    'wl_by_pattern': wl_by_pattern,
                }

            # Expose the last 200 trades, plus the 20 most recent (newest
            # first) for the compact view.
            display_history = spy_history[-200:] if spy_history else []
            recent_history = display_history[-20:]
            recent_history.reverse()

            result = {
                'success': True,
                'spy_positions': spy_positions_enriched,
                'spy_positions_count': len(spy_trades),
                'balance_usdt': balance_usdt,
                'recent_opportunities': recent_opps,
                'total_opportunities': len(opportunities),
                'total_executed': len(executed_opps),
                'patterns_distribution': patterns_count,
                'spy_history': recent_history,
                'all_history': display_history,
                'history_stats': history_stats,
                'log_lines': log_lines,
                'timestamp': datetime.now().isoformat()
            }
            _cache_set('spy_prod_data', result)
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in spy prod data: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    def handle_spy_compare(self, request_handler) -> None:
        """GET /api/maintenance/spy-compare — Compare SPY testnet vs prod (state + surges + perf).

        Builds a side-by-side diagnostic of the testnet and production spy
        daemons from their on-disk state files and log tails: market regime,
        blocked coins, active time-blocks, diverging per-coin scores, surge
        decisions, and realized trade performance parsed from the logs.
        """
        try:
            import re
            import time as _t
            from collections import defaultdict

            TESTNET_DIR = self.script_dir
            PROD_DIR    = os.path.join(os.path.dirname(self.script_dir), 'crypto_trading_prod')
            TESTNET_LOG = os.path.join(TESTNET_DIR, 'market_spy_daemon.log')
            PROD_LOG    = os.path.join(PROD_DIR, 'logs', 'market_spy_prod.log')

            # ── helpers ────────────────────────────────────────────────────────
            def _load_json(path):
                """Best-effort JSON load; returns None on any failure."""
                try:
                    with open(path, 'r', encoding='utf-8', errors='replace') as f:
                        return json.load(f)
                except Exception:
                    return None

            def _parse_sells(log_path, max_lines=50000):
                """Extract closed trades (win flag, coin, PnL %, hold minutes)
                from '✅/❌ VENDU' lines in the log tail."""
                trades = []
                try:
                    with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
                        lines = f.readlines()[-max_lines:]
                except FileNotFoundError:
                    return trades
                RE = re.compile(r'(✅|❌) VENDU (\S+) @ .* PnL: ([+-]?[\d.]+)%.*Hold: ([\d.]+)min')
                for line in lines:
                    m = RE.search(line)
                    if m:
                        trades.append({
                            'ok': m.group(1) == '✅',
                            'coin': m.group(2),
                            'pnl': float(m.group(3)),
                            'hold': float(m.group(4)),
                        })
                return trades

            def _parse_surges(log_path, max_lines=5000):
                """Map coin -> last surge seen in the log tail, with the
                decision (bought/rejected/blocked/unknown) inferred by scanning
                the lines that follow the surge marker."""
                surges = {}
                RE_S = re.compile(r'⚡ SURGE: (\w+USDT|\w+USDC).*?\+([\d.]+)%')
                RE_R = re.compile(r'❌ Rejeté: (.+)')
                RE_TS = re.compile(r'^(\d{2}:\d{2}:\d{2})')
                try:
                    with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
                        lines = f.readlines()[-max_lines:]
                except FileNotFoundError:
                    return surges
                last_ts = '00:00:00'
                for i, line in enumerate(lines):
                    # Track the most recent timestamp so each surge can be
                    # stamped even when its own line carries none.
                    m_ts = RE_TS.match(line.strip())
                    if m_ts:
                        last_ts = m_ts.group(1)
                    m = RE_S.search(line)
                    if m:
                        coin, strength = m.group(1), m.group(2)
                        decision, reason = 'unknown', ''
                        # Look ahead at most 14 lines for this surge's outcome;
                        # stop early when the next surge marker appears.
                        for j in range(i+1, min(i+15, len(lines))):
                            l2 = lines[j].strip()
                            mr = RE_R.search(l2)
                            if mr:
                                # Rejection reason is truncated to 60 chars.
                                decision, reason = 'rejected', mr.group(1)[:60]
                                break
                            if 'ACHAT' in l2 and coin in l2:
                                decision = 'bought'
                                break
                            if 'bloqué' in l2.lower() or 'SKIP' in l2:
                                decision = 'blocked'
                                break
                            if '⚡ SURGE' in l2:
                                break
                        surges[coin] = {'ts': last_ts, 'strength': strength,
                                         'decision': decision, 'reason': reason}
                return surges

            # ── 1. Regime ──────────────────────────────────────────────────────
            now = _t.time()
            regime = {}
            for label, base in [('testnet', TESTNET_DIR), ('prod', PROD_DIR)]:
                f = os.path.join(base, 'data', 'spy_regime_state.json')
                d = _load_json(f)
                if d:
                    age = now - d.get('last_update_ts', 0)
                    regime[label] = {
                        'regime': d.get('current_regime', '?'),
                        'saved_at': d.get('saved_at', '')[:19],
                        'age_s': round(age),
                    }
                else:
                    # age_s == -1 flags "no state file found".
                    regime[label] = {'regime': 'inconnu', 'saved_at': '', 'age_s': -1}

            # ── 2. Blocked coins ───────────────────────────────────────────────
            blocked = {}
            for label, base in [('testnet', TESTNET_DIR), ('prod', PROD_DIR)]:
                # Score files may live at the root or under data/.
                scores_path = os.path.join(base, 'spy_coin_scores.json')
                if not os.path.exists(scores_path):
                    scores_path = os.path.join(base, 'data', 'spy_coin_scores.json')
                d = _load_json(scores_path) or {}
                # 7+ consecutive losses = blocked; 3-6 = approaching the block.
                bl7  = sorted([s for s,v in d.items() if v.get('consec_losses',0) >= 7])
                near = sorted([s for s,v in d.items() if 3 <= v.get('consec_losses',0) < 7])
                blocked[label] = {'blocked': bl7, 'near': near, 'total_coins': len(d)}

            # ── 3. Time-blocks ────────────────────────────────────────────────
            timeblocks = {}
            for label, base in [('testnet', TESTNET_DIR), ('prod', PROD_DIR)]:
                f = os.path.join(base, 'spy_loss_state.json')
                if not os.path.exists(f):
                    f = os.path.join(base, 'data', 'spy_loss_state.json')
                d = _load_json(f) or {}
                active = []
                for s, v in d.items():
                    # blocked_until is a numeric epoch timestamp here; keep
                    # only blocks that are still in the future.
                    bu = v.get('blocked_until', 0)
                    if bu > now:
                        active.append({
                            'coin': s,
                            'until': datetime.fromtimestamp(bu).strftime('%H:%M:%S'),
                            'left_min': round((bu - now) / 60),
                        })
                active.sort(key=lambda x: x['left_min'])
                timeblocks[label] = active

            # ── 4. Diverging scores ────────────────────────────────────────────
            t_path = os.path.join(TESTNET_DIR, 'spy_coin_scores.json')
            p_path = os.path.join(PROD_DIR, 'data', 'spy_coin_scores.json')
            t_scores = _load_json(t_path) or {}
            p_scores = _load_json(p_path) or {}
            common = set(t_scores) & set(p_scores)
            diverged = []
            for s in sorted(common):
                tv, pv = t_scores[s], p_scores[s]
                t_cl = tv.get('consec_losses', 0)
                p_cl = pv.get('consec_losses', 0)
                # A gap of 2+ consecutive losses between environments is
                # surfaced as a divergence.
                if abs(t_cl - p_cl) >= 2:
                    diverged.append({
                        'coin': s,
                        'testnet': {'cl': t_cl, 'w': tv.get('wins',0), 'l': tv.get('losses',0)},
                        'prod':    {'cl': p_cl, 'w': pv.get('wins',0), 'l': pv.get('losses',0)},
                    })

            # ── 5. Surge comparison ────────────────────────────────────────────
            t_surges = _parse_surges(TESTNET_LOG, 5000)
            p_surges = _parse_surges(PROD_LOG, 5000)
            all_surge_coins = sorted(set(t_surges) | set(p_surges))
            surge_comparison = []
            for coin in all_surge_coins:
                t = t_surges.get(coin)
                p = p_surges.get(coin)
                if t and p and t['decision'] == p['decision']:
                    status = 'sync'
                elif t and p:
                    status = 'divergent'
                elif t:
                    status = 'testnet_only'
                else:
                    status = 'prod_only'
                surge_comparison.append({
                    'coin': coin,
                    'testnet': t,
                    'prod': p,
                    'status': status,
                })

            # ── 6. Performance ─────────────────────────────────────────────────
            perf = {}
            for label, log_path in [('testnet', TESTNET_LOG), ('prod', PROD_LOG)]:
                trades = _parse_sells(log_path)
                if not trades:
                    perf[label] = {'trades': 0}
                    continue
                n = len(trades)
                wins   = [t for t in trades if t['ok']]
                losses = [t for t in trades if not t['ok']]
                avg_w  = sum(t['pnl'] for t in wins)   / len(wins)   if wins   else 0
                avg_l  = sum(t['pnl'] for t in losses) / len(losses) if losses else 0
                # 999 is a sentinel when avg_l == 0 (no losing trades).
                pf     = round(abs(avg_w / avg_l), 2) if avg_l != 0 else 999
                avg_h  = sum(t['hold'] for t in trades) / n
                # Per coin: average PnL, coins with at least 2 trades only.
                coin_pnl = defaultdict(list)
                for t in trades:
                    coin_pnl[t['coin']].append(t['pnl'])
                coin_avg = {c: round(sum(v)/len(v), 2) for c, v in coin_pnl.items() if len(v) >= 2}
                best  = sorted(coin_avg.items(), key=lambda x: -x[1])[:3]
                worst = sorted(coin_avg.items(), key=lambda x:  x[1])[:3]
                perf[label] = {
                    'trades':    n,
                    'wins':      len(wins),
                    'losses':    len(losses),
                    'win_rate':  round(len(wins)/n*100, 1),
                    'avg_win':   round(avg_w, 2),
                    'avg_loss':  round(avg_l, 2),
                    'profit_factor': pf,
                    'avg_hold':  round(avg_h, 1),
                    'best_coins':  [{'coin': c, 'avg': v} for c, v in best],
                    'worst_coins': [{'coin': c, 'avg': v} for c, v in worst],
                }

            result = {
                'success': True,
                'regime':          regime,
                'blocked':         blocked,
                'timeblocks':      timeblocks,
                'score_diverged':  diverged,
                'surge_comparison': surge_comparison,
                'perf':            perf,
                'timestamp':       datetime.now().isoformat(),
            }
            request_handler.send_json_response(result)

        except Exception as e:
            logger.error(f"Error in spy_compare: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    def handle_spy_performance(self, request_handler) -> None:
        """GET /api/spy-performance — Per-day spy performance analysis.

        Aggregates the last 14 days of espion_history.json into daily trade
        stats (volume, win rate, average PnL) and, when present,
        espion_opportunities.json into daily surge/conversion/reject stats,
        plus a last-3-days vs previous-7-days comparison.

        Responds 404 when the history file is missing, 500 on unexpected
        errors, and 200 with the JSON payload otherwise.
        """
        try:
            import json as _json
            from datetime import datetime, timedelta, timezone
            from collections import defaultdict

            history_path = os.path.join(self.script_dir, 'espion_history.json')
            opps_path    = os.path.join(self.script_dir, 'espion_opportunities.json')

            if not os.path.exists(history_path):
                request_handler.send_json_response({'success': False, 'error': 'espion_history.json introuvable'}, 404)
                return

            with open(history_path, 'r') as f:
                history = _json.load(f)

            now = datetime.now(timezone.utc)
            cutoff = now - timedelta(days=14)

            def parse_ts(raw):
                # Parse an ISO timestamp; naive values are assumed to be UTC.
                ts = datetime.fromisoformat(raw)
                if ts.tzinfo is None:
                    ts = ts.replace(tzinfo=timezone.utc)
                return ts

            def build_daily_stats():
                # Aggregate trade history into one stats dict per calendar day.
                by_day = defaultdict(lambda: {'trades': 0, 'wins': 0, 'pnl_sum': 0.0, 'pnl_pct_sum': 0.0})
                for t in history:
                    ts_raw = t.get('exit_time') or t.get('entry_time') or t.get('timestamp', '')
                    try:
                        ts = parse_ts(ts_raw)
                        if ts < cutoff:
                            continue
                        day = ts.strftime('%Y-%m-%d')
                        d = by_day[day]
                        d['trades'] += 1
                        # "or 0" guards against explicit nulls in the JSON
                        pnl_pct = float(t.get('pnl_pct', t.get('profit_pct', 0)) or 0)
                        pnl_usd = float(t.get('pnl', t.get('profit', 0)) or 0)
                        d['pnl_sum']     += pnl_usd
                        d['pnl_pct_sum'] += pnl_pct
                        if pnl_pct > 0 or pnl_usd > 0:
                            d['wins'] += 1
                    except Exception:
                        continue  # malformed entry — skip it
                stats = []
                for day in sorted(by_day):
                    d = by_day[day]
                    n = d['trades']
                    stats.append({
                        'date':         day,
                        'trades':       n,
                        'wins':         d['wins'],
                        'win_rate':     round(d['wins'] / n * 100, 1) if n else 0,
                        'avg_pnl_pct':  round(d['pnl_pct_sum'] / n, 3) if n else 0,
                        'total_pnl':    round(d['pnl_sum'], 2),
                    })
                return stats

            def build_opp_stats():
                # Aggregate detected surges per day: totals, conversions,
                # top patterns and top rejection reasons.
                opp_days = defaultdict(lambda: {'total': 0, 'executed': 0, 'patterns': defaultdict(int), 'reject_reasons': defaultdict(int)})
                if os.path.exists(opps_path):
                    with open(opps_path, 'r') as f:
                        opps = _json.load(f)
                    for o in opps:
                        try:
                            ts = parse_ts(o.get('timestamp', ''))
                            if ts < cutoff:
                                continue
                            day = ts.strftime('%Y-%m-%d')
                            d = opp_days[day]
                            d['total'] += 1
                            if o.get('executed'):
                                d['executed'] += 1
                            d['patterns'][o.get('pattern', '?')] += 1
                            if not o.get('executed'):
                                # Reason truncated to keep the payload compact
                                d['reject_reasons'][str(o.get('reason', ''))[:60]] += 1
                        except Exception:
                            continue  # malformed entry — skip it
                stats = []
                for day in sorted(opp_days):
                    d = opp_days[day]
                    total = d['total']
                    top_patterns = sorted(d['patterns'].items(), key=lambda x: -x[1])[:4]
                    top_rejects  = sorted(d['reject_reasons'].items(), key=lambda x: -x[1])[:5]
                    stats.append({
                        'date':           day,
                        'surges':         total,
                        'executed':       d['executed'],
                        'conv_rate':      round(d['executed'] / total * 100, 1) if total else 0,
                        'top_patterns':   [{'pattern': p, 'count': n} for p, n in top_patterns],
                        'top_rejects':    [{'reason': r, 'count': n} for r, n in top_rejects],
                    })
                return stats

            def period_summary(days_list):
                # Trade-weighted roll-up over a list of daily stats dicts.
                if not days_list:
                    return {}
                n = sum(d['trades'] for d in days_list)
                w = sum(d['wins'] for d in days_list)
                avg = round(sum(d['avg_pnl_pct'] * d['trades'] for d in days_list) / n, 3) if n else 0
                return {'trades': n, 'win_rate': round(w / n * 100, 1) if n else 0, 'avg_pnl_pct': avg}

            daily_stats = build_daily_stats()
            opp_stats   = build_opp_stats()

            # --- Period comparison: last 3 days vs the 7 days before them ---
            d3  = (now - timedelta(days=3)).strftime('%Y-%m-%d')
            d10 = (now - timedelta(days=10)).strftime('%Y-%m-%d')
            last_3 = [d for d in daily_stats if d['date'] >= d3]
            prev_7 = [d for d in daily_stats if d10 <= d['date'] < d3]

            request_handler.send_json_response({
                'success': True,
                'daily_stats':   daily_stats,
                'opp_stats':     opp_stats,
                'period_last3':  period_summary(last_3),
                'period_prev7':  period_summary(prev_7),
                'timestamp':     now.isoformat(),
            })

        except Exception as e:
            logger.error(f"Error in spy_performance: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    # ─────────────────────────────────────────────────────────────────────────────
    # SPY OPTIMIZER PIPELINE — Lancer analyse GPU + Appliquer modifications
    # ─────────────────────────────────────────────────────────────────────────────

    def handle_spy_pipeline_status(self, request_handler) -> None:
        """GET /api/spy-pipeline-status — Status of the running optimizer pipeline.

        Reads pipeline_status.json, reconciles a stale 'running' entry with the
        actual process state (giving the watcher thread a short grace period to
        write the final status), and attaches the last 40 log lines plus the
        current step number parsed from "ÉTAPE N" markers in the run log.
        """
        try:
            spy_opt_dir = os.path.join(self.script_dir, 'spy_optimizer')
            status_file = os.path.join(spy_opt_dir, 'pipeline_status.json')
            log_file    = os.path.join(spy_opt_dir, 'pipeline_run.log')

            # Default shape returned when no status file exists yet
            status = {'status': 'idle', 'step': '', 'log': [], 'started_at': None, 'completed_at': None, 'pid': None, 'error': None}

            if os.path.exists(status_file):
                try:
                    with open(status_file, 'r', encoding='utf-8') as f:
                        status = json.load(f)
                except (json.JSONDecodeError, ValueError):
                    # Truncated/corrupt file → treat as idle
                    status['status'] = 'idle'

            # If marked running, verify the process is actually still alive
            if status.get('status') == 'running' and status.get('pid'):
                try:
                    os.kill(status['pid'], 0)  # signal 0 = existence check only
                except (ProcessLookupError, PermissionError):
                    # NOTE(review): on POSIX, PermissionError from kill(pid, 0)
                    # means the process exists but belongs to another user;
                    # it is treated as "dead" here — confirm that is intended.
                    # Process gone — wait 0.5s so the watcher thread can write
                    # the final status before we overrule it.
                    import time as _t
                    _t.sleep(0.5)
                    # Re-read: the watcher may already have recorded success/error
                    if os.path.exists(status_file):
                        try:
                            with open(status_file, 'r', encoding='utf-8') as f:
                                refreshed = json.load(f)
                            if refreshed.get('completed_at') and refreshed.get('status') in ('success', 'error'):
                                status = refreshed  # watcher already updated → trust it
                            else:
                                raise ValueError('not updated')
                        except Exception:
                            status['status'] = 'error'
                            status['error']  = 'Pipeline interrompu de manière inattendue'
                            with open(status_file, 'w', encoding='utf-8') as f:
                                json.dump(status, f)
                    else:
                        status['status'] = 'error'
                        status['error']  = 'Pipeline interrompu de manière inattendue'

            # Tail of the run log
            if os.path.exists(log_file):
                with open(log_file, 'r', encoding='utf-8', errors='replace') as f:
                    lines = f.readlines()
                status['log'] = [l.rstrip() for l in lines[-40:]]
                # Derive the current step from the last "ÉTAPE N:" marker
                if status.get('status') == 'running':
                    import re as _re
                    current_step = 0
                    for line in lines:
                        m = _re.search(r'ÉTAPE\s+(\d+)', line)
                        if m:
                            current_step = int(m.group(1))
                    if current_step > 0:
                        status['step_num'] = current_step
                        status['step'] = f'Étape {current_step}/6'
            else:
                status['log'] = []

            request_handler.send_json_response({'success': True, **status})

        except Exception as e:
            logger.error(f"Error in spy_pipeline_status: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    def handle_spy_run_pipeline(self, request_handler, data: Dict = None) -> None:
        """POST /api/spy-run-pipeline — Launch the full pipeline (download + GPU + report).

        Refuses to start when a previous run is still alive; otherwise resets
        the log/status files, spawns full_pipeline.py in the background and
        starts a daemon watcher thread that records the final status when the
        process exits.

        Optional payload keys: skip_download (bool), gpu_trials (int, default 300).
        """
        try:
            import subprocess
            import sys

            spy_opt_dir = os.path.join(self.script_dir, 'spy_optimizer')
            status_file = os.path.join(spy_opt_dir, 'pipeline_status.json')
            log_file    = os.path.join(spy_opt_dir, 'pipeline_run.log')
            pipeline_py = os.path.join(spy_opt_dir, 'full_pipeline.py')

            # Prefer the project venv interpreter when present
            venv_python = os.path.join(self.script_dir, '.venv', 'bin', 'python3')
            python_exe  = venv_python if os.path.exists(venv_python) else sys.executable

            # Refuse to start when a run is already in progress
            if os.path.exists(status_file):
                try:
                    with open(status_file, 'r', encoding='utf-8') as f:
                        current = json.load(f)
                except (json.JSONDecodeError, ValueError):
                    current = {}
                if current.get('status') == 'running':
                    pid = current.get('pid')
                    # Guard against a null/missing pid: os.kill(None, 0) would
                    # raise TypeError and turn a stale status into a 500.
                    if pid:
                        try:
                            os.kill(pid, 0)  # signal 0 = existence check only
                            request_handler.send_json_response({
                                'success': False,
                                'error': f'Pipeline déjà en cours (PID {pid})',
                                'status': 'running'
                            })
                            return
                        except (ProcessLookupError, PermissionError):
                            pass  # process is dead — safe to relaunch

            # Parameters
            data = data or {}
            skip_download = data.get('skip_download', False)
            gpu_trials    = int(data.get('gpu_trials', 300))

            cmd = [python_exe, pipeline_py, '--gpu-trials', str(gpu_trials)]
            if skip_download:
                cmd.append('--skip-download')

            now_str = datetime.now(timezone.utc).isoformat()

            # Truncate the previous log
            with open(log_file, 'w', encoding='utf-8') as lf:
                lf.write(f"[{now_str}] 🚀 Pipeline démarré (GPU trials={gpu_trials}, skip_download={skip_download})\n")

            # Write the initial status
            init_status = {
                'status': 'running',
                'step': 'Démarrage...',
                'step_num': 0,
                'steps_total': 6,
                'started_at': now_str,
                'completed_at': None,
                'pid': None,
                'error': None
            }
            with open(status_file, 'w', encoding='utf-8') as f:
                json.dump(init_status, f)

            # Spawn the pipeline in the background, output appended to the log
            with open(log_file, 'a', encoding='utf-8') as lf:
                proc = subprocess.Popen(
                    cmd,
                    stdout=lf,
                    stderr=subprocess.STDOUT,
                    cwd=spy_opt_dir,
                    env={**os.environ, 'PYTHONUNBUFFERED': '1'}
                )

            # Record the PID in the status file
            init_status['pid'] = proc.pid
            with open(status_file, 'w', encoding='utf-8') as f:
                json.dump(init_status, f)

            # Watcher thread: writes the final status when the process exits
            import threading

            def _watch(proc, status_file, log_file):
                ret = proc.wait()
                try:
                    with open(status_file, 'r', encoding='utf-8') as f:
                        st = json.load(f)
                except (json.JSONDecodeError, ValueError, FileNotFoundError):
                    st = {'status': 'running', 'step': '', 'step_num': 0, 'steps_total': 6,
                          'started_at': None, 'completed_at': None, 'pid': proc.pid, 'error': None}
                st['completed_at'] = datetime.now(timezone.utc).isoformat()
                if ret == 0:
                    st['status'] = 'success'
                    st['step']   = '✅ Pipeline terminé avec succès'
                    # Publish the report next to the app if it was generated
                    report_src = os.path.join(os.path.dirname(status_file), 'report.html')
                    report_dst = os.path.join(os.path.dirname(os.path.dirname(status_file)), 'spy_report.html')
                    if os.path.exists(report_src):
                        import shutil
                        shutil.copy2(report_src, report_dst)
                        with open(log_file, 'a') as lf:
                            lf.write(f"\n✅ Rapport copié → spy_report.html\n")
                else:
                    st['status'] = 'error'
                    st['step']   = f'❌ Erreur (code {ret})'
                    st['error']  = f'Pipeline terminé avec code {ret}'
                with open(status_file, 'w', encoding='utf-8') as f:
                    json.dump(st, f)

            t = threading.Thread(target=_watch, args=(proc, status_file, log_file), daemon=True)
            t.start()

            logger.info(f"Pipeline spy lancé: PID {proc.pid}, trials={gpu_trials}")
            request_handler.send_json_response({
                'success': True,
                'pid': proc.pid,
                'message': f'Pipeline démarré (PID {proc.pid})',
                'gpu_trials': gpu_trials
            })

        except Exception as e:
            logger.error(f"Error in spy_run_pipeline: {e}", exc_info=True)
            # Best-effort: record the failure in the status file
            try:
                spy_opt_dir = os.path.join(self.script_dir, 'spy_optimizer')
                status_file = os.path.join(spy_opt_dir, 'pipeline_status.json')
                with open(status_file, 'w', encoding='utf-8') as f:
                    json.dump({'status': 'error', 'error': str(e), 'completed_at': datetime.now(timezone.utc).isoformat()}, f)
            except Exception:
                pass
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)

    def handle_spy_apply_optimizer(self, request_handler, data: Dict = None) -> None:
        """POST /api/spy-apply-optimizer — Apply optimizer results to the spy and restart it.

        Reads optimizer_results.json, writes the optimal ML threshold into the
        pickled classifier (keeping a .bak backup first), then terminates the
        previous testnet spy process and relaunches it. Responds 404 when the
        results file is absent, 500 on unexpected errors.
        """
        try:
            import subprocess
            import sys
            import shutil

            spy_opt_dir     = os.path.join(self.script_dir, 'spy_optimizer')
            results_file    = os.path.join(spy_opt_dir, 'data', 'optimizer_results.json')
            classifier_pkl  = os.path.join(spy_opt_dir, 'models', 'signal_classifier.pkl')
            pid_file        = os.path.join(self.script_dir, 'spy_testnet.pid')

            if not os.path.exists(results_file):
                request_handler.send_json_response({'success': False, 'error': 'optimizer_results.json introuvable — lancez d\'abord le pipeline'}, 404)
                return

            with open(results_file, 'r', encoding='utf-8') as f:
                results = json.load(f)

            metrics   = results.get('metrics', {})
            threshold = metrics.get('optimal_threshold', results.get('optimal_threshold', 0.3))
            auc       = metrics.get('auc_roc', 0.0)
            wr        = metrics.get('win_rate_filtered', 0.0)
            trade_rate = metrics.get('trade_rate', 1.0)

            changes = []

            # 1. Write the new threshold into the classifier pickle
            if os.path.exists(classifier_pkl):
                import pickle
                with open(classifier_pkl, 'rb') as f:
                    # NOTE: pickle on a local artifact produced by our own
                    # pipeline — do not point this at untrusted files.
                    clf = pickle.load(f)
                old_threshold = getattr(clf, 'optimal_threshold', None)
                clf.optimal_threshold = threshold
                # Backup before overwriting
                shutil.copy2(classifier_pkl, classifier_pkl + '.bak')
                with open(classifier_pkl, 'wb') as f:
                    pickle.dump(clf, f)
                # BUGFIX: the previous code embedded the conditional inside the
                # f-string format spec ({old:.3f if old else "N/A"}), which is
                # passed literally to float.__format__ and raises ValueError at
                # runtime. Format the old value separately instead.
                old_str = f'{old_threshold:.3f}' if old_threshold is not None else 'N/A'
                changes.append(f'Threshold ML: {old_str} → {threshold:.3f}')
                logger.info(f"Classifier threshold mis à jour: {threshold:.3f}")

            # 2. Restart the testnet spy
            spy_script = os.path.join(self.script_dir, 'market_spy.py')
            venv_python = os.path.join(self.script_dir, '.venv', 'bin', 'python3')
            python_exe  = venv_python if os.path.exists(venv_python) else sys.executable

            # Kill the previous process: SIGTERM, then SIGKILL after 2s
            killed_pid = None
            if os.path.exists(pid_file):
                try:
                    with open(pid_file, 'r') as f:
                        old_pid = int(f.read().strip())
                    os.kill(old_pid, 15)  # SIGTERM
                    import time as _t
                    _t.sleep(2)
                    try:
                        os.kill(old_pid, 9)  # SIGKILL if still alive
                    except ProcessLookupError:
                        pass
                    killed_pid = old_pid
                except (ValueError, ProcessLookupError, FileNotFoundError):
                    pass  # stale/invalid pid file — nothing to kill

            # Relaunch, appending output to the spy log
            log_file = os.path.join(self.script_dir, 'market_spy.log')
            with open(log_file, 'a', encoding='utf-8') as lf:
                new_proc = subprocess.Popen(
                    [python_exe, spy_script],
                    stdout=lf,
                    stderr=subprocess.STDOUT,
                    cwd=self.script_dir
                )
            with open(pid_file, 'w') as f:
                f.write(str(new_proc.pid))

            changes.append(f'Spy redémarré (ancien PID {killed_pid} → nouveau PID {new_proc.pid})')
            logger.info(f"Spy redémarré avec nouveau threshold {threshold:.3f}: PID {new_proc.pid}")

            request_handler.send_json_response({
                'success': True,
                'message': 'Optimisations appliquées et spy redémarré',
                'changes': changes,
                'threshold': threshold,
                'auc': auc,
                'win_rate_filtered': wr,
                'trade_rate': trade_rate,
                'new_pid': new_proc.pid
            })

        except Exception as e:
            logger.error(f"Error in spy_apply_optimizer: {e}", exc_info=True)
            request_handler.send_json_response({'success': False, 'error': str(e)}, 500)
