#!/usr/bin/env python3
"""
AI OPPORTUNITY SELECTOR - Sélection intelligente des meilleures opportunités
==============================================================================

Utilise PyTorch + GPU (RTX) pour analyser les 58 cryptos de la watchlist et
sélectionner les 15-20 meilleures opportunités basées sur:
- Volatilité prédictive (mouvements attendus)
- Potentiel de gain dans les 6-24h
- Qualité des patterns actuels
- Momentum AI-prédit

Architecture:
1. Feature extraction: 50+ features techniques par crypto
2. Volatility prediction: Réseau neuronal prédit amplitude future
3. Gain prediction: Prédit variation prix dans 6h/24h
4. Opportunity scoring: Score final 0-100 pour chaque crypto
5. Selection: TOP 15-20 cryptos avec meilleur score

Mise à jour: Toutes les 30 min (balance performance/réactivité)
"""

import numpy as np
from datetime import datetime, timedelta
from typing import Dict, List, Tuple, Optional
import json
import os
import logging
from dataclasses import dataclass, field
import time

# Logging configuration — module-wide logger for the selector
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("AIOpportunitySelector")

# === PYTORCH + GPU IMPORT ===
# Defaults assume torch is absent; overwritten below when the import succeeds.
TORCH_AVAILABLE = False
DEVICE = "cpu"
GPU_NAME = "CPU"
nn = None  # Placeholder so later `nn is not None` checks don't raise NameError

try:
    import torch
    import torch.nn as nn
    import torch.nn.functional as F
    # 🔵 FIX 25/03: cap CPU threads to avoid saturating the host
    torch.set_num_threads(2)
    try:
        torch.set_num_interop_threads(1)
    except RuntimeError:
        pass  # May already have been set by another module imported earlier

    if torch.cuda.is_available():
        DEVICE = "cuda"
        GPU_NAME = torch.cuda.get_device_name(0)
        TORCH_AVAILABLE = True
        logger.info(f"✅ GPU RTX activé pour AI Opportunity Selector: {GPU_NAME}")
    else:
        # Torch works but no CUDA device — CPU inference mode
        TORCH_AVAILABLE = True
        logger.info("⚠️ PyTorch disponible mais sans GPU - Mode CPU")
except ImportError:
    logger.info("ℹ️  PyTorch non installé - Sélection basique utilisée")
    TORCH_AVAILABLE = False

# === CONFIGURATION ===
OPPORTUNITY_FILE = "ai_opportunities.json"  # persisted selection snapshot
UPDATE_INTERVAL_MINUTES = 3  # Refresh every 3 min (tuned 24/01: 5→3 to catch quick dips)
TOP_N_CRYPTOS = 20            # Select the TOP 20 opportunities
MIN_OPPORTUNITY_SCORE = 55    # Minimum score required for selection (09/03: raised 50→55)


@dataclass
class OpportunityProfile:
    """Opportunity profile computed for a single crypto symbol."""
    symbol: str
    timestamp: datetime = field(default_factory=datetime.now)

    # === AI PREDICTIONS ===
    predicted_volatility_6h: float = 0.0      # Predicted 6h volatility (%)
    predicted_volatility_24h: float = 0.0     # Predicted 24h volatility (%)
    predicted_move_6h: float = 0.0            # Predicted 6h move (%)
    predicted_move_24h: float = 0.0           # Predicted 24h move (%)
    predicted_direction: str = "NEUTRAL"      # UP, DOWN, NEUTRAL

    # === COMPONENT SCORES (each 0-100) ===
    volatility_score: float = 0.0
    momentum_score: float = 0.0
    pattern_score: float = 0.0
    trend_score: float = 0.0
    entry_timing_score: float = 0.0
    risk_reward_score: float = 0.0

    # === FINAL SCORE ===
    opportunity_score: float = 0.0            # Final opportunity score (0-100)
    gain_potential: float = 0.0               # Estimated gain potential (%)
    confidence: float = 0.0                   # Prediction confidence (0-100)
    rank: int = 0                             # Rank within the selection

    # === STATUS ===
    selected: bool = False                    # Picked for trading
    reasons: List[str] = field(default_factory=list)
    warnings: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict:
        """Serialize to a plain dict, coercing numerics for JSON compatibility."""
        def _num(value: float, digits: int) -> float:
            # Force native float: numpy scalars are not JSON-serializable.
            return float(round(value, digits))

        predictions = {
            'volatility_6h': _num(self.predicted_volatility_6h, 3),
            'volatility_24h': _num(self.predicted_volatility_24h, 3),
            'move_6h': _num(self.predicted_move_6h, 3),
            'move_24h': _num(self.predicted_move_24h, 3),
            'direction': str(self.predicted_direction),
        }
        scores = {
            'volatility': _num(self.volatility_score, 1),
            'momentum': _num(self.momentum_score, 1),
            'pattern': _num(self.pattern_score, 1),
            'trend': _num(self.trend_score, 1),
            'entry_timing': _num(self.entry_timing_score, 1),
            'risk_reward': _num(self.risk_reward_score, 1),
        }
        return {
            'symbol': self.symbol,
            'timestamp': self.timestamp.isoformat(),
            'predictions': predictions,
            'scores': scores,
            'opportunity_score': _num(self.opportunity_score, 1),
            'gain_potential': _num(self.gain_potential, 2),
            'confidence': _num(self.confidence, 1),
            'rank': int(self.rank),
            'selected': bool(self.selected),
            'reasons': list(self.reasons),
            'warnings': list(self.warnings),
        }


if TORCH_AVAILABLE and nn is not None:
    class VolatilityPredictorNN(nn.Module):
        """
        Neural network predicting future volatility.

        Input: 50 technical features.
        Output: (volatility_6h, volatility_24h, direction_probs).

        NOTE: submodule attribute names (fc1, bn1, ...) are part of the
        checkpoint contract — renaming them breaks load_state_dict().
        """
        def __init__(self, input_size=50):
            super().__init__()
            # Shared trunk: three FC stages with batch-norm and dropout.
            self.fc1 = nn.Linear(input_size, 128)
            self.bn1 = nn.BatchNorm1d(128)
            self.dropout1 = nn.Dropout(0.3)
            self.fc2 = nn.Linear(128, 64)
            self.bn2 = nn.BatchNorm1d(64)
            self.dropout2 = nn.Dropout(0.2)
            self.fc3 = nn.Linear(64, 32)
            self.bn3 = nn.BatchNorm1d(32)
            # Three prediction heads on top of the trunk.
            self.vol_6h = nn.Linear(32, 1)      # 6h volatility
            self.vol_24h = nn.Linear(32, 1)     # 24h volatility
            self.direction = nn.Linear(32, 3)   # UP / DOWN / NEUTRAL

        def forward(self, x):
            h = self.dropout1(F.relu(self.bn1(self.fc1(x))))
            h = self.dropout2(F.relu(self.bn2(self.fc2(h))))
            h = F.relu(self.bn3(self.fc3(h)))
            # Sigmoid bounds each volatility head, then scale to its % range.
            return (
                torch.sigmoid(self.vol_6h(h)) * 10,   # 0-10% volatility
                torch.sigmoid(self.vol_24h(h)) * 15,  # 0-15% volatility
                F.softmax(self.direction(h), dim=1),
            )
else:
    # Dummy placeholder when PyTorch is not installed.
    class VolatilityPredictorNN:
        pass


class AIOpportunitySelector:
    """AI-driven opportunity selector backed by a PyTorch model.

    Pipeline per crypto: extract 50 technical features, predict future
    volatility/direction, compute a weighted 0-100 opportunity score, then
    rank the watchlist and flag the TOP N symbols. Falls back to simple
    momentum heuristics when PyTorch is unavailable.
    """

    def __init__(self):
        self.device = None        # torch.device once initialised, else None
        self.model = None         # VolatilityPredictorNN instance (or None)
        self.opportunities = {}   # symbol -> OpportunityProfile (last analysis)
        self.last_update = None   # datetime of the last successful selection
        self.torch_available = TORCH_AVAILABLE  # local copy to avoid scope issues

        if self.torch_available:
            try:
                self.device = torch.device(DEVICE)
                self._init_model()
                logger.info(f"✅ AI Opportunity Selector initialisé sur {DEVICE}")
            except Exception as e:
                logger.warning(f"⚠️ Erreur init PyTorch: {e} - Mode fallback")
                self.torch_available = False
        else:
            logger.warning("⚠️ Mode fallback CPU - Sélection basique")

    def _init_model(self):
        """Initialise the prediction model and load pre-trained weights if any."""
        if not self.torch_available:
            return

        try:
            self.model = VolatilityPredictorNN(input_size=50).to(self.device)
            self.model.eval()

            # Try to load pre-trained weights
            model_path = "ai_volatility_predictor.pth"
            if os.path.exists(model_path):
                # FIX: map_location lets a GPU-trained checkpoint load on a
                # CPU-only host (and vice versa) instead of raising.
                self.model.load_state_dict(
                    torch.load(model_path, map_location=self.device)
                )
                logger.info("✅ Modèle pré-entraîné chargé")
            else:
                logger.info("ℹ️ Modèle initialisé avec poids aléatoires (entraînement requis)")

        except Exception as e:
            logger.error(f"❌ Erreur initialisation modèle: {e}")
            self.model = None

    def should_update(self) -> bool:
        """Return True when the last update is older than UPDATE_INTERVAL_MINUTES."""
        if not self.last_update:
            return True

        minutes_since_update = (datetime.now() - self.last_update).total_seconds() / 60
        return minutes_since_update >= UPDATE_INTERVAL_MINUTES

    def extract_features(self, crypto_data: Dict) -> np.ndarray:
        """
        Extract 50 technical features for one crypto.

        Features:
        - Prices: close, high, low, open (4)
        - Momentum: 3, 5, 10, 20 candles (4)
        - EMA: 9, 21, diff, slope (4)
        - RSI + derivatives (3)
        - BB: position, bandwidth, squeeze (3)
        - Volume: ratio, trend, anomalies (3)
        - Historical volatility: 1h, 6h, 24h (3)
        - Patterns: squeeze, breakout, reversal (3)
        - Trend: short, medium, long (3)
        - Relative prices: distances to EMA, BB, pivots (5)
        - Statistics: std, range, ATR (5)
        - Cyclicity: periodicity, regularity (3)
        - Existing AI score + components (7)
        = 50 features

        Returns a float32 vector of length 50; zeros on any extraction error.
        """
        features = []

        try:
            # 1. Normalised prices (4)
            close = crypto_data.get('current_price', 0)
            high = crypto_data.get('high_24h', close)
            low = crypto_data.get('low_24h', close)
            open_price = crypto_data.get('open_price', close)

            if close > 0:
                features.extend([
                    (high - close) / close,
                    (close - low) / close,
                    (close - open_price) / close if open_price > 0 else 0,
                    (high - low) / close if close > 0 else 0
                ])
            else:
                features.extend([0, 0, 0, 0])

            # 2. Momentum (4)
            momentum_3 = crypto_data.get('momentum_3', 0)
            momentum_5 = crypto_data.get('momentum_5', 0)
            momentum_10 = crypto_data.get('momentum_10', 0)
            momentum_20 = crypto_data.get('momentum_20', 0)
            features.extend([momentum_3, momentum_5, momentum_10, momentum_20])

            # 3. EMA (4)
            ema_9 = crypto_data.get('ema_9', close)
            ema_21 = crypto_data.get('ema_21', close)
            ema_diff = ((ema_9 - ema_21) / ema_21) if ema_21 > 0 else 0
            ema_slope = crypto_data.get('ema_slope', 0)
            features.extend([
                (close - ema_9) / close if close > 0 else 0,
                (close - ema_21) / close if close > 0 else 0,
                ema_diff,
                ema_slope
            ])

            # 4. RSI (3)
            rsi = crypto_data.get('rsi', 50)
            rsi_normalized = (rsi - 50) / 50  # -1 to 1
            rsi_velocity = crypto_data.get('rsi_change', 0)
            rsi_divergence = crypto_data.get('rsi_divergence', 0)
            features.extend([rsi_normalized, rsi_velocity, rsi_divergence])

            # 5. Bollinger Bands (3)
            bb_position = crypto_data.get('bb_position', 0.5)
            bb_bandwidth = crypto_data.get('bb_bandwidth', 0)
            bb_squeeze = 1 if bb_bandwidth < 2 else 0
            features.extend([bb_position, bb_bandwidth / 10, bb_squeeze])

            # 6. Volume (3)
            volume_ratio = crypto_data.get('volume_ratio', 1.0)
            volume_trend = crypto_data.get('volume_trend', 0)
            volume_anomaly = 1 if volume_ratio > 2.0 else 0
            features.extend([np.log1p(volume_ratio), volume_trend, volume_anomaly])

            # 7. Historical volatility (3)
            volatility_1h = crypto_data.get('volatility_1h', 0)
            volatility_6h = crypto_data.get('volatility_6h', 0)
            volatility_24h = crypto_data.get('volatility_24h', 0)
            features.extend([volatility_1h, volatility_6h, volatility_24h])

            # 8. Patterns (3)
            is_squeeze = 1 if crypto_data.get('squeeze_active', False) else 0
            is_breakout = 1 if crypto_data.get('breakout_detected', False) else 0
            is_reversal = 1 if crypto_data.get('reversal_pattern', False) else 0
            features.extend([is_squeeze, is_breakout, is_reversal])

            # 9. Trend (3)
            trend_short = 1 if ema_diff > 0 else -1
            trend_medium = crypto_data.get('trend_medium', 0)
            trend_long = crypto_data.get('trend_long', 0)
            features.extend([trend_short, trend_medium, trend_long])

            # 10. Relative prices (5)
            dist_ema9 = (close - ema_9) / close if close > 0 else 0
            dist_ema21 = (close - ema_21) / close if close > 0 else 0
            bb_upper = crypto_data.get('bb_upper', close)
            bb_lower = crypto_data.get('bb_lower', close)
            dist_bb_upper = (bb_upper - close) / close if close > 0 else 0
            dist_bb_lower = (close - bb_lower) / close if close > 0 else 0
            price_range_pos = (close - low) / (high - low) if (high - low) > 0 else 0.5
            features.extend([dist_ema9, dist_ema21, dist_bb_upper, dist_bb_lower, price_range_pos])

            # 11. Statistics (5)
            price_std = crypto_data.get('price_std_20', 0)
            price_range = (high - low) / close if close > 0 else 0
            atr = crypto_data.get('atr', 0)
            avg_volume = crypto_data.get('avg_volume_20', 1)
            volume_std = crypto_data.get('volume_std_20', 0)
            features.extend([price_std, price_range, atr, np.log1p(avg_volume), volume_std])

            # 12. Cyclicity (3)
            squeeze_frequency = crypto_data.get('squeeze_frequency', 0)
            cycle_regularity = crypto_data.get('cycle_regularity', 0)
            time_since_last_squeeze = crypto_data.get('time_since_last_squeeze', 0)
            features.extend([squeeze_frequency, cycle_regularity, time_since_last_squeeze])

            # 13. AI scores (7) — normalised 0-1
            ai_score = crypto_data.get('ai_score', 50) / 100
            technical_score = crypto_data.get('technical_score', 50) / 100
            momentum_score = crypto_data.get('momentum_score', 50) / 100
            pattern_score = crypto_data.get('pattern_score', 50) / 100
            trend_score = crypto_data.get('trend_score', 50) / 100
            entry_quality = crypto_data.get('entry_quality', 50) / 100
            confidence = crypto_data.get('confidence', 50) / 100
            features.extend([ai_score, technical_score, momentum_score, pattern_score,
                           trend_score, entry_quality, confidence])

            # Sanity check: downstream indexing relies on exactly 50 features.
            # FIX: explicit raise instead of assert so the check survives `python -O`.
            if len(features) != 50:
                raise ValueError(f"Attendu 50 features, obtenu {len(features)}")

            return np.array(features, dtype=np.float32)

        except Exception as e:
            logger.error(f"❌ Erreur extraction features: {e}")
            return np.zeros(50, dtype=np.float32)

    def predict_opportunity(self, features: np.ndarray) -> Tuple[float, float, str]:
        """
        Predict the opportunity for one crypto.

        Returns:
            (volatility_6h, volatility_24h, direction)
        """
        if not self.torch_available or self.model is None:
            # Fallback: basic momentum-based estimate
            volatility_6h = abs(features[4]) * 100   # from momentum_3
            volatility_24h = abs(features[6]) * 100  # from momentum_10
            direction = "UP" if features[4] > 0 else ("DOWN" if features[4] < 0 else "NEUTRAL")
            return volatility_6h, volatility_24h, direction

        try:
            with torch.no_grad():
                # Convert features to a batch-of-1 tensor
                x = torch.tensor(features, dtype=torch.float32).unsqueeze(0).to(self.device)

                # Prediction
                vol_6h, vol_24h, direction_probs = self.model(x)

                # Extract scalar results
                volatility_6h = vol_6h.item()
                volatility_24h = vol_24h.item()

                # Direction (0=UP, 1=DOWN, 2=NEUTRAL)
                direction_idx = torch.argmax(direction_probs, dim=1).item()
                direction = ["UP", "DOWN", "NEUTRAL"][direction_idx]

                return volatility_6h, volatility_24h, direction

        except Exception as e:
            logger.error(f"❌ Erreur prédiction: {e}")
            # Error fallback — FIX: keep the NEUTRAL case for zero momentum,
            # consistent with the no-torch fallback above.
            volatility_6h = abs(features[4]) * 100
            volatility_24h = abs(features[6]) * 100
            direction = "UP" if features[4] > 0 else ("DOWN" if features[4] < 0 else "NEUTRAL")
            return volatility_6h, volatility_24h, direction

    def calculate_opportunity_score(self, profile: OpportunityProfile, features: np.ndarray) -> float:
        """
        Compute the final opportunity score (0-100).

        Weighting:
        - Predicted volatility: 30%
        - Current momentum: 20%
        - Pattern quality: 20%
        - Trend alignment: 15%
        - Entry timing: 10%
        - Risk/Reward: 5%
        """
        try:
            # 1. Volatility score (30 points)
            # Optimal: 1-3% for 6h, 3-8% for 24h
            vol_6h = profile.predicted_volatility_6h
            vol_24h = profile.predicted_volatility_24h

            vol_score_6h = 0
            if 1.0 <= vol_6h <= 3.0:
                vol_score_6h = 100
            elif 0.5 <= vol_6h < 1.0 or 3.0 < vol_6h <= 4.0:
                vol_score_6h = 70
            elif vol_6h > 4.0:
                vol_score_6h = max(0, 70 - (vol_6h - 4.0) * 10)  # Penalty when too volatile
            else:
                vol_score_6h = vol_6h * 50  # Not volatile enough

            vol_score_24h = 0
            if 3.0 <= vol_24h <= 8.0:
                vol_score_24h = 100
            elif 2.0 <= vol_24h < 3.0 or 8.0 < vol_24h <= 10.0:
                vol_score_24h = 70
            elif vol_24h > 10.0:
                vol_score_24h = max(0, 70 - (vol_24h - 10.0) * 5)
            else:
                vol_score_24h = vol_24h * 30

            profile.volatility_score = (vol_score_6h * 0.4 + vol_score_24h * 0.6)

            # 2. Momentum score (20 points)
            # ⚡ REWORK 24/01: FAVOUR positive momentum (BUY priority)
            momentum_3 = features[4]  # normalised momentum_3
            momentum_5 = features[5]
            momentum_align = 1 if (momentum_3 > 0 and momentum_5 > 0) else 0

            # Momentum computed to favour the UP direction (no abs())
            if momentum_3 > 0:
                # Positive momentum = BUY opportunity (amplify the score)
                momentum_strength = min(momentum_3 * 200, 100)  # x2 to favour UP
                profile.momentum_score = momentum_strength * 0.7 + momentum_align * 30
            else:
                # Negative momentum = little interest for a BUY
                momentum_strength = max(0, 50 + momentum_3 * 100)  # Penalty
                profile.momentum_score = momentum_strength * 0.7

            # 3. Pattern score (20 points)
            # ⚡ REWORK 24/01: integrates the optimised ai_predictor.py patterns
            # 🔧 AUDIT FIX 28/02: corrected feature indexes (5 were shifted)
            # Real indexes: [0-3]=price, [4-7]=momentum, [8-11]=EMA, [12-14]=RSI,
            #               [15-17]=BB, [18-20]=Volume, [21-23]=Volatility, [24-26]=Patterns,
            #               [27-29]=Trends, [30-34]=Relative prices, [35-39]=Stats, [40-42]=Cycles, [43-49]=AI Scores
            is_squeeze = features[24]
            is_breakout = features[25]
            bb_bandwidth = features[16] * 10  # [16] = bb_bandwidth/10
            rsi = features[12] * 50 + 50      # [12] = rsi_normalized
            volume_ratio = features[18]       # [18] = log1p(vol_ratio)

            pattern_score = 0

            # PATTERN #1: CREUX_REBOUND (ABSOLUTE PRIORITY)
            # 🔴 REWORK 01/04: STRICT criteria — volume mandatory, positive momentum.
            # A dip without confirmed buying volume is NOT a rebound.
            is_creux_rebound = (
                20 <= rsi <= 45 and              # RSI oversold/recovery
                momentum_3 > 0.001 and           # Momentum > 0.1% (price rising)
                volume_ratio > 1.0               # Volume ABOVE average (non-negotiable)
            )
            if is_creux_rebound:
                pattern_score += 60  # ABSOLUTE PRIORITY
                if rsi <= 35:
                    pattern_score += 10  # Bonus: truly oversold RSI
                if volume_ratio > 1.5:
                    pattern_score += 5   # Bonus: strong volume
                logger.info(f"🎯 {profile.symbol}: CREUX_REBOUND détecté (RSI={rsi:.0f}, Mom={momentum_3*100:.2f}%, Vol={volume_ratio:.2f})")

            # PATTERN #2: SQUEEZE_WAITING (75% win rate confirmed)
            if is_squeeze and bb_bandwidth < 2:
                pattern_score += 50  # Active squeeze = excellent opportunity
                # FIX: was `crypto_data.get('symbol', 'UNK')` — crypto_data is not
                # in scope in this method (NameError whenever a squeeze fired).
                logger.info(f"🔥 {profile.symbol}: SQUEEZE_WAITING détecté")

            # PATTERN #3: Breakout
            if is_breakout:
                pattern_score += 30  # Breakout detected

            # PATTERN #4: Consolidation (narrow BB)
            if bb_bandwidth < 5:  # Low BB bandwidth
                pattern_score += 20  # Consolidation

            # PATTERN #5: Optimal position (middle of the bands)
            bb_position = features[15]  # [15] = bb_position
            if 0.3 <= bb_position <= 0.7:
                pattern_score += 10  # Price well positioned

            profile.pattern_score = min(pattern_score, 100)

            # 4. Trend score (15 points)
            ema_diff = features[10]     # normalised EMA diff
            trend_short = features[27]  # [27] = trend_short

            if profile.predicted_direction == "UP":
                if ema_diff > 0 and trend_short > 0:
                    profile.trend_score = 100  # Perfect alignment
                elif ema_diff > -0.05:
                    profile.trend_score = 70  # Acceptable
                else:
                    profile.trend_score = 40  # Counter-trend
            else:
                profile.trend_score = 60  # Neutral

            # 5. Entry-timing score (10 points)
            # ⚡ REWORK 24/01: favour oversold RSI (CREUX_REBOUND zone)
            # `rsi` and `bb_position` were computed above.
            timing_score = 0
            # PRIORITY: CREUX_REBOUND zone (oversold RSI = rebound opportunity)
            # 🆕 FIX 09/03: 35-42 recognised as extended dip zone (aligned with ai_predictor.py)
            if 20 <= rsi <= 35:
                timing_score += 80  # Optimal CREUX_REBOUND zone (deep)
            elif 35 < rsi <= 42:
                timing_score += 70  # Extended dip zone (aligned ai_predictor.py RSI<=42)
            elif 42 < rsi <= 50 or 55 <= rsi <= 70:
                timing_score += 50  # Acceptable standard RSI
            elif 50 < rsi < 55:
                timing_score += 30  # Neutral RSI
            else:
                timing_score += 10  # Extreme RSI (>70 or <20)

            # BB position: favour the lower band (rebound likely)
            if bb_position < 0.3:
                timing_score += 20  # Near the lower band = rebound likely
            elif 0.3 <= bb_position <= 0.7:
                timing_score += 50  # Middle of the bands
            else:
                timing_score += 10  # Upper band (reversal risk)

            profile.entry_timing_score = min(timing_score, 100)

            # 6. Risk/reward score (5 points)
            # 🔧 AUDIT FIX 28/02: before, risk=0.3*move and reward=0.7*move made
            # R/R a constant 2.33. Now the reward ratio is modulated by momentum
            # as a directionality proxy.
            expected_move = profile.predicted_volatility_6h
            reward_ratio = 0.5 + min(0.3, max(-0.2, momentum_3 * 10))  # 0.3 to 0.8 with momentum
            risk_ratio = 1.0 - reward_ratio
            risk = expected_move * risk_ratio
            reward = expected_move * reward_ratio
            rr_ratio = reward / (risk + 0.01)  # Avoid division by zero
            profile.risk_reward_score = min(rr_ratio * 20, 100)

            # Weighted final score
            final_score = (
                profile.volatility_score * 0.30 +
                profile.momentum_score * 0.20 +
                profile.pattern_score * 0.20 +
                profile.trend_score * 0.15 +
                profile.entry_timing_score * 0.10 +
                profile.risk_reward_score * 0.05
            )

            # 🆕 FIX 09/03: DIRECTIONAL BTC PENALTY
            # If BTC is falling, altcoin bounces are dead-cats → penalise the score.
            # 🔧 FIX 31/03: check the AGE of the BTC datum — older than 10 min is stale
            # (bug: _btc_momentum stuck at -14.11% for hours while BTC sat at +1.54%).
            try:
                from ai_predictor import AIPredictor as _AIP
                _btc_mom_s = getattr(_AIP, '_btc_momentum', None)
                _btc_last_check = getattr(_AIP, '_last_market_check', None)
                _btc_age = None  # FIX: always bound — replaces fragile `'_btc_age' in dir()` probe
                _btc_data_fresh = False
                if _btc_last_check is not None:
                    _btc_age = (datetime.now() - _btc_last_check).total_seconds()
                    _btc_data_fresh = _btc_age < 600  # < 10 minutes
                if _btc_mom_s is not None and _btc_data_fresh:
                    # 🔧 FIX 01/04: _btc_momentum is in % (e.g. -0.5 = -0.5%), not decimal
                    if _btc_mom_s < -0.5:    # BTC crash > 0.5%
                        final_score *= 0.55
                        profile.warnings.append(f"BTC crash ({_btc_mom_s:+.2f}%) — score pénalisé -45%")
                    elif _btc_mom_s < -0.3:  # BTC drop > 0.3%
                        final_score *= 0.75
                        profile.warnings.append(f"BTC faible ({_btc_mom_s:+.2f}%) — score pénalisé -25%")
                elif _btc_mom_s is not None and not _btc_data_fresh:
                    _age_min = int(_btc_age / 60) if _btc_age is not None else '?'
                    profile.warnings.append(f"BTC data obsolète (>{_age_min}min) — pénalité ignorée")
            except Exception:
                # ai_predictor may be absent in some deployments — best effort only
                pass

            # Estimated gain potential
            profile.gain_potential = profile.predicted_volatility_6h * 0.6  # 60% of volatility

            # Confidence based on signal coherence
            confidence_factors = [
                profile.volatility_score > 60,
                profile.momentum_score > 60,
                profile.pattern_score > 60,
                profile.trend_score > 60,
                profile.entry_timing_score > 60
            ]
            profile.confidence = sum(confidence_factors) / len(confidence_factors) * 100

            return final_score

        except Exception as e:
            logger.error(f"❌ Erreur calcul opportunity score: {e}")
            return 0.0

    def analyze_crypto(self, symbol: str, crypto_data: Dict) -> OpportunityProfile:
        """Analyse one crypto and build its opportunity profile."""
        profile = OpportunityProfile(symbol=symbol)

        try:
            # 1. Feature extraction
            features = self.extract_features(crypto_data)

            # 2. AI prediction
            vol_6h, vol_24h, direction = self.predict_opportunity(features)
            profile.predicted_volatility_6h = vol_6h
            profile.predicted_volatility_24h = vol_24h
            profile.predicted_direction = direction
            profile.predicted_move_6h = vol_6h * 0.7  # 70% of volatility
            profile.predicted_move_24h = vol_24h * 0.6

            # 3. Score computation
            opportunity_score = self.calculate_opportunity_score(profile, features)
            profile.opportunity_score = opportunity_score

            # 4. Reasons and warnings
            if profile.volatility_score > 70:
                profile.reasons.append(f"Excellente volatilité prédite: {vol_6h:.1f}% (6h)")
            if profile.momentum_score > 70:
                profile.reasons.append("Momentum fort et cohérent")
            if profile.pattern_score > 70:
                profile.reasons.append("Pattern favorable détecté")
            if profile.trend_score > 80:
                profile.reasons.append(f"Tendance alignée {direction}")

            if vol_6h > 5.0:
                profile.warnings.append("Volatilité très élevée - Risque accru")
            if profile.confidence < 50:
                profile.warnings.append("Confiance faible - Signaux contradictoires")

        except Exception as e:
            logger.error(f"❌ Erreur analyse {symbol}: {e}")
            profile.opportunity_score = 0

        return profile

    def select_opportunities(self, watchlist: List[Dict]) -> List[OpportunityProfile]:
        """
        Select the TOP opportunities from the watchlist.

        Args:
            watchlist: list of cryptos with their market data.

        Returns:
            OpportunityProfile list sorted by score (best first).
        """
        logger.info(f"🔍 Analyse de {len(watchlist)} cryptos pour sélection opportunités...")
        start_time = time.time()

        # Analyse each crypto
        profiles = []
        for crypto_data in watchlist:
            symbol = crypto_data.get('symbol', '')
            if not symbol:
                continue

            profile = self.analyze_crypto(symbol, crypto_data)
            profiles.append(profile)
            self.opportunities[symbol] = profile

        # Sort by descending score
        profiles.sort(key=lambda p: p.opportunity_score, reverse=True)

        # Assign ranks and select the TOP N
        for i, profile in enumerate(profiles, 1):
            profile.rank = i
            profile.selected = (i <= TOP_N_CRYPTOS and profile.opportunity_score >= MIN_OPPORTUNITY_SCORE)

        # Stats — FIX: guard the empty watchlist (np.mean([]) yields NaN + warning)
        selected_count = sum(1 for p in profiles if p.selected)
        avg_score = float(np.mean([p.opportunity_score for p in profiles])) if profiles else 0.0
        top_score = profiles[0].opportunity_score if profiles else 0

        elapsed = time.time() - start_time
        logger.info(f"✅ Sélection terminée en {elapsed:.2f}s")
        logger.info(f"   - TOP {selected_count}/{len(profiles)} cryptos sélectionnées")
        logger.info(f"   - Score moyen: {avg_score:.1f}/100")
        logger.info(f"   - Meilleur score: {top_score:.1f}/100 ({profiles[0].symbol if profiles else 'N/A'})")

        self.last_update = datetime.now()
        self._save_opportunities(profiles)

        return profiles

    def get_selected_symbols(self) -> List[str]:
        """Return the list of currently selected symbols."""
        return [symbol for symbol, profile in self.opportunities.items() if profile.selected]

    def get_opportunity_profile(self, symbol: str) -> Optional[OpportunityProfile]:
        """Return the opportunity profile for a symbol, or None if unknown."""
        return self.opportunities.get(symbol)

    def _save_opportunities(self, profiles: List[OpportunityProfile]):
        """Persist the opportunity list to OPPORTUNITY_FILE (best effort)."""
        try:
            data = {
                'last_update': datetime.now().isoformat(),
                'top_n': TOP_N_CRYPTOS,
                'min_score': MIN_OPPORTUNITY_SCORE,
                'opportunities': [p.to_dict() for p in profiles]
            }

            with open(OPPORTUNITY_FILE, 'w') as f:
                json.dump(data, f, indent=2)

            logger.info(f"💾 Opportunités sauvegardées: {OPPORTUNITY_FILE}")

        except Exception as e:
            logger.error(f"❌ Erreur sauvegarde opportunités: {e}")

    def load_opportunities(self) -> List[OpportunityProfile]:
        """Reload previously saved opportunities; returns [] when none exist."""
        if not os.path.exists(OPPORTUNITY_FILE):
            return []

        try:
            with open(OPPORTUNITY_FILE, 'r') as f:
                data = json.load(f)

            profiles = []
            for opp_data in data.get('opportunities', []):
                profile = OpportunityProfile(symbol=opp_data['symbol'])

                # Load predictions
                preds = opp_data.get('predictions', {})
                profile.predicted_volatility_6h = preds.get('volatility_6h', 0)
                profile.predicted_volatility_24h = preds.get('volatility_24h', 0)
                profile.predicted_move_6h = preds.get('move_6h', 0)
                profile.predicted_move_24h = preds.get('move_24h', 0)
                profile.predicted_direction = preds.get('direction', 'NEUTRAL')

                # Load scores
                scores = opp_data.get('scores', {})
                profile.volatility_score = scores.get('volatility', 0)
                profile.momentum_score = scores.get('momentum', 0)
                profile.pattern_score = scores.get('pattern', 0)
                profile.trend_score = scores.get('trend', 0)
                profile.entry_timing_score = scores.get('entry_timing', 0)
                profile.risk_reward_score = scores.get('risk_reward', 0)

                profile.opportunity_score = opp_data.get('opportunity_score', 0)
                profile.gain_potential = opp_data.get('gain_potential', 0)
                profile.confidence = opp_data.get('confidence', 0)
                profile.rank = opp_data.get('rank', 0)
                profile.selected = opp_data.get('selected', False)
                profile.reasons = opp_data.get('reasons', [])
                profile.warnings = opp_data.get('warnings', [])

                profiles.append(profile)
                self.opportunities[profile.symbol] = profile

            self.last_update = datetime.fromisoformat(data.get('last_update', datetime.now().isoformat()))
            logger.info(f"✅ {len(profiles)} opportunités chargées depuis {OPPORTUNITY_FILE}")

            return profiles

        except Exception as e:
            logger.error(f"❌ Erreur chargement opportunités: {e}")
            return []


# === SINGLETON ===
_opportunity_selector = None

def get_opportunity_selector() -> AIOpportunitySelector:
    """Return the shared AIOpportunitySelector instance (created lazily)."""
    global _opportunity_selector
    if _opportunity_selector is not None:
        return _opportunity_selector
    _opportunity_selector = AIOpportunitySelector()
    return _opportunity_selector


if __name__ == "__main__":
    # Basic smoke test: build the singleton and print the active configuration.
    selector = get_opportunity_selector()
    print(f"✅ AI Opportunity Selector prêt")
    print(f"   - Device: {DEVICE}")
    print(f"   - PyTorch disponible: {TORCH_AVAILABLE}")
    print(f"   - TOP N cryptos: {TOP_N_CRYPTOS}")
    print(f"   - Score minimum: {MIN_OPPORTUNITY_SCORE}")
