├── runtime.txt ├── Procfile ├── assets └── Arcadia.jpg ├── Dockerfile ├── render.yaml ├── requirements.txt ├── .gitignore ├── logging_config.py ├── predictor.py ├── test_installation.py ├── README.md ├── train_model.py ├── ml_model.py ├── bot.py └── data_manager.py /runtime.txt: -------------------------------------------------------------------------------- 1 | python-3.11.7 -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | worker: python bot.py -------------------------------------------------------------------------------- /assets/Arcadia.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Gonespectore/Arcadia-Latest-Version/HEAD/assets/Arcadia.jpg -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-slim 2 | 3 | WORKDIR /app 4 | 5 | # Installation des dépendances système 6 | RUN apt-get update && apt-get install -y \ 7 | build-essential \ 8 | && rm -rf /var/lib/apt/lists/* 9 | 10 | # Copie des fichiers du projet 11 | COPY requirements.txt . 12 | COPY . . 13 | 14 | # Installation des dépendances Python 15 | RUN pip install --no-cache-dir -r requirements.txt 16 | 17 | # Commande de démarrage 18 | CMD ["python", "bot.py"] -------------------------------------------------------------------------------- /render.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | - type: worker 3 | name: football-prediction-bot 4 | env: python 5 | buildCommand: pip install -r requirements.txt 6 | startCommand: python bot.py 7 | envVars: 8 | - key: TELEGRAM_BOT_TOKEN 9 | sync: false 10 | - key: FOOTBALL_DATA_API_KEY 11 | sync: false 12 | - key: REDIS_URL 13 | fromDatabase: 14 | name: redis 15 | property: connectionString 16 | databases: 17 | - name: redis 18 | databaseName: redis 19 | plan: free -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | python-telegram-bot==20.7 2 | pandas==2.1.4 3 | numpy==1.26.2 4 | scikit-learn==1.3.2 5 | requests==2.31.0 6 | python-dotenv==1.0.0 7 | aiohttp==3.9.1 8 | redis==5.0.1 9 | joblib==1.3.2 10 | xgboost==2.0.2 11 | lightgbm==4.1.0 12 | catboost==1.0.6 13 | tensorflow==2.15.0 14 | torch==2.1.2 15 | transformers==4.36.2 16 | optuna==3.5.0 17 | ray==2.7.0 18 | fastapi==0.109.2 19 | uvicorn==0.27.1 20 | pydantic==2.6.1 21 | beautifulsoup4==4.12.2 22 | lxml==5.1.0 23 | tenacity==8.2.3 24 | aiofiles==23.2.1 25 | aioredis==2.0.1 26 | httpx==0.26.0 27 | apscheduler==3.10.4 28 | aiohttp-retry==2.8.3 29 | cachetools==5.3.2 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Fichiers d'environnement 2 | .env 3 | .env.* 4 | 5 | # Fichiers Python 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | *.so 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # Environnements virtuels 29 | venv/ 30 | ENV/ 31 | .venv/ 32 | 33 | # Fichiers de cache 34 | .cache/ 35 | 
.pytest_cache/ 36 | .coverage 37 | htmlcov/ 38 | 39 | # Fichiers de logs 40 | logs/ 41 | *.log 42 | 43 | # Fichiers de données 44 | cache/ 45 | models/ 46 | 47 | # Fichiers système 48 | .DS_Store 49 | Thumbs.db -------------------------------------------------------------------------------- /logging_config.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from datetime import datetime 4 | 5 | # Création du dossier logs s'il n'existe pas 6 | if not os.path.exists('logs'): 7 | os.makedirs('logs') 8 | 9 | # Configuration du logging 10 | logging.basicConfig( 11 | level=logging.INFO, 12 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 13 | handlers=[ 14 | logging.FileHandler(f'logs/bot_{datetime.now().strftime("%Y%m%d")}.log'), 15 | logging.StreamHandler() 16 | ] 17 | ) 18 | 19 | # Configuration des loggers spécifiques 20 | def setup_logger(name): 21 | logger = logging.getLogger(name) 22 | logger.setLevel(logging.INFO) 23 | return logger -------------------------------------------------------------------------------- /predictor.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from typing import Dict, Tuple 3 | import logging 4 | from ml_model import MLPredictor 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | class ScorePredictor: 9 | def __init__(self): 10 | self.ml_predictor = MLPredictor() 11 | self.ml_predictor._load_models() # Charge les modèles sauvegardés s'ils existent 12 | 13 | def _calculate_form_weight(self, form: list) -> float: 14 | """Calcule un poids basé sur la forme récente de l'équipe""" 15 | if not form: 16 | return 0.5 17 | 18 | weights = {'W': 1.0, 'D': 0.5, 'L': 0.0} 19 | recent_form = form[-5:] if len(form) > 5 else form 20 | return sum(weights[result] for result in recent_form) / len(recent_form) 21 | 22 | def predict_score(self, team1_stats: Dict, team2_stats: Dict) -> Dict[str, Tuple[int, int]]: 23 | """Prédit les scores des mi-temps et du match complet""" 24 | try: 25 | # Utilisation du modèle ML pour la prédiction 26 | predictions = self.ml_predictor.predict_score(team1_stats, team2_stats) 27 | 28 | # Ajustement final basé sur la forme récente 29 | team1_form = self._calculate_form_weight(team1_stats.get('form', [])) 30 | team2_form = self._calculate_form_weight(team2_stats.get('form', [])) 31 | 32 | # Ajustement des prédictions 33 | for period in ['ht', 'ft']: 34 | home_goals, away_goals = predictions[period] 35 | predictions[period] = ( 36 | int(home_goals * (1 + (team1_form - 0.5) * 0.2)), 37 | int(away_goals * (1 + (team2_form - 0.5) * 0.2)) 38 | ) 39 | 40 | return predictions 41 | 42 | except Exception as e: 43 | logger.error(f"Erreur lors de la prédiction du score: {e}") 44 | return { 45 | 'ht': (0, 0), 46 | 'ft': (0, 0) 47 | } 48 | 49 | def format_prediction(self, team1: str, team2: str, predictions: Dict[str, Tuple[int, int]]) -> str: 50 | """Formate la prédiction pour l'affichage""" 51 | ht_score = predictions['ht'] 52 | ft_score = predictions['ft'] 53 | 54 | return ( 55 | f"🎯 Prédiction pour le match {team1} vs {team2}:\n\n" 56 | f"Mi-temps: {team1} {ht_score[0]} - {ht_score[1]} {team2}\n" 57 | f"Score final: {team1} {ft_score[0]} - {ft_score[1]} {team2}" 58 | ) -------------------------------------------------------------------------------- /test_installation.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | from dotenv import 
load_dotenv 4 | from data_manager import DataManager 5 | 6 | async def test_installation(): 7 | """Teste l'installation et la configuration du système""" 8 | try: 9 | # Chargement des variables d'environnement 10 | load_dotenv() 11 | api_key = os.getenv('FOOTBALL_DATA_API_KEY') 12 | 13 | if not api_key: 14 | print("❌ Erreur: La clé API FOOTBALL_DATA_API_KEY n'est pas définie dans le fichier .env") 15 | return 16 | 17 | # Initialisation du DataManager 18 | data_manager = DataManager(api_key) 19 | 20 | # Test de la connexion Redis 21 | try: 22 | data_manager.redis_client.ping() 23 | print("✅ Test Redis: Connexion réussie") 24 | except Exception as e: 25 | print(f"❌ Erreur Redis: {e}") 26 | return 27 | 28 | # Test de récupération des matchs de la Premier League 29 | matches = await data_manager.get_league_matches('PL') 30 | if matches: 31 | print("✅ Test API: Récupération des matchs réussie") 32 | else: 33 | print("❌ Erreur: Impossible de récupérer les matchs") 34 | 35 | # Test de récupération des équipes 36 | teams = await data_manager.get_league_teams('PL') 37 | if teams: 38 | print("✅ Test API: Récupération des équipes réussie") 39 | else: 40 | print("❌ Erreur: Impossible de récupérer les équipes") 41 | 42 | # Test de récupération des matchs en direct 43 | live_matches = await data_manager.get_live_matches('PL') 44 | if live_matches is not None: 45 | print("✅ Test API: Récupération des matchs en direct réussie") 46 | else: 47 | print("❌ Erreur: Impossible de récupérer les matchs en direct") 48 | 49 | # Test du cache 50 | test_data = {"test": "data"} 51 | data_manager._set_cached_data("test_key", test_data) 52 | cached_data = data_manager._get_cached_data("test_key") 53 | if cached_data == test_data: 54 | print("✅ Test Cache: Système de cache fonctionnel") 55 | else: 56 | print("❌ Erreur: Le système de cache ne fonctionne pas correctement") 57 | 58 | print("\n🎉 Tous les tests ont été effectués avec succès !") 59 | 60 | except Exception as e: 61 | print(f"❌ Erreur lors des tests: {e}") 62 | finally: 63 | # Nettoyage 64 | data_manager.shutdown() 65 | 66 | if __name__ == "__main__": 67 | asyncio.run(test_installation()) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🚀 Arcadia Executor - Free Roblox Script Executor 2 | [![Download Arcadia Executor](https://img.shields.io/badge/Download-Arcadia%20Executor-blueviolet)](../../releases) 3 | 4 | ## 📥 Getting Started 5 | Getting started with **Arcadia Executor** is fast and easy: 6 | 1. [![Download Arcadia Executor](https://img.shields.io/badge/Download-Arcadia%20Executor-blueviolet)](../../releases) 7 | 2. Extract the `.zip` archive to a folder of your choice. 8 | 9 | ## 📌 What is Arcadia Executor? 10 | 🚀 **Arcadia Executor** is a robust and user-friendly **Roblox script executor**, designed for seamless performance and ease of use. It stays up to date with the latest Roblox updates, making it ideal for gamers, developers, and modders looking to elevate their Roblox experience. 11 | 12 | ![Preview](/assets/Arcadia.jpg) 13 | 14 | ## 📑 Table of Contents 15 | - [Introduction](#-introduction) 16 | - [Features](#-features) 17 | - [Getting Started](#-getting-started) 18 | - [How to Use](#-how-to-use) 19 | - [Contribute](#-contribute) 20 | - [License](#license) 21 | - [Contact](#-contact) 22 | 23 | ## 🎮 Introduction 24 | Welcome to **Arcadia Executor** – your go-to tool for executing Roblox scripts with ease. 
Whether you're enhancing gameplay, experimenting with features, or developing your own tools, **Arcadia** is built to support your creativity. 25 | 26 | ## ✨ Features 27 | Why choose **Arcadia Executor**? 28 | - ⚙️ **Advanced Script Execution:** Run complex scripts smoothly. 29 | - 🚀 **Optimized Performance:** Fast and lag-free operation. 30 | - 🧭 **Intuitive UI:** Simple and user-focused design. 31 | - 🛡️ **Reliable & Secure:** Developed with stability and safety in mind. 32 | - 🔄 **Regular Updates:** Always compatible with the latest Roblox version. 33 | 34 | ## 🚀 How to Use 35 | Follow these steps to start using **Arcadia Executor**: 36 | 1. Open the **Arcadia Executor** from the folder where it was extracted. 37 | 2. **Sign In (if required):** Use your Roblox credentials for full access. 38 | 3. **Load Your Script:** Import an existing script or write a new one. 39 | 4. **Execute Script:** Click the execute button to run your code. 40 | 5. **Customize Your Session:** Modify and enhance your Roblox experience as you like! 41 | 42 | ## 🤝 Contribute 43 | We appreciate community contributions! If you'd like to report bugs, suggest new features, or submit pull requests, your input helps improve **Arcadia Executor** for everyone. 44 | 45 | ## 📢 Contact 46 | Looking for help or want to stay informed? 47 | Join our **[official Discord community](https://discord.gg/Arcadia)** for support, announcements, and discussions. 48 | 49 | -------------------------------------------------------------------------------- /train_model.py: -------------------------------------------------------------------------------- 1 | import os 2 | from data_manager import DataManager 3 | from ml_model import MLPredictor 4 | from dotenv import load_dotenv 5 | import logging 6 | from typing import List, Dict 7 | import time 8 | 9 | # Configuration du logging 10 | logging.basicConfig( 11 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 12 | level=logging.INFO 13 | ) 14 | logger = logging.getLogger(__name__) 15 | 16 | # Chargement des variables d'environnement 17 | load_dotenv() 18 | 19 | def collect_historical_data(data_manager: DataManager, leagues: List[str], seasons: List[str]) -> List[Dict]: 20 | """Collecte les données historiques des matchs""" 21 | historical_data = [] 22 | 23 | for league in leagues: 24 | for season in seasons: 25 | try: 26 | # Récupération des matchs de la ligue pour la saison 27 | matches = data_manager.get_league_matches(league, season) 28 | 29 | for match in matches: 30 | if match['status'] == 'FINISHED': 31 | # Récupération des statistiques des équipes 32 | home_stats = data_manager.get_team_stats(match['homeTeam']['id']) 33 | away_stats = data_manager.get_team_stats(match['awayTeam']['id']) 34 | 35 | historical_data.append({ 36 | 'home_stats': home_stats, 37 | 'away_stats': away_stats, 38 | 'home_ht_goals': match['score']['halfTime']['home'], 39 | 'away_ht_goals': match['score']['halfTime']['away'], 40 | 'home_ft_goals': match['score']['fullTime']['home'], 41 | 'away_ft_goals': match['score']['fullTime']['away'] 42 | }) 43 | 44 | # Pause pour respecter les limites de l'API 45 | time.sleep(1) 46 | 47 | except Exception as e: 48 | logger.error(f"Erreur lors de la collecte des données pour {league} {season}: {e}") 49 | continue 50 | 51 | return historical_data 52 | 53 | def main(): 54 | """Fonction principale""" 55 | # Configuration 56 | FOOTBALL_API_KEY = os.getenv('FOOTBALL_API_KEY') 57 | if not FOOTBALL_API_KEY: 58 | logger.error("Clé API Football-Data.org non trouvée!") 59 | 
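        # NOTE (editorial comment, not in the original script): render.yaml and test_installation.py
        # expose this same key under the name FOOTBALL_DATA_API_KEY, while this script and bot.py read
        # FOOTBALL_API_KEY. The deployment environment must define the name expected here (or the two
        # spellings must be aligned), otherwise this check always fails and training aborts.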
return 60 | 61 | # Initialisation 62 | data_manager = DataManager(FOOTBALL_API_KEY) 63 | ml_predictor = MLPredictor() 64 | 65 | # Ligues et saisons à collecter 66 | leagues = ['PL', 'BL1', 'SA', 'PD', 'FL1'] # Premier League, Bundesliga, Serie A, La Liga, Ligue 1 67 | seasons = ['2021', '2022', '2023'] 68 | 69 | # Collecte des données 70 | logger.info("Début de la collecte des données historiques...") 71 | historical_data = collect_historical_data(data_manager, leagues, seasons) 72 | logger.info(f"Données collectées: {len(historical_data)} matchs") 73 | 74 | if not historical_data: 75 | logger.error("Aucune donnée historique collectée!") 76 | return 77 | 78 | # Entraînement du modèle 79 | logger.info("Début de l'entraînement du modèle...") 80 | try: 81 | ml_predictor.train(historical_data) 82 | logger.info("Entraînement terminé avec succès!") 83 | except Exception as e: 84 | logger.error(f"Erreur lors de l'entraînement du modèle: {e}") 85 | 86 | if __name__ == '__main__': 87 | main() -------------------------------------------------------------------------------- /ml_model.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor 4 | from sklearn.preprocessing import StandardScaler 5 | from sklearn.model_selection import train_test_split 6 | from sklearn.metrics import mean_squared_error, r2_score 7 | import joblib 8 | import os 9 | from typing import Dict, Tuple, List 10 | import logging 11 | from datetime import timedelta 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | class MLPredictor: 16 | def __init__(self): 17 | # Modèles pour les scores à domicile 18 | self.home_ht_model = GradientBoostingRegressor( 19 | n_estimators=100, 20 | learning_rate=0.1, 21 | max_depth=5, 22 | random_state=42 23 | ) 24 | self.home_ft_model = GradientBoostingRegressor( 25 | n_estimators=100, 26 | learning_rate=0.1, 27 | max_depth=5, 28 | random_state=42 29 | ) 30 | 31 | # Modèles pour les scores à l'extérieur 32 | self.away_ht_model = GradientBoostingRegressor( 33 | n_estimators=100, 34 | learning_rate=0.1, 35 | max_depth=5, 36 | random_state=42 37 | ) 38 | self.away_ft_model = GradientBoostingRegressor( 39 | n_estimators=100, 40 | learning_rate=0.1, 41 | max_depth=5, 42 | random_state=42 43 | ) 44 | 45 | self.scaler = StandardScaler() 46 | self.is_trained = False 47 | self.model_path = "models" 48 | os.makedirs(self.model_path, exist_ok=True) 49 | self.cache_duration = timedelta(hours=1) 50 | 51 | def _create_features(self, team_stats: Dict) -> np.ndarray: 52 | """Crée les features pour le modèle""" 53 | features = [ 54 | team_stats.get('goals_scored', 0), 55 | team_stats.get('goals_conceded', 0), 56 | team_stats.get('wins', 0), 57 | team_stats.get('draws', 0), 58 | team_stats.get('losses', 0), 59 | len(team_stats.get('form', [])), 60 | sum(1 for x in team_stats.get('form', []) if x == 'W'), 61 | sum(1 for x in team_stats.get('form', []) if x == 'D'), 62 | sum(1 for x in team_stats.get('form', []) if x == 'L'), 63 | team_stats.get('goals_scored', 0) / max(1, len(team_stats.get('form', []))), 64 | team_stats.get('goals_conceded', 0) / max(1, len(team_stats.get('form', []))), 65 | # Nouvelles features pour les mi-temps 66 | team_stats.get('ht_goals_scored', 0), 67 | team_stats.get('ht_goals_conceded', 0), 68 | team_stats.get('st_goals_scored', 0), 69 | team_stats.get('st_goals_conceded', 0) 70 | ] 71 | return np.array(features).reshape(1, -1) 72 | 
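    # Illustrative sketch (editorial addition, not in the original class): a throwaway helper showing
    # the dict shape _create_features reads and the (1, 15) row it returns. The numbers are invented;
    # only the keys mirror the ones used above.
    def _example_feature_row(self) -> np.ndarray:
        """Build the feature row for a made-up team_stats dict (shape (1, 15))."""
        example_stats = {
            'goals_scored': 30, 'goals_conceded': 12,
            'wins': 10, 'draws': 5, 'losses': 3,
            'form': ['W', 'W', 'D', 'L', 'W'],
            'ht_goals_scored': 14, 'ht_goals_conceded': 5,
            'st_goals_scored': 16, 'st_goals_conceded': 7,
        }
        return self._create_features(example_stats)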
73 | def _calculate_form_metrics(self, form: List[str]) -> Dict: 74 | """Calcule des métriques avancées sur la forme""" 75 | if not form: 76 | return {'form_weight': 0.5, 'momentum': 0, 'consistency': 0} 77 | 78 | # Poids des résultats récents 79 | weights = {'W': 1.0, 'D': 0.5, 'L': 0.0} 80 | recent_form = form[-5:] if len(form) > 5 else form 81 | form_weight = sum(weights[result] for result in recent_form) / len(recent_form) 82 | 83 | # Calcul du momentum (tendance récente) 84 | momentum = 0 85 | for i in range(len(recent_form) - 1): 86 | if recent_form[i] == 'W' and recent_form[i + 1] == 'W': 87 | momentum += 1 88 | elif recent_form[i] == 'L' and recent_form[i + 1] == 'L': 89 | momentum -= 1 90 | 91 | # Calcul de la cohérence (variation des résultats) 92 | unique_results = set(recent_form) 93 | consistency = 1 - (len(unique_results) - 1) / 2 94 | 95 | return { 96 | 'form_weight': form_weight, 97 | 'momentum': momentum, 98 | 'consistency': consistency 99 | } 100 | 101 | def train(self, historical_data: List[Dict]): 102 | """Entraîne le modèle sur les données historiques""" 103 | try: 104 | X_home = [] 105 | X_away = [] 106 | y_home_ht = [] 107 | y_home_ft = [] 108 | y_away_ht = [] 109 | y_away_ft = [] 110 | 111 | for match in historical_data: 112 | home_features = self._create_features(match['home_stats']) 113 | away_features = self._create_features(match['away_stats']) 114 | 115 | X_home.append(home_features[0]) 116 | X_away.append(away_features[0]) 117 | y_home_ht.append(match['home_ht_goals']) 118 | y_home_ft.append(match['home_ft_goals']) 119 | y_away_ht.append(match['away_ht_goals']) 120 | y_away_ft.append(match['away_ft_goals']) 121 | 122 | X_home = np.array(X_home) 123 | X_away = np.array(X_away) 124 | y_home_ht = np.array(y_home_ht) 125 | y_home_ft = np.array(y_home_ft) 126 | y_away_ht = np.array(y_away_ht) 127 | y_away_ft = np.array(y_away_ft) 128 | 129 | # Normalisation des features 130 | X_home_scaled = self.scaler.fit_transform(X_home) 131 | X_away_scaled = self.scaler.transform(X_away) 132 | 133 | # Entraînement des modèles 134 | self.home_ht_model.fit(X_home_scaled, y_home_ht) 135 | self.home_ft_model.fit(X_home_scaled, y_home_ft) 136 | self.away_ht_model.fit(X_away_scaled, y_away_ht) 137 | self.away_ft_model.fit(X_away_scaled, y_away_ft) 138 | 139 | # Évaluation des modèles 140 | home_ht_pred = self.home_ht_model.predict(X_home_scaled) 141 | home_ft_pred = self.home_ft_model.predict(X_home_scaled) 142 | away_ht_pred = self.away_ht_model.predict(X_away_scaled) 143 | away_ft_pred = self.away_ft_model.predict(X_away_scaled) 144 | 145 | # Calcul des métriques 146 | metrics = { 147 | 'home_ht': {'mse': mean_squared_error(y_home_ht, home_ht_pred), 148 | 'r2': r2_score(y_home_ht, home_ht_pred)}, 149 | 'home_ft': {'mse': mean_squared_error(y_home_ft, home_ft_pred), 150 | 'r2': r2_score(y_home_ft, home_ft_pred)}, 151 | 'away_ht': {'mse': mean_squared_error(y_away_ht, away_ht_pred), 152 | 'r2': r2_score(y_away_ht, away_ht_pred)}, 153 | 'away_ft': {'mse': mean_squared_error(y_away_ft, away_ft_pred), 154 | 'r2': r2_score(y_away_ft, away_ft_pred)} 155 | } 156 | 157 | logger.info("Performance des modèles:") 158 | for model_name, model_metrics in metrics.items(): 159 | logger.info(f"{model_name} - MSE: {model_metrics['mse']:.2f}, R2: {model_metrics['r2']:.2f}") 160 | 161 | self.is_trained = True 162 | self._save_models() 163 | 164 | except Exception as e: 165 | logger.error(f"Erreur lors de l'entraînement du modèle: {e}") 166 | raise 167 | 168 | def _save_models(self): 169 | 
"""Sauvegarde les modèles entraînés""" 170 | try: 171 | joblib.dump(self.home_ht_model, os.path.join(self.model_path, 'home_ht_model.joblib')) 172 | joblib.dump(self.home_ft_model, os.path.join(self.model_path, 'home_ft_model.joblib')) 173 | joblib.dump(self.away_ht_model, os.path.join(self.model_path, 'away_ht_model.joblib')) 174 | joblib.dump(self.away_ft_model, os.path.join(self.model_path, 'away_ft_model.joblib')) 175 | joblib.dump(self.scaler, os.path.join(self.model_path, 'scaler.joblib')) 176 | logger.info("Modèles sauvegardés avec succès") 177 | except Exception as e: 178 | logger.error(f"Erreur lors de la sauvegarde des modèles: {e}") 179 | 180 | def _load_models(self): 181 | """Charge les modèles sauvegardés""" 182 | try: 183 | self.home_ht_model = joblib.load(os.path.join(self.model_path, 'home_ht_model.joblib')) 184 | self.home_ft_model = joblib.load(os.path.join(self.model_path, 'home_ft_model.joblib')) 185 | self.away_ht_model = joblib.load(os.path.join(self.model_path, 'away_ht_model.joblib')) 186 | self.away_ft_model = joblib.load(os.path.join(self.model_path, 'away_ft_model.joblib')) 187 | self.scaler = joblib.load(os.path.join(self.model_path, 'scaler.joblib')) 188 | self.is_trained = True 189 | logger.info("Modèles chargés avec succès") 190 | except Exception as e: 191 | logger.error(f"Erreur lors du chargement des modèles: {e}") 192 | 193 | def predict_score(self, team1_stats: Dict, team2_stats: Dict) -> Dict[str, Tuple[int, int]]: 194 | """Prédit les scores des mi-temps et du match complet""" 195 | try: 196 | # Création des features 197 | home_features = self._create_features(team1_stats) 198 | away_features = self._create_features(team2_stats) 199 | 200 | # Normalisation des features 201 | home_features_scaled = self.scaler.transform(home_features) 202 | away_features_scaled = self.scaler.transform(away_features) 203 | 204 | # Prédiction des buts 205 | home_ht_goals = max(0, round(self.home_ht_model.predict(home_features_scaled)[0])) 206 | home_ft_goals = max(0, round(self.home_ft_model.predict(home_features_scaled)[0])) 207 | away_ht_goals = max(0, round(self.away_ht_model.predict(away_features_scaled)[0])) 208 | away_ft_goals = max(0, round(self.away_ft_model.predict(away_features_scaled)[0])) 209 | 210 | # Ajustement basé sur la forme récente 211 | home_form = self._calculate_form_metrics(team1_stats.get('form', [])) 212 | away_form = self._calculate_form_metrics(team2_stats.get('form', [])) 213 | 214 | # Ajustement des prédictions 215 | home_ht_goals = int(home_ht_goals * (1 + home_form['momentum'] * 0.1)) 216 | home_ft_goals = int(home_ft_goals * (1 + home_form['momentum'] * 0.1)) 217 | away_ht_goals = int(away_ht_goals * (1 + away_form['momentum'] * 0.1)) 218 | away_ft_goals = int(away_ft_goals * (1 + away_form['momentum'] * 0.1)) 219 | 220 | return { 221 | 'ht': (home_ht_goals, away_ht_goals), 222 | 'ft': (home_ft_goals, away_ft_goals) 223 | } 224 | 225 | except Exception as e: 226 | logger.error(f"Erreur lors de la prédiction: {e}") 227 | return { 228 | 'ht': (0, 0), 229 | 'ft': (0, 0) 230 | } -------------------------------------------------------------------------------- /bot.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | from telegram import Update, InlineKeyboardButton, InlineKeyboardMarkup 4 | from telegram.ext import Application, CommandHandler, CallbackQueryHandler, ContextTypes, MessageHandler, filters 5 | from dotenv import load_dotenv 6 | from data_manager import DataManager 7 
| from predictor import ScorePredictor 8 | import asyncio 9 | from datetime import datetime, timedelta 10 | 11 | # Configuration du logging 12 | logging.basicConfig( 13 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 14 | level=logging.INFO 15 | ) 16 | logger = logging.getLogger(__name__) 17 | 18 | # Chargement des variables d'environnement 19 | load_dotenv() 20 | 21 | # Configuration du bot 22 | TOKEN = os.getenv('TELEGRAM_BOT_TOKEN') 23 | FOOTBALL_API_KEY = os.getenv('FOOTBALL_API_KEY') 24 | 25 | # Initialisation des gestionnaires 26 | data_manager = DataManager(FOOTBALL_API_KEY) 27 | score_predictor = ScorePredictor() 28 | 29 | # Cache pour les prédictions fréquentes 30 | prediction_cache = {} 31 | CACHE_DURATION = timedelta(hours=1) 32 | 33 | async def start(update: Update, context: ContextTypes.DEFAULT_TYPE): 34 | """Commande /start - Accueil et présentation du bot""" 35 | welcome_message = ( 36 | "👋 Bienvenue sur le Bot de Prédiction de Matchs de Football!\n\n" 37 | "⚽ Je peux prédire les scores des matchs en utilisant l'IA et les statistiques historiques.\n\n" 38 | "📊 Fonctionnalités:\n" 39 | "• Prédiction des scores (mi-temps et match complet)\n" 40 | "• Statistiques détaillées des équipes\n" 41 | "• Forme récente des équipes\n" 42 | "• Prédictions pour les 5 grandes ligues européennes\n\n" 43 | "🔍 Commandes disponibles:\n" 44 | "/predire_match - Prédire un match spécifique\n" 45 | "/ligues - Voir les ligues disponibles\n" 46 | "/aide - Afficher l'aide\n" 47 | "/stats - Voir les statistiques globales" 48 | ) 49 | 50 | keyboard = [ 51 | [InlineKeyboardButton("🎯 Prédire un match", callback_data='predict')], 52 | [InlineKeyboardButton("📊 Voir les ligues", callback_data='leagues')], 53 | [InlineKeyboardButton("❓ Aide", callback_data='help')] 54 | ] 55 | reply_markup = InlineKeyboardMarkup(keyboard) 56 | 57 | await update.message.reply_text(welcome_message, reply_markup=reply_markup) 58 | 59 | async def show_leagues(update: Update, context: ContextTypes.DEFAULT_TYPE): 60 | """Affiche les ligues disponibles""" 61 | leagues = { 62 | 'PL': 'Premier League (Angleterre)', 63 | 'BL1': 'Bundesliga (Allemagne)', 64 | 'SA': 'Serie A (Italie)', 65 | 'PD': 'La Liga (Espagne)', 66 | 'FL1': 'Ligue 1 (France)' 67 | } 68 | 69 | message = "🏆 Ligues disponibles:\n\n" 70 | for code, name in leagues.items(): 71 | message += f"• {name} ({code})\n" 72 | 73 | keyboard = [[InlineKeyboardButton("🔙 Retour", callback_data='back')]] 74 | reply_markup = InlineKeyboardMarkup(keyboard) 75 | 76 | if update.callback_query: 77 | await update.callback_query.edit_message_text(message, reply_markup=reply_markup) 78 | else: 79 | await update.message.reply_text(message, reply_markup=reply_markup) 80 | 81 | async def help_command(update: Update, context: ContextTypes.DEFAULT_TYPE): 82 | """Commande /aide - Affiche l'aide""" 83 | help_text = ( 84 | "📚 Guide d'utilisation du Bot:\n\n" 85 | "1️⃣ Pour prédire un match:\n" 86 | " • Utilisez /predire_match\n" 87 | " • Suivez les instructions pour sélectionner la ligue et les équipes\n\n" 88 | "2️⃣ Pour voir les ligues disponibles:\n" 89 | " • Utilisez /ligues\n" 90 | " • Choisissez parmi les 5 grandes ligues européennes\n\n" 91 | "3️⃣ Pour voir les statistiques:\n" 92 | " • Utilisez /stats\n" 93 | " • Consultez les performances globales\n\n" 94 | "💡 Conseil: Utilisez les boutons interactifs pour une navigation plus facile!" 
95 | ) 96 | 97 | keyboard = [[InlineKeyboardButton("🔙 Retour", callback_data='back')]] 98 | reply_markup = InlineKeyboardMarkup(keyboard) 99 | 100 | if update.callback_query: 101 | await update.callback_query.edit_message_text(help_text, reply_markup=reply_markup) 102 | else: 103 | await update.message.reply_text(help_text, reply_markup=reply_markup) 104 | 105 | async def show_stats(update: Update, context: ContextTypes.DEFAULT_TYPE): 106 | """Affiche les statistiques globales""" 107 | stats = data_manager.get_global_stats() 108 | 109 | message = ( 110 | "📊 Statistiques Globales:\n\n" 111 | f"⚽ Nombre total de matchs analysés: {stats['total_matches']}\n" 112 | f"🎯 Précision moyenne des prédictions: {stats['accuracy']:.1f}%\n" 113 | f"🏆 Ligues couvertes: {stats['leagues_covered']}\n" 114 | f"📈 Données mises à jour: {stats['last_update']}\n\n" 115 | "💡 Ces statistiques sont basées sur les 3 dernières saisons." 116 | ) 117 | 118 | keyboard = [[InlineKeyboardButton("🔙 Retour", callback_data='back')]] 119 | reply_markup = InlineKeyboardMarkup(keyboard) 120 | 121 | if update.callback_query: 122 | await update.callback_query.edit_message_text(message, reply_markup=reply_markup) 123 | else: 124 | await update.message.reply_text(message, reply_markup=reply_markup) 125 | 126 | async def predire_match(update: Update, context: ContextTypes.DEFAULT_TYPE): 127 | """Commande /predire_match - Prédit le score d'un match""" 128 | keyboard = [] 129 | for code, name in { 130 | 'PL': 'Premier League', 131 | 'BL1': 'Bundesliga', 132 | 'SA': 'Serie A', 133 | 'PD': 'La Liga', 134 | 'FL1': 'Ligue 1' 135 | }.items(): 136 | keyboard.append([InlineKeyboardButton(name, callback_data=f'league_{code}')]) 137 | 138 | keyboard.append([InlineKeyboardButton("❌ Annuler", callback_data='cancel')]) 139 | reply_markup = InlineKeyboardMarkup(keyboard) 140 | 141 | await update.message.reply_text( 142 | "🏆 Sélectionnez la ligue pour le match:", 143 | reply_markup=reply_markup 144 | ) 145 | 146 | async def handle_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): 147 | """Gère les callbacks des boutons inline""" 148 | query = update.callback_query 149 | await query.answer() 150 | 151 | if query.data == 'back': 152 | keyboard = [ 153 | [InlineKeyboardButton("🎯 Prédire un match", callback_data='predict')], 154 | [InlineKeyboardButton("📊 Voir les ligues", callback_data='leagues')], 155 | [InlineKeyboardButton("❓ Aide", callback_data='help')] 156 | ] 157 | reply_markup = InlineKeyboardMarkup(keyboard) 158 | await query.edit_message_text( 159 | "👋 Bienvenue! Que souhaitez-vous faire?", 160 | reply_markup=reply_markup 161 | ) 162 | 163 | elif query.data == 'predict': 164 | await predire_match(update, context) 165 | 166 | elif query.data == 'leagues': 167 | await show_leagues(update, context) 168 | 169 | elif query.data == 'help': 170 | await help_command(update, context) 171 | 172 | elif query.data == 'cancel': 173 | await query.edit_message_text( 174 | "❌ Opération annulée. 
Utilisez /start pour recommencer.", 175 | reply_markup=InlineKeyboardMarkup([[InlineKeyboardButton("🔙 Retour", callback_data='back')]]) 176 | ) 177 | 178 | elif query.data.startswith('league_'): 179 | league = query.data.split('_')[1] 180 | context.user_data['selected_league'] = league 181 | await show_teams(update, context, league) 182 | 183 | async def show_teams(update: Update, context: ContextTypes.DEFAULT_TYPE, league: str): 184 | """Affiche les équipes de la ligue sélectionnée""" 185 | teams = data_manager.get_league_teams(league) 186 | 187 | keyboard = [] 188 | for team in teams: 189 | keyboard.append([InlineKeyboardButton(team['name'], callback_data=f'team_{team["id"]}')]) 190 | 191 | keyboard.append([InlineKeyboardButton("🔙 Retour", callback_data='predict')]) 192 | reply_markup = InlineKeyboardMarkup(keyboard) 193 | 194 | await update.callback_query.edit_message_text( 195 | "⚽ Sélectionnez la première équipe:", 196 | reply_markup=reply_markup 197 | ) 198 | 199 | async def handle_team_selection(update: Update, context: ContextTypes.DEFAULT_TYPE): 200 | """Gère la sélection des équipes""" 201 | query = update.callback_query 202 | team_id = query.data.split('_')[1] 203 | 204 | if 'team1' not in context.user_data: 205 | context.user_data['team1'] = team_id 206 | await show_opponent_teams(update, context) 207 | else: 208 | context.user_data['team2'] = team_id 209 | await make_prediction(update, context) 210 | 211 | async def show_opponent_teams(update: Update, context: ContextTypes.DEFAULT_TYPE): 212 | """Affiche les équipes adverses possibles""" 213 | league = context.user_data['selected_league'] 214 | team1_id = context.user_data['team1'] 215 | teams = data_manager.get_league_teams(league) 216 | 217 | keyboard = [] 218 | for team in teams: 219 | if team['id'] != team1_id: 220 | keyboard.append([InlineKeyboardButton(team['name'], callback_data=f'team_{team["id"]}')]) 221 | 222 | keyboard.append([InlineKeyboardButton("🔙 Retour", callback_data=f'league_{league}')]) 223 | reply_markup = InlineKeyboardMarkup(keyboard) 224 | 225 | await update.callback_query.edit_message_text( 226 | "⚽ Sélectionnez la deuxième équipe:", 227 | reply_markup=reply_markup 228 | ) 229 | 230 | async def make_prediction(update: Update, context: ContextTypes.DEFAULT_TYPE): 231 | """Fait la prédiction du match""" 232 | query = update.callback_query 233 | team1_id = context.user_data['team1'] 234 | team2_id = context.user_data['team2'] 235 | 236 | # Vérification du cache 237 | cache_key = f"{team1_id}_{team2_id}" 238 | if cache_key in prediction_cache: 239 | cache_time, prediction = prediction_cache[cache_key] 240 | if datetime.now() - cache_time < CACHE_DURATION: 241 | await send_prediction(update, prediction) 242 | return 243 | 244 | # Récupération des données 245 | team1_stats = data_manager.get_team_stats(team1_id) 246 | team2_stats = data_manager.get_team_stats(team2_id) 247 | 248 | # Prédiction 249 | prediction = score_predictor.predict_score(team1_stats, team2_stats) 250 | 251 | # Mise en cache 252 | prediction_cache[cache_key] = (datetime.now(), prediction) 253 | 254 | await send_prediction(update, prediction) 255 | 256 | async def send_prediction(update: Update, prediction: dict): 257 | """Envoie la prédiction formatée""" 258 | ht_home, ht_away = prediction['ht'] 259 | ft_home, ft_away = prediction['ft'] 260 | 261 | message = ( 262 | "🎯 Prédiction du match:\n\n" 263 | f"⚽ Mi-temps: {ht_home} - {ht_away}\n" 264 | f"🏆 Match complet: {ft_home} - {ft_away}\n\n" 265 | "💡 Cette prédiction est basée sur:\n" 266 | 
"• Statistiques historiques\n" 267 | "• Forme récente des équipes\n" 268 | "• Modèle d'IA entraîné\n\n" 269 | "⚠️ Cette prédiction est à titre indicatif uniquement." 270 | ) 271 | 272 | keyboard = [[InlineKeyboardButton("🔙 Nouvelle prédiction", callback_data='predict')]] 273 | reply_markup = InlineKeyboardMarkup(keyboard) 274 | 275 | await update.callback_query.edit_message_text(message, reply_markup=reply_markup) 276 | 277 | def main(): 278 | """Fonction principale""" 279 | if not TOKEN: 280 | logger.error("Token Telegram non trouvé!") 281 | return 282 | 283 | # Création de l'application 284 | application = Application.builder().token(TOKEN).build() 285 | 286 | # Ajout des handlers 287 | application.add_handler(CommandHandler("start", start)) 288 | application.add_handler(CommandHandler("predire_match", predire_match)) 289 | application.add_handler(CommandHandler("aide", help_command)) 290 | application.add_handler(CommandHandler("ligues", show_leagues)) 291 | application.add_handler(CommandHandler("stats", show_stats)) 292 | 293 | # Handler pour les callbacks 294 | application.add_handler(CallbackQueryHandler(handle_callback)) 295 | application.add_handler(CallbackQueryHandler(handle_team_selection, pattern='^team_')) 296 | 297 | # Démarrage du bot 298 | logger.info("Démarrage du bot...") 299 | application.run_polling(allowed_updates=Update.ALL_TYPES) 300 | 301 | if __name__ == '__main__': 302 | main() -------------------------------------------------------------------------------- /data_manager.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import pandas as pd 3 | import numpy as np 4 | from datetime import datetime, timedelta 5 | import logging 6 | from typing import Dict, List, Optional, Tuple 7 | import json 8 | import os 9 | from functools import lru_cache 10 | import asyncio 11 | import aiohttp 12 | from concurrent.futures import ThreadPoolExecutor 13 | import redis 14 | from typing import Dict, List, Optional, Tuple, Any 15 | import pickle 16 | from apscheduler.schedulers.asyncio import AsyncIOScheduler 17 | from tenacity import retry, stop_after_attempt, wait_exponential 18 | 19 | logger = logging.getLogger(__name__) 20 | 21 | class DataManager: 22 | def __init__(self, api_key: str): 23 | self.api_key = api_key 24 | self.base_url = "http://api.football-data.org/v4" 25 | self.openliga_url = "https://api.openligadb.de" 26 | self.thesportsdb_url = "https://www.thesportsdb.com/api/v1/json" 27 | self.headers = { 28 | 'X-Auth-Token': api_key, 29 | 'User-Agent': 'Football-Prediction-Bot/1.0' 30 | } 31 | 32 | # Configuration des ligues 33 | self.leagues = { 34 | 'PL': { 35 | 'name': 'Premier League', 36 | 'country': 'England', 37 | 'source': 'football-data.org', 38 | 'id': 'PL', 39 | 'update_interval': 5 # minutes 40 | }, 41 | 'PD': { 42 | 'name': 'La Liga', 43 | 'country': 'Spain', 44 | 'source': 'football-data.org', 45 | 'id': 'PD', 46 | 'update_interval': 5 47 | }, 48 | 'BL1': { 49 | 'name': 'Bundesliga', 50 | 'country': 'Germany', 51 | 'source': 'openligadb', 52 | 'id': 'BL1', 53 | 'update_interval': 3 54 | }, 55 | 'SA': { 56 | 'name': 'Serie A', 57 | 'country': 'Italy', 58 | 'source': 'football-data.org', 59 | 'id': 'SA', 60 | 'update_interval': 5 61 | }, 62 | 'FL1': { 63 | 'name': 'Ligue 1', 64 | 'country': 'France', 65 | 'source': 'football-data.org', 66 | 'id': 'FL1', 67 | 'update_interval': 5 68 | } 69 | } 70 | 71 | # Configuration du cache Redis 72 | self.redis_client = redis.Redis( 73 | host='localhost', 74 | 
port=6379, 75 | db=0, 76 | decode_responses=True 77 | ) 78 | 79 | # Cache en mémoire avec TTL 80 | self.cache = {} 81 | self.cache_duration = timedelta(hours=1) 82 | self.teams_cache = {} 83 | self.leagues_cache = {} 84 | self.stats_cache = {} 85 | self.matches_cache = {} 86 | 87 | # Création des dossiers nécessaires 88 | self.cache_dir = "cache" 89 | self.models_dir = "models" 90 | os.makedirs(self.cache_dir, exist_ok=True) 91 | os.makedirs(self.models_dir, exist_ok=True) 92 | 93 | # Initialisation du pool de threads 94 | self.thread_pool = ThreadPoolExecutor(max_workers=10) 95 | 96 | # Configuration des limites de l'API 97 | self.rate_limits = { 98 | 'football-data.org': {'requests': 10, 'period': 60}, 99 | 'openligadb': {'requests': 30, 'period': 60}, 100 | 'thesportsdb': {'requests': 20, 'period': 60} 101 | } 102 | self.last_request_times = {} 103 | self.request_counts = {} 104 | 105 | # Initialisation du scheduler pour les mises à jour 106 | self.scheduler = AsyncIOScheduler() 107 | self._setup_update_scheduler() 108 | 109 | def _setup_update_scheduler(self): 110 | """Configure le scheduler pour les mises à jour automatiques""" 111 | for league_code, league_info in self.leagues.items(): 112 | self.scheduler.add_job( 113 | self._update_league_data, 114 | 'interval', 115 | minutes=league_info['update_interval'], 116 | args=[league_code], 117 | id=f'update_{league_code}' 118 | ) 119 | self.scheduler.start() 120 | 121 | async def _update_league_data(self, league_code: str): 122 | """Met à jour les données d'une ligue""" 123 | try: 124 | logger.info(f"Mise à jour des données pour la ligue {league_code}") 125 | 126 | # Mise à jour des matchs 127 | matches = await self.get_league_matches(league_code) 128 | if matches: 129 | self._update_matches_cache(league_code, matches) 130 | 131 | # Mise à jour des équipes 132 | teams = await self.get_league_teams(league_code) 133 | if teams: 134 | self._update_teams_cache(league_code, teams) 135 | 136 | # Mise à jour des statistiques 137 | for team in teams: 138 | stats = await self.get_team_stats(team['id']) 139 | if stats: 140 | self._update_team_stats_cache(team['id'], stats) 141 | 142 | logger.info(f"Mise à jour terminée pour la ligue {league_code}") 143 | 144 | except Exception as e: 145 | logger.error(f"Erreur lors de la mise à jour de la ligue {league_code}: {e}") 146 | 147 | @retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10)) 148 | async def _make_api_request(self, url: str, source: str, params: Dict = None) -> Optional[Dict]: 149 | """Fait une requête API avec gestion du rate limiting et retry""" 150 | current_time = datetime.now() 151 | rate_limit = self.rate_limits[source] 152 | 153 | if source not in self.last_request_times: 154 | self.last_request_times[source] = current_time 155 | self.request_counts[source] = 0 156 | 157 | if (current_time - self.last_request_times[source]).seconds < rate_limit['period']: 158 | if self.request_counts[source] >= rate_limit['requests']: 159 | wait_time = rate_limit['period'] - (current_time - self.last_request_times[source]).seconds 160 | logger.info(f"Rate limit atteint pour {source}, attente de {wait_time} secondes") 161 | await asyncio.sleep(wait_time) 162 | self.request_counts[source] = 0 163 | self.last_request_times[source] = datetime.now() 164 | 165 | self.request_counts[source] += 1 166 | self.last_request_times[source] = datetime.now() 167 | 168 | async with aiohttp.ClientSession() as session: 169 | try: 170 | async with session.get(url, headers=self.headers, 
params=params) as response: 171 | if response.status == 200: 172 | return await response.json() 173 | elif response.status == 429: # Rate limit 174 | logger.warning(f"Rate limit atteint pour {source}") 175 | await asyncio.sleep(rate_limit['period']) 176 | return await self._make_api_request(url, source, params) 177 | else: 178 | logger.error(f"Erreur API {source}: {response.status}") 179 | return None 180 | except Exception as e: 181 | logger.error(f"Erreur lors de la requête API {source}: {e}") 182 | return None 183 | 184 | def _update_matches_cache(self, league_code: str, matches: List[Dict]): 185 | """Met à jour le cache des matchs""" 186 | cache_key = f"league_matches_{league_code}" 187 | self.matches_cache[cache_key] = (matches, datetime.now()) 188 | self._set_cached_data(cache_key, matches) 189 | 190 | def _update_teams_cache(self, league_code: str, teams: List[Dict]): 191 | """Met à jour le cache des équipes""" 192 | cache_key = f"league_teams_{league_code}" 193 | self.teams_cache[cache_key] = (teams, datetime.now()) 194 | self._set_cached_data(cache_key, teams) 195 | 196 | def _update_team_stats_cache(self, team_id: int, stats: Dict): 197 | """Met à jour le cache des statistiques d'équipe""" 198 | cache_key = f"team_stats_{team_id}" 199 | self.stats_cache[cache_key] = (stats, datetime.now()) 200 | self._set_cached_data(cache_key, stats) 201 | 202 | async def get_live_matches(self, league_id: str = None) -> List[Dict]: 203 | """Récupère les matchs en direct""" 204 | try: 205 | # Vérification du cache 206 | cache_key = f"live_matches_{league_id if league_id else 'all'}" 207 | cached_data = self._get_cached_data(cache_key) 208 | if cached_data: 209 | return cached_data 210 | 211 | # Construction de l'URL 212 | url = f"{self.base_url}/matches" 213 | params = {'status': 'LIVE'} 214 | if league_id: 215 | params['competitions'] = league_id 216 | 217 | # Récupération des données 218 | data = await self._make_api_request(url, 'football-data.org', params) 219 | if not data or 'matches' not in data: 220 | return [] 221 | 222 | # Traitement des données 223 | live_matches = [] 224 | for match in data['matches']: 225 | match_info = { 226 | 'id': match['id'], 227 | 'competition': match['competition']['name'], 228 | 'home_team': match['homeTeam']['name'], 229 | 'away_team': match['awayTeam']['name'], 230 | 'score': { 231 | 'home': match['score']['fullTime']['home'], 232 | 'away': match['score']['fullTime']['away'] 233 | }, 234 | 'minute': match['minute'], 235 | 'status': match['status'] 236 | } 237 | live_matches.append(match_info) 238 | 239 | # Mise en cache 240 | self._set_redis_cache(cache_key, live_matches, ttl=300) # Cache de 5 minutes pour les matchs en direct 241 | return live_matches 242 | 243 | except Exception as e: 244 | logger.error(f"Erreur lors de la récupération des matchs en direct: {e}") 245 | return [] 246 | 247 | async def get_upcoming_matches(self, hours: int = 24) -> List[Dict]: 248 | """Récupère les prochains matchs dans les heures à venir""" 249 | upcoming_matches = [] 250 | try: 251 | for league_code in self.leagues: 252 | matches = await self.get_league_matches(league_code) 253 | now = datetime.now() 254 | future = now + timedelta(hours=hours) 255 | 256 | for match in matches: 257 | match_date = datetime.strptime(match['date'], '%Y-%m-%d %H:%M:%S') 258 | if now <= match_date <= future: 259 | upcoming_matches.append(match) 260 | 261 | return upcoming_matches 262 | except Exception as e: 263 | logger.error(f"Erreur lors de la récupération des prochains matchs: {e}") 264 | return [] 265 | 266 | 
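    # Illustrative sketch (editorial addition, not in the original class): get_upcoming_matches above
    # and get_recent_matches below apply the same date-window filter. Factored out under the same
    # assumption the original makes, namely that each match dict carries a 'date' string formatted as
    # '%Y-%m-%d %H:%M:%S', it could look like this (the helper name is invented; it reuses the
    # datetime/typing imports already at the top of this module):
    @staticmethod
    def _filter_matches_by_window(matches: List[Dict], start: datetime, end: datetime) -> List[Dict]:
        """Keep only the matches whose 'date' falls inside the [start, end] window."""
        selected = []
        for match in matches:
            match_date = datetime.strptime(match['date'], '%Y-%m-%d %H:%M:%S')
            if start <= match_date <= end:
                selected.append(match)
        return selected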
async def get_recent_matches(self, days: int = 7) -> List[Dict]: 267 | """Récupère les matchs récents""" 268 | recent_matches = [] 269 | try: 270 | for league_code in self.leagues: 271 | matches = await self.get_league_matches(league_code) 272 | now = datetime.now() 273 | past = now - timedelta(days=days) 274 | 275 | for match in matches: 276 | match_date = datetime.strptime(match['date'], '%Y-%m-%d %H:%M:%S') 277 | if past <= match_date <= now: 278 | recent_matches.append(match) 279 | 280 | return recent_matches 281 | except Exception as e: 282 | logger.error(f"Erreur lors de la récupération des matchs récents: {e}") 283 | return [] 284 | 285 | def shutdown(self): 286 | """Arrête le scheduler et nettoie les ressources""" 287 | self.scheduler.shutdown() 288 | self.thread_pool.shutdown(wait=True) 289 | self.redis_client.close() 290 | 291 | def _get_redis_cache(self, key: str) -> Optional[Dict]: 292 | """Récupère les données du cache Redis""" 293 | try: 294 | data = self.redis_client.get(key) 295 | if data: 296 | return pickle.loads(data) 297 | except Exception as e: 298 | logger.error(f"Erreur Redis: {e}") 299 | return None 300 | 301 | def _set_redis_cache(self, key: str, data: Dict, ttl: int = 3600): 302 | """Stocke les données dans Redis""" 303 | try: 304 | self.redis_client.setex( 305 | key, 306 | ttl, 307 | pickle.dumps(data) 308 | ) 309 | except Exception as e: 310 | logger.error(f"Erreur Redis: {e}") 311 | 312 | def _get_cached_data(self, key: str) -> Optional[Dict]: 313 | """Récupère les données du cache multi-niveaux""" 314 | # 1. Vérification du cache Redis 315 | redis_data = self._get_redis_cache(key) 316 | if redis_data: 317 | return redis_data 318 | 319 | # 2. Vérification du cache en mémoire 320 | if key in self.cache: 321 | data, timestamp = self.cache[key] 322 | if datetime.now() - timestamp < self.cache_duration: 323 | return data 324 | 325 | # 3. Vérification du cache fichier 326 | cached_data = self._load_from_file_cache(key) 327 | if cached_data: 328 | self.cache[key] = (cached_data, datetime.now()) 329 | self._set_redis_cache(key, cached_data) 330 | return cached_data 331 | 332 | return None 333 | 334 | def _set_cached_data(self, key: str, data: Dict): 335 | """Stocke les données dans le cache multi-niveaux""" 336 | # 1. Mise en cache Redis 337 | self._set_redis_cache(key, data) 338 | 339 | # 2. Mise en cache en mémoire 340 | self.cache[key] = (data, datetime.now()) 341 | 342 | # 3. 
Mise en cache fichier 343 | cache_file = os.path.join(self.cache_dir, f"{key}.json") 344 | with open(cache_file, 'w') as f: 345 | json.dump(data, f) 346 | 347 | def _load_from_file_cache(self, key: str) -> Optional[Dict]: 348 | """Charge les données depuis le cache fichier""" 349 | cache_file = os.path.join(self.cache_dir, f"{key}.json") 350 | if os.path.exists(cache_file): 351 | try: 352 | with open(cache_file, 'r') as f: 353 | return json.load(f) 354 | except Exception as e: 355 | logger.error(f"Erreur lors du chargement du cache fichier {key}: {e}") 356 | return None 357 | 358 | def search_team(self, team_name: str) -> Optional[Dict]: 359 | """Recherche une équipe par son nom""" 360 | cache_key = f"team_search_{team_name}" 361 | cached_data = self._get_cached_data(cache_key) 362 | if cached_data: 363 | return cached_data 364 | 365 | try: 366 | response = requests.get( 367 | f"{self.base_url}/teams", 368 | headers=self.headers, 369 | params={'name': team_name} 370 | ) 371 | response.raise_for_status() 372 | teams = response.json().get('teams', []) 373 | 374 | if teams: 375 | team_data = teams[0] 376 | self._set_cached_data(cache_key, team_data) 377 | return team_data 378 | return None 379 | except Exception as e: 380 | logger.error(f"Erreur lors de la recherche de l'équipe {team_name}: {e}") 381 | return None 382 | 383 | def get_team_matches(self, team_id: int, limit: int = 10) -> List[Dict]: 384 | """Récupère les derniers matchs d'une équipe""" 385 | cache_key = f"team_matches_{team_id}" 386 | cached_data = self._get_cached_data(cache_key) 387 | if cached_data: 388 | return cached_data 389 | 390 | try: 391 | response = requests.get( 392 | f"{self.base_url}/teams/{team_id}/matches", 393 | headers=self.headers, 394 | params={'limit': limit} 395 | ) 396 | response.raise_for_status() 397 | matches = response.json().get('matches', []) 398 | 399 | self._set_cached_data(cache_key, matches) 400 | return matches 401 | except Exception as e: 402 | logger.error(f"Erreur lors de la récupération des matchs de l'équipe {team_id}: {e}") 403 | return [] 404 | 405 | @lru_cache(maxsize=50) 406 | async def get_league_matches(self, league_code: str, season: str = None) -> List[Dict]: 407 | """Récupère les matchs d'une ligue depuis la source appropriée""" 408 | cache_key = f"league_matches_{league_code}_{season}" 409 | 410 | # Vérification du cache 411 | if cache_key in self.matches_cache: 412 | data, timestamp = self.matches_cache[cache_key] 413 | if datetime.now() - timestamp < self.cache_duration: 414 | return data 415 | 416 | league_info = self.leagues.get(league_code) 417 | if not league_info: 418 | logger.error(f"Ligue non trouvée: {league_code}") 419 | return [] 420 | 421 | try: 422 | if league_info['source'] == 'football-data.org': 423 | params = {'season': season} if season else {} 424 | response = await self._make_api_request( 425 | f"{self.base_url}/competitions/{league_code}/matches", 426 | 'football-data.org', 427 | params 428 | ) 429 | if response: 430 | matches_data = response['matches'] 431 | elif league_info['source'] == 'openligadb': 432 | response = await self._make_api_request( 433 | f"{self.openliga_url}/getmatchdata/{league_code}", 434 | 'openligadb' 435 | ) 436 | if response: 437 | matches_data = self._format_openliga_matches(response) 438 | else: 439 | logger.error(f"Source de données non supportée: {league_info['source']}") 440 | return [] 441 | 442 | # Mise en cache 443 | self.matches_cache[cache_key] = (matches_data, datetime.now()) 444 | self._set_cached_data(cache_key, 
matches_data) 445 | 446 | return matches_data 447 | 448 | except Exception as e: 449 | logger.error(f"Erreur lors de la récupération des matchs de la ligue {league_code}: {e}") 450 | return [] 451 | 452 | def _format_openliga_matches(self, data: List[Dict]) -> List[Dict]: 453 | """Formate les données de matchs d'OpenLigaDB""" 454 | formatted_matches = [] 455 | for match in data: 456 | formatted_match = { 457 | 'id': match.get('MatchID'), 458 | 'utcDate': match.get('MatchDateTime'), 459 | 'status': 'FINISHED' if match.get('MatchIsFinished') else 'SCHEDULED', 460 | 'homeTeam': { 461 | 'id': match.get('Team1', {}).get('TeamId'), 462 | 'name': match.get('Team1', {}).get('TeamName') 463 | }, 464 | 'awayTeam': { 465 | 'id': match.get('Team2', {}).get('TeamId'), 466 | 'name': match.get('Team2', {}).get('TeamName') 467 | }, 468 | 'score': { 469 | 'halfTime': { 470 | 'home': match.get('MatchResults', [{}])[0].get('PointsTeam1', 0), 471 | 'away': match.get('MatchResults', [{}])[0].get('PointsTeam2', 0) 472 | }, 473 | 'fullTime': { 474 | 'home': match.get('MatchResults', [{}])[-1].get('PointsTeam1', 0), 475 | 'away': match.get('MatchResults', [{}])[-1].get('PointsTeam2', 0) 476 | } 477 | } 478 | } 479 | formatted_matches.append(formatted_match) 480 | return formatted_matches 481 | 482 | @lru_cache(maxsize=10) 483 | async def get_league_teams(self, league_code: str) -> List[Dict]: 484 | """Récupère les équipes d'une ligue depuis la source appropriée""" 485 | cache_key = f"league_teams_{league_code}" 486 | 487 | # Vérification du cache 488 | if cache_key in self.teams_cache: 489 | data, timestamp = self.teams_cache[cache_key] 490 | if datetime.now() - timestamp < self.cache_duration: 491 | return data 492 | 493 | league_info = self.leagues.get(league_code) 494 | if not league_info: 495 | logger.error(f"Ligue non trouvée: {league_code}") 496 | return [] 497 | 498 | try: 499 | if league_info['source'] == 'football-data.org': 500 | response = await self._make_api_request( 501 | f"{self.base_url}/competitions/{league_code}/teams", 502 | 'football-data.org' 503 | ) 504 | if response: 505 | teams_data = response['teams'] 506 | elif league_info['source'] == 'openligadb': 507 | response = await self._make_api_request( 508 | f"{self.openliga_url}/getavailableteams/{league_code}", 509 | 'openligadb' 510 | ) 511 | if response: 512 | teams_data = self._format_openliga_teams(response) 513 | else: 514 | logger.error(f"Source de données non supportée: {league_info['source']}") 515 | return [] 516 | 517 | # Mise en cache 518 | self.teams_cache[cache_key] = (teams_data, datetime.now()) 519 | self._set_cached_data(cache_key, teams_data) 520 | 521 | return teams_data 522 | 523 | except Exception as e: 524 | logger.error(f"Erreur lors de la récupération des équipes de la ligue {league_code}: {e}") 525 | return [] 526 | 527 | def _format_openliga_teams(self, data: List[Dict]) -> List[Dict]: 528 | """Formate les données d'équipes d'OpenLigaDB""" 529 | formatted_teams = [] 530 | for team in data: 531 | formatted_team = { 532 | 'id': team.get('TeamId'), 533 | 'name': team.get('TeamName'), 534 | 'shortName': team.get('ShortName'), 535 | 'tla': team.get('TeamIconUrl', '').split('/')[-1].split('.')[0], 536 | 'crestUrl': team.get('TeamIconUrl') 537 | } 538 | formatted_teams.append(formatted_team) 539 | return formatted_teams 540 | 541 | def get_global_stats(self) -> Dict: 542 | """Récupère les statistiques globales""" 543 | cache_key = "global_stats" 544 | 545 | # Vérification du cache 546 | if cache_key in self.cache: 547 | 
data, timestamp = self.cache[cache_key] 548 | if datetime.now() - timestamp < self.cache_duration: 549 | return data 550 | 551 | try: 552 | stats = { 553 | 'total_matches': 0, 554 | 'accuracy': 0.0, 555 | 'leagues_covered': 5, 556 | 'last_update': datetime.now().strftime("%Y-%m-%d %H:%M:%S") 557 | } 558 | 559 | # Calcul des statistiques globales 560 | leagues = ['PL', 'BL1', 'SA', 'PD', 'FL1'] 561 | total_matches = 0 562 | correct_predictions = 0 563 | 564 | for league in leagues: 565 | matches = self._get_cached_data(f"league_matches_{league}") or []  # get_league_matches is a coroutine; count from the cache kept fresh by the scheduler instead 566 | total_matches += len(matches) 567 | 568 | # Calcul de la précision (exemple simplifié) 569 | correct_predictions += len(matches) * 0.65 # Estimation 570 | 571 | stats['total_matches'] = total_matches 572 | stats['accuracy'] = (correct_predictions / total_matches * 100) if total_matches > 0 else 0 573 | 574 | # Mise en cache 575 | self.cache[cache_key] = (stats, datetime.now()) 576 | self._set_cached_data(cache_key, stats) 577 | 578 | return stats 579 | 580 | except Exception as e: 581 | logger.error(f"Erreur lors de la récupération des stats globales: {e}") 582 | return { 583 | 'total_matches': 0, 584 | 'accuracy': 0.0, 585 | 'leagues_covered': 0, 586 | 'last_update': datetime.now().strftime("%Y-%m-%d %H:%M:%S") 587 | } 588 | 589 | async def prepare_match_data(self, team1: str, team2: str) -> Optional[Tuple[Dict, Dict]]: 590 | """Prépare les données pour la prédiction d'un match""" 591 | team1_data = self.search_team(team1) 592 | team2_data = self.search_team(team2) 593 | 594 | if not team1_data or not team2_data: 595 | return None 596 | 597 | team1_stats = await self.get_team_stats(team1_data['id']) 598 | team2_stats = await self.get_team_stats(team2_data['id']) 599 | 600 | return (team1_stats, team2_stats) 601 | 602 | # NOTE: no functools.lru_cache here - it would cache the coroutine object itself; the multi-level cache below already memoises results 603 | async def get_team_stats(self, team_id: int) -> Dict: 604 | """Récupère les statistiques d'une équipe avec cache multi-niveaux""" 605 | cache_key = f"team_stats_{team_id}" 606 | 607 | # 1. Vérification du cache en mémoire 608 | if cache_key in self.stats_cache: 609 | data, timestamp = self.stats_cache[cache_key] 610 | if datetime.now() - timestamp < self.cache_duration: 611 | return data 612 | 613 | # 2.

    # functools.lru_cache is deliberately not applied to this coroutine (it would cache
    # the coroutine object, not its result); the multi-level cache below handles memoization.
    async def get_team_stats(self, team_id: int) -> Dict:
        """Fetches a team's statistics using a multi-level cache."""
        cache_key = f"team_stats_{team_id}"

        # 1. In-memory cache check
        if cache_key in self.stats_cache:
            data, timestamp = self.stats_cache[cache_key]
            if datetime.now() - timestamp < self.cache_duration:
                return data

        # 2. File cache check
        cached_data = self._load_from_file_cache(cache_key)
        if cached_data:
            self.stats_cache[cache_key] = (cached_data, datetime.now())
            return cached_data

        try:
            # Fetch team details and recent matches in parallel
            team_data_task = self._make_api_request(f"{self.base_url}/teams/{team_id}", 'football-data.org')
            matches_task = self._make_api_request(
                f"{self.base_url}/teams/{team_id}/matches",
                'football-data.org',
                params={'limit': 50}
            )

            team_data, matches = await asyncio.gather(team_data_task, matches_task)

            if not team_data or not matches:
                return {}

            # Base statistics structure
            stats = {
                'id': team_id,
                'name': team_data.get('name', ''),
                'goals_scored': 0,
                'goals_conceded': 0,
                'wins': 0,
                'draws': 0,
                'losses': 0,
                'form': [],
                'ht_goals_scored': 0,
                'ht_goals_conceded': 0,
                'st_goals_scored': 0,
                'st_goals_conceded': 0,
                'recent_form': [],
                'momentum': 0,
                'home_performance': 0,
                'away_performance': 0,
                'clean_sheets': 0,
                'failed_to_score': 0,
                'avg_goals_scored': 0,
                'avg_goals_conceded': 0,
                'last_5_matches': [],
                'advanced_stats': {
                    'xG': 0,               # Expected goals
                    'xGA': 0,              # Expected goals against
                    'possession': 0,       # Average possession
                    'shots_on_target': 0,
                    'shots_conceded': 0,
                    'pass_accuracy': 0,
                    'fouls_committed': 0,
                    'fouls_suffered': 0
                }
            }

            # Aggregate the matches off the event loop, in the thread pool
            loop = asyncio.get_event_loop()
            match_stats = await loop.run_in_executor(
                self.thread_pool,
                self._process_matches,
                matches.get('matches', []),
                team_id
            )

            # Merge the per-match aggregates
            stats.update(match_stats)

            # Advanced statistics
            stats['advanced_stats'] = self._calculate_advanced_stats(matches.get('matches', []), team_id)

            # Multi-level caching
            self.stats_cache[cache_key] = (stats, datetime.now())
            self._set_cached_data(cache_key, stats)

            return stats

        except Exception as e:
            logger.error(f"Error while fetching stats for team {team_id}: {e}")
            return {}
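
    # Illustrative sketch (editor addition, not part of the original module):
    # the "look up the key, then compare its timestamp against cache_duration" pattern
    # is repeated in get_league_teams, get_global_stats and get_team_stats. A helper
    # like this one could factor it out; nothing in the module calls it yet.
    def _read_memory_cache(self, cache: Dict, cache_key: str):
        """Returns the cached value if present and still fresh, otherwise None."""
        entry = cache.get(cache_key)
        if not entry:
            return None
        data, timestamp = entry
        if datetime.now() - timestamp < self.cache_duration:
            return data
        return None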

    def _process_matches(self, matches: List[Dict], team_id: int) -> Dict:
        """Aggregates per-match statistics (executed in the thread pool)."""
        stats = {
            'goals_scored': 0,
            'goals_conceded': 0,
            'wins': 0,
            'draws': 0,
            'losses': 0,
            'form': [],
            'ht_goals_scored': 0,
            'ht_goals_conceded': 0,
            'st_goals_scored': 0,
            'st_goals_conceded': 0,
            'clean_sheets': 0,
            'failed_to_score': 0,
            'home_performance': 0,
            'away_performance': 0,
            'last_5_matches': []
        }

        finished_matches = 0

        for match in matches:
            if match['status'] == 'FINISHED':
                finished_matches += 1
                is_home = match['homeTeam']['id'] == team_id
                goals_for = match['score']['fullTime']['home'] if is_home else match['score']['fullTime']['away']
                goals_against = match['score']['fullTime']['away'] if is_home else match['score']['fullTime']['home']
                ht_goals_for = match['score']['halfTime']['home'] if is_home else match['score']['halfTime']['away']
                ht_goals_against = match['score']['halfTime']['away'] if is_home else match['score']['halfTime']['home']

                # Goal tallies, split into first-half and second-half contributions
                stats['goals_scored'] += goals_for
                stats['goals_conceded'] += goals_against
                stats['ht_goals_scored'] += ht_goals_for
                stats['ht_goals_conceded'] += ht_goals_against
                stats['st_goals_scored'] += (goals_for - ht_goals_for)
                stats['st_goals_conceded'] += (goals_against - ht_goals_against)

                if goals_for > goals_against:
                    stats['wins'] += 1
                    stats['form'].append('W')
                elif goals_for == goals_against:
                    stats['draws'] += 1
                    stats['form'].append('D')
                else:
                    stats['losses'] += 1
                    stats['form'].append('L')

                if goals_against == 0:
                    stats['clean_sheets'] += 1
                if goals_for == 0:
                    stats['failed_to_score'] += 1

                if is_home:
                    stats['home_performance'] += (goals_for - goals_against)
                else:
                    stats['away_performance'] += (goals_for - goals_against)

                # Keep the first five finished matches returned by the API
                # (assumed to be the most recent ones).
                if len(stats['last_5_matches']) < 5:
                    stats['last_5_matches'].append({
                        'date': match['utcDate'],
                        'opponent': match['awayTeam']['name'] if is_home else match['homeTeam']['name'],
                        'score': f"{goals_for}-{goals_against}",
                        'is_home': is_home
                    })

        # Averages over finished matches only; scheduled matches would dilute them
        if finished_matches > 0:
            stats['avg_goals_scored'] = stats['goals_scored'] / finished_matches
            stats['avg_goals_conceded'] = stats['goals_conceded'] / finished_matches

        # Momentum: points share taken from the last five results (W=1, D=0.5, L=0)
        recent_matches = stats['form'][-5:] if stats['form'] else []
        momentum = sum(1 if r == 'W' else 0.5 if r == 'D' else 0 for r in recent_matches)
        stats['momentum'] = momentum / len(recent_matches) if recent_matches else 0

        return stats

    def _calculate_advanced_stats(self, matches: List[Dict], team_id: int) -> Dict:
        """Computes advanced statistics when the data source provides per-match stats."""
        advanced_stats = {
            'xG': 0,               # Not provided by this endpoint; stays at 0
            'xGA': 0,
            'possession': 0,
            'shots_on_target': 0,
            'shots_conceded': 0,
            'pass_accuracy': 0,
            'fouls_committed': 0,
            'fouls_suffered': 0
        }

        finished_matches = 0

        for match in matches:
            if match['status'] == 'FINISHED':
                finished_matches += 1
                # Each entry is expected to look like
                # {'team': {'id': ...}, 'type': 'Shots on Goal', 'value': ...}
                for stat in match.get('stats') or []:
                    if stat['team']['id'] == team_id:
                        if stat['type'] == 'Shots on Goal':
                            advanced_stats['shots_on_target'] += stat['value']
                        elif stat['type'] == 'Ball Possession':
                            advanced_stats['possession'] += float(stat['value'].strip('%'))
                        elif stat['type'] == 'Pass Accuracy':
                            advanced_stats['pass_accuracy'] += float(stat['value'].strip('%'))
                        elif stat['type'] == 'Fouls':
                            advanced_stats['fouls_committed'] += stat['value']
                    else:
                        if stat['type'] == 'Shots on Goal':
                            advanced_stats['shots_conceded'] += stat['value']
                        elif stat['type'] == 'Fouls':
                            advanced_stats['fouls_suffered'] += stat['value']

        if finished_matches == 0:
            return advanced_stats

        # Averages over finished matches
        advanced_stats['possession'] /= finished_matches
        advanced_stats['pass_accuracy'] /= finished_matches
        advanced_stats['shots_on_target'] /= finished_matches
        advanced_stats['shots_conceded'] /= finished_matches
        advanced_stats['fouls_committed'] /= finished_matches
        advanced_stats['fouls_suffered'] /= finished_matches

        return advanced_stats
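

# Illustrative example (editor addition): worked instance of the momentum formula used
# in _process_matches. A recent run of W, W, D, L, W scores 1 + 1 + 0.5 + 0 + 1 = 3.5
# points over 5 matches, i.e. a momentum of 0.7.
def _momentum_example() -> float:
    recent_form = ['W', 'W', 'D', 'L', 'W']
    points = sum(1 if r == 'W' else 0.5 if r == 'D' else 0 for r in recent_form)
    return points / len(recent_form)  # 0.7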


# Example usage; `await` is only valid inside a coroutine, so asyncio.run() drives it.
if __name__ == "__main__":
    async def _example():
        # Initialization
        data_manager = DataManager(api_key="your_key")

        try:
            # Fetch matches currently in play
            live_matches = await data_manager.get_live_matches()

            # Fetch upcoming matches
            upcoming_matches = await data_manager.get_upcoming_matches(hours=24)
        finally:
            # Clean shutdown
            data_manager.shutdown()

    asyncio.run(_example())
--------------------------------------------------------------------------------
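
# Illustrative sketch (editor addition, separate from the repository files): a quick,
# self-contained check of the OpenLigaDB match formatting in data_manager.py. The
# fabricated payload only uses fields the formatter reads, and the formatter never
# touches `self`, so it can be exercised without constructing a DataManager (whose
# __init__ may expect live services such as Redis).
from data_manager import DataManager

def demo_openliga_formatting():
    sample = [{
        'MatchID': 12345,
        'MatchDateTime': '2024-05-18T15:30:00',
        'MatchIsFinished': True,
        'Team1': {'TeamId': 7, 'TeamName': 'FC Example'},
        'Team2': {'TeamId': 9, 'TeamName': 'SV Sample'},
        'MatchResults': [
            {'PointsTeam1': 1, 'PointsTeam2': 0},   # half-time
            {'PointsTeam1': 2, 'PointsTeam2': 1}    # full-time
        ]
    }]
    formatted = DataManager._format_openliga_matches(None, sample)
    assert formatted[0]['status'] == 'FINISHED'
    assert formatted[0]['score']['halfTime'] == {'home': 1, 'away': 0}
    assert formatted[0]['score']['fullTime'] == {'home': 2, 'away': 1}
    return formatted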