├── .gitattributes ├── helpers ├── os_utils.py ├── __pycache__ │ ├── os_utils.cpython-39.pyc │ ├── os_utils.cpython-310.pyc │ ├── os_utils.cpython-311.pyc │ ├── parameters.cpython-39.pyc │ ├── handle_creds.cpython-310.pyc │ ├── handle_creds.cpython-311.pyc │ ├── handle_creds.cpython-39.pyc │ ├── parameters.cpython-310.pyc │ └── parameters.cpython-311.pyc ├── parameters.py └── handle_creds.py ├── signals └── readme.md ├── requirements.txt ├── creds.example.yml ├── test_net_creds.example.yml ├── Boot.spec ├── LICENSE ├── api_test.py ├── progressbar.py ├── README.md ├── Instalacion en android.txt ├── analisis_test_trades_estadisticas.py ├── megatronmod_strategy.py ├── languages_bot.py ├── config.yml ├── megatronmod.py ├── analisis_test_trades.py └── megatronmod_functions.py /.gitattributes: -------------------------------------------------------------------------------- 1 | *.exe filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /helpers/os_utils.py: -------------------------------------------------------------------------------- 1 | def rchop(s, suffix): 2 | if suffix and s.endswith(suffix): 3 | return s[:-len(suffix)] 4 | return s -------------------------------------------------------------------------------- /helpers/__pycache__/os_utils.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/os_utils.cpython-39.pyc -------------------------------------------------------------------------------- /helpers/__pycache__/os_utils.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/os_utils.cpython-310.pyc -------------------------------------------------------------------------------- /helpers/__pycache__/os_utils.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/os_utils.cpython-311.pyc -------------------------------------------------------------------------------- /helpers/__pycache__/parameters.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/parameters.cpython-39.pyc -------------------------------------------------------------------------------- /helpers/__pycache__/handle_creds.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/handle_creds.cpython-310.pyc -------------------------------------------------------------------------------- /helpers/__pycache__/handle_creds.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/handle_creds.cpython-311.pyc -------------------------------------------------------------------------------- /helpers/__pycache__/handle_creds.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/handle_creds.cpython-39.pyc -------------------------------------------------------------------------------- 
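A quick usage sketch of the rchop helper defined in helpers/os_utils.py above (illustrative only, not a file in the repository; the import path is assumed from the folder layout):

from helpers.os_utils import rchop

# The suffix is removed only when it is non-empty and the string actually ends with it.
print(rchop('BTCUSDT', 'USDT'))   # -> 'BTC'
print(rchop('BTCUSDT', 'BUSD'))   # -> 'BTCUSDT'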
/helpers/__pycache__/parameters.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/parameters.cpython-310.pyc -------------------------------------------------------------------------------- /helpers/__pycache__/parameters.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pantersxx3/binancebot/HEAD/helpers/__pycache__/parameters.cpython-311.pyc -------------------------------------------------------------------------------- /signals/readme.md: -------------------------------------------------------------------------------- 1 | This folder holds files with the .exs extension. 2 | .exs files are lists of pairs to be traded. 3 | e.g. module1.exs: 4 | ETHUSDT 5 | BTCUSDT -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | art 2 | ccxt 3 | python-binance 4 | tradingview_ta 5 | pandas 6 | pandas_ta 7 | requests 8 | colorama 9 | prettytable 10 | plotly 11 | telnetlib3 12 | ta 13 | pywin32 14 | pyyaml 15 | bokeh -------------------------------------------------------------------------------- /creds.example.yml: -------------------------------------------------------------------------------- 1 | # Ensure your keys and values have a space between them 2 | # Example: 3 | # Bad: 4 | # access_key:myaccess_key 5 | # Good: 6 | # access_key: myaccess_key 7 | 8 | 9 | prod: 10 | access_key: replace_me 11 | secret_key: replace_me 12 | discord: 13 | DISCORD_WEBHOOK: https://discord.com/api/webhooks/XXX/YYYYY 14 | -------------------------------------------------------------------------------- /test_net_creds.example.yml: -------------------------------------------------------------------------------- 1 | # Ensure your keys and values have a space between them 2 | # Example: 3 | # Bad: 4 | # access_key:myaccess_key 5 | # Good: 6 | # access_key: myaccess_key 7 | #https://testnet.binance.vision/ 8 | 9 | prod: 10 | access_key: JnL1Xef6QXNhebEtxV3TGRb3qeA05hA2XKNUpf2bDlJHOY9KFnhjjLZmKPvLjUPH 11 | secret_key: MrHoCidzYgtcRiMXhx13eC9SNMMWwKiYXT5fO3Hs71uFpUH9Jr07sbsLXANobyKx -------------------------------------------------------------------------------- /Boot.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python ; coding: utf-8 -*- 2 | 3 | 4 | a = Analysis( 5 | ['Boot.py'], 6 | pathex=[], 7 | binaries=[], 8 | datas=[('config.yml', '.'), ('creds.yml', '.'), ('test_net_creds.yml', '.'), ('megatronmod_strategy.py', '.')], 9 | hiddenimports=['talib.stream'], 10 | hookspath=[], 11 | hooksconfig={}, 12 | runtime_hooks=[], 13 | excludes=[], 14 | noarchive=False, 15 | optimize=0, 16 | ) 17 | pyz = PYZ(a.pure) 18 | 19 | exe = EXE( 20 | pyz, 21 | a.scripts, 22 | a.binaries, 23 | a.datas, 24 | [], 25 | name='Boot', 26 | debug=False, 27 | bootloader_ignore_signals=False, 28 | strip=False, 29 | upx=True, 30 | upx_exclude=[], 31 | runtime_tmpdir=None, 32 | console=True, 33 | disable_windowed_traceback=False, 34 | argv_emulation=False, 35 | target_arch=None, 36 | codesign_identity=None, 37 | entitlements_file=None, 38 | ) 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021
CyberPunkMetalHead 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /api_test.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import time 4 | import pandas as pd 5 | import asyncio 6 | from binance import Client, AsyncClient, BinanceSocketManager 7 | from binance.exceptions import BinanceAPIException 8 | from pprint import pformat 9 | from binance.helpers import round_step_size 10 | from binance.enums import * 11 | TEST_API_KEY = 'LeDeK6MTfC5fHuctvxhEBGEr99afbXOlLxx9vfudt6Dpd6Sb9ZEvnwSGFbznzEr6' 12 | TEST_API_SECRET = 'LS1Ic3SNpoemeDVHeSJ6iQ4BnIrbBNw9y8SjcasBLerUrwRBAEKse8ub5STaoL6F' 13 | # Instantiate Binance API client TESTNET US Market 14 | client = Client(api_key=TEST_API_KEY, api_secret=TEST_API_SECRET, testnet=True, tld='us') 15 | 16 | 17 | def get_info(coin, file): 18 | info = client.get_symbol_info(coin) 19 | with open(file, "a") as f: 20 | f.write(str(info) + '\n') 21 | return info 22 | #step_size1 = "" #info['stepSize'] 23 | #step_size2 = info['filters'][2] 24 | #print("step_size1", step_size1, "step_size2", step_size2) 25 | #lot_size[coin] = step_size.index('1') - 1 26 | 27 | #for filt in info['filters']: 28 | #if filt['filterType'] == 'LOT_SIZE': 29 | #lot_size = int(filt['stepSize'].find('1') - 1) 30 | #print("lot_size", lot_size) 31 | #break -------------------------------------------------------------------------------- /helpers/parameters.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import argparse 3 | 4 | 5 | def load_config(file): 6 | try: 7 | 8 | with open(file, 'r') as file: 9 | return yaml.load(file, Loader=yaml.FullLoader) 10 | except FileNotFoundError as fe: 11 | exit(f'Could not find {file}') 12 | 13 | except Exception as e: 14 | exit(f'Encountered exception...\n {e}') 15 | 16 | def save_config(file1, data): 17 | try: 18 | 19 | with open(file1, 'w') as file: 20 | return yaml.dump(data, file) 21 | except FileNotFoundError as fe: 22 | exit(f'Could not find {file}') 23 | 24 | except Exception as e: 25 | exit(f'Encountered exception...\n {e}') 26 | 27 | def parse_args(): 28 | x = argparse.ArgumentParser() 29 | x.add_argument('--debug', '-d', help="extra logging", action='store_true') 30 | x.add_argument('--config', '-c', help="Path to config.yml") 31 | x.add_argument('--creds', '-u', help="Path to creds file") 32 | 
x.add_argument('--notimeout', help="Don't use timeout in prod", action="store_true") 33 | x.add_argument('--test', '-t', help="Test mode") 34 | #x.add_argument('--makevlist', '-m', help="Create a Volatile List", action="store_true") 35 | return x.parse_args() -------------------------------------------------------------------------------- /progressbar.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | def progressBar(iterable, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', printEnd = "\r"): 4 | """ 5 | Call in a loop to create terminal progress bar 6 | @params: 7 | iterable - Required : iterable object (Iterable) 8 | prefix - Optional : prefix string (Str) 9 | suffix - Optional : suffix string (Str) 10 | decimals - Optional : positive number of decimals in percent complete (Int) 11 | length - Optional : character length of bar (Int) 12 | fill - Optional : bar fill character (Str) 13 | printEnd - Optional : end character (e.g. "\r", "\r\n") (Str) 14 | """ 15 | total = len(iterable) 16 | # Progress Bar Printing Function 17 | def printProgressBar (iteration): 18 | percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total))) 19 | filledLength = int(length * iteration // total) 20 | bar = fill * filledLength + '-' * (length - filledLength) 21 | print(f'\r{prefix} |{bar}| {percent}% {suffix}', end = printEnd) 22 | # Initial Call 23 | printProgressBar(0) 24 | # Update Progress Bar 25 | for i, item in enumerate(iterable): 26 | yield item 27 | printProgressBar(i + 1) 28 | # Print New Line on Complete 29 | print() 30 | 31 | def set_progress_bar(message, l, whait) : 32 | # A List of Items 33 | items = list(range(0, 57)) 34 | 35 | # A Nicer, Single-Call Usage 36 | for item in progressBar(items, prefix = message, suffix = '', length = l): 37 | # Do stuff... 38 | time.sleep(whait/l) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | image 2 | 3 | image 4 | 5 | 6 | "# binancebot" 7 | 8 | #Progress has been made on the ability to operate in backtest mode. 9 | #Progress will be made on creating a place where results and strategies can be shared. 10 | #There is a lot to do and much that can still be improved. I need collaborators who want to help improve this project so that we can all earn, not only coins but experience. 11 | #The bot can run in 3 ways: 12 | 13 | > Online, with cryptocurrencies that we have in our account. 14 | 15 | > Online on Testnet, we can test the bot without using our funds. 16 | 17 | > Backtest, we can test how our chosen strategy performs over a period of time; depending on the chosen period it will be fully processed in a few minutes. 18 | 19 | INFORMATION 20 | ------------ 21 | Pressing Ctrl + D will display the menu with options. 22 | 23 | > megatronmod.py The main module of the bot; it can be executed as a signal or as another function depending on what you need. 24 | 25 | > megatronmod_functions.py Contains all functions and indicators used by MegatronMod. 26 | 27 | > megatronmod_strategy.py Defines the strategy that the bot will use. 28 | 29 | > languages_bot.py Contains the translations of the bot into Spanish (partially) for now. 30 | 31 | > config.yml Holds all the bot configuration. 32 | 33 | > tickers.txt This file stores the quote cryptocurrencies and the base crypto set in the configuration file.
34 | 35 | > The volatile list file will change depending on the day and time of creation. I recommend not using too large a list (it is created from the configuration file) because it will make the bot slow. 36 | 37 | LAST CLARIFICATION 38 | ------------------- 39 | I have been working on this project for more than 3 years, and although it has disappointed more than one person, I must say that there is no infallible bot. No bot works like a fixed-term deposit; there are great limitations in relying only on trading indicators, since they do not predict the future. Obviously technology will advance and may come close, but it will not be infallible either. 40 | 41 | I'm tired, I've been working alone on this project for a long time.... 42 | 43 | To install ta-lib: https://github.com/cgohlke/talib-build/releases 44 | 45 | 46 | -------------------------------------------------------------------------------- /helpers/handle_creds.py: -------------------------------------------------------------------------------- 1 | from sys import exit 2 | 3 | 4 | def load_correct_creds(creds): 5 | try: 6 | 7 | return creds['prod']['access_key'], creds['prod']['secret_key'] 8 | 9 | except TypeError as te: 10 | message = 'Your credentials are formatted incorrectly\n' 11 | message += f'TypeError:Exception:\n\t{str(te)}' 12 | exit(message) 13 | except Exception as e: 14 | message = 'oopsies, looks like you did something real bad. Fallback Exception caught...\n' 15 | message += f'Exception:\n\t{str(e)}' 16 | exit(message) 17 | 18 | def load_discord_creds(creds): 19 | return creds['discord']['DISCORD_WEBHOOK'] 20 | 21 | def test_api_key(client, BinanceAPIException): 22 | """Checks whether the supplied API keys return errors 23 | 24 | Args: 25 | client (class): binance client class 26 | BinanceAPIException (class): binance exceptions class 27 | 28 | Returns: 29 | bool | msg: true/false depending on success, and message 30 | """ 31 | try: 32 | client.get_account() 33 | return True, "API key validated successfully" 34 | 35 | except BinanceAPIException as e: 36 | 37 | 38 | if e.code in [-2015,-2014]: 39 | bad_key = "Your API key is not formatted correctly..." 40 | america = "If you are in America, you will have to update the config to set AMERICAN_USER: True" 41 | ip_b = "If you set an IP block on your keys make sure this IP address is allowed. check ipinfo.io/ip" 42 | 43 | msg = f"Your API key is either incorrect, IP blocked, or incorrect tld/permissions...\n most likely: {bad_key}\n {america}\n {ip_b}" 44 | 45 | elif e.code == -2021: 46 | issue = "https://github.com/CyberPunkMetalHead/Binance-volatility-trading-bot/issues/28" 47 | desc = "Ensure your OS is time synced with a timeserver. See issue." 48 | msg = f"Timestamp for this request was 1000ms ahead of the server's time.\n {issue}\n {desc}" 49 | elif e.code == -1021: 50 | desc = "Your operating system time is not properly synced... 
Please sync ntp time with 'pool.ntp.org'" 51 | msg = f"{desc}\nmaybe try this:\n\tsudo ntpdate pool.ntp.org" 52 | else: 53 | msg = "Encountered an API Error code that was not caught nicely, please open issue...\n" 54 | msg += str(e) 55 | 56 | return False, msg 57 | 58 | except Exception as e: 59 | return False, f"Fallback exception occurred:\n{e}" 60 | 61 | -------------------------------------------------------------------------------- /Instalacion en android.txt: -------------------------------------------------------------------------------- 1 | INSTALLATION ON ANDROID 2 | 3 | WARNING: Remember to keep mobile data enabled and a good signal; if it is unstable or the connection drops, the bot will lose information and therefore 4 | lose buy and sell data. 5 | 6 | This guide is for Android 7 and later....(Termux is not compatible with versions prior to Android 7) 7 | To run our bot on Android you need the Termux application, so we will download the APK from F-Droid (the Google Play version does not work) 8 | https://f-droid.org/packages/com.termux/ 9 | 10 | Here is the direct link to the latest version (22/09/2021) 11 | https://f-droid.org/repo/com.termux_117.apk 12 | 13 | Once downloaded, install it; you may need to enable the Android option "Unknown sources" (this poses no risk to the phone) 14 | Open the installed app and start by updating the repositories and installing the required packages: 15 | 16 | pkg update & pkg upgrade 17 | pkg install openssh net-tools procps nano wget git python 18 | 19 | update (updates repositories) 20 | upgrade (updates packages) 21 | openssh (remote access) 22 | net-tools (find out your IP) 23 | procps (kill processes) 24 | nano (to edit files) 25 | wget (to download files from the internet) 26 | git (to download the fork) 27 | python (the bot's base language) 28 | 29 | Start and stop the SSH daemon 30 | To start the daemon, that is, to listen for connections, run (Note: connections will be listened for on port 8022): 31 | sshd 32 | 33 | To kill or stop it, use: 34 | pkill sshd 35 | 36 | We are now listening for SSH connections, but we still do not know our username, password and IP. 37 | Find out your username by typing: 38 | whoami 39 | 40 | Now set a secure password by running: 41 | passwd 42 | 43 | Finally, see which IP you have by typing: 44 | ifconfig 45 | 46 | When you have that information you can connect from somewhere else. 
If you use Linux, run on the client terminal: 47 | ssh USER@YOUR_IP -p 8022 48 | 49 | From Windows you can use Putty https://www.putty.org/ or install ssh for Windows https://www.openssh.com/portable.html 50 | 51 | we will clone the bot repository in Termux as follows 52 | git clone https://github.com/pantersxx3/Binance-Bot.git 53 | 54 | then type: 55 | cd Binance-Bot 56 | 57 | Update the packages needed to run the bot as follows 58 | pip3.9 install --upgrade pip 59 | pip3.9 install -r requirements.txt 60 | 61 | 62 | To update the bot periodically, run the command: 63 | git pull 
 -------------------------------------------------------------------------------- /analisis_test_trades_estadisticas.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import ta 3 | 4 | # === Funciones que ya tenías === 5 | def Calculate_Market_Direction(data, adx_period=20, adx_threshold=20): 6 | adx = ta.trend.ADXIndicator( 7 | high=data["High"], low=data["Low"], close=data["Close"], window=adx_period 8 | ).adx().iloc[-1] 9 | di_plus = ta.trend.ADXIndicator( 10 | high=data["High"], low=data["Low"], close=data["Close"], window=adx_period 11 | ).adx_pos().iloc[-1] 12 | di_minus = ta.trend.ADXIndicator( 13 | high=data["High"], low=data["Low"], close=data["Close"], window=adx_period 14 | ).adx_neg().iloc[-1] 15 | 16 | if adx > adx_threshold: 17 | if di_plus > di_minus: 18 | return "alcista" 19 | elif di_minus > di_plus: 20 | return "bajista" 21 | return "sin_tendencia" 22 | 23 | def Atr_Normalized(data, atr_period=20): 24 | atr = ta.volatility.AverageTrueRange( 25 | high=data["High"], low=data["Low"], close=data["Close"], window=atr_period 26 | ).average_true_range().iloc[-1] 27 | return atr / data["Close"].iloc[-1] 28 | 29 | def Check_Volume(data, window=20, umbral=1.2): 30 | volumen_actual = data["Volume"].iloc[-1] 31 | volumen_promedio = data["Volume"].rolling(window).mean().iloc[-1] 32 | return bool(volumen_actual > umbral * volumen_promedio) 33 | 34 | def Check_Consolidation(data, adx_threshold=20, atr_threshold=0.003): 35 | adx = ta.trend.ADXIndicator( 36 | high=data["High"], low=data["Low"], close=data["Close"], window=20 37 | ).adx().iloc[-1] 38 | atr = Atr_Normalized(data, 20) 39 | return adx < adx_threshold and atr < atr_threshold 40 | 41 | def Detect_Market_Type(data, umbral_alto_volatilidad=0.01): 42 | tendencia = Calculate_Market_Direction(data, 20, 20) 43 | volatilidad = Atr_Normalized(data, 20) 44 | consolidacion = Check_Consolidation(data, 20, 0.003) 45 | volumen_alto = Check_Volume(data, 20, 1.2) 46 | 47 | if tendencia == "alcista" and volumen_alto: 48 | return "tendencia_alcista_confirmada" 49 | elif tendencia == "bajista" and volumen_alto: 50 | return "tendencia_bajista_confirmada" 51 | elif consolidacion and not volumen_alto: 52 | return "consolidacion_confirmado" 53 | elif volatilidad > umbral_alto_volatilidad: 54 | return "volatil_confirmado" 55 | else: 56 | return "rango_lateral_confirmado" 57 | 58 | 59 | # === Función para agregar indicadores al DataFrame === 60 | def add_indicators(df): 61 | # RSI 62 | df["RSI"] = ta.momentum.RSIIndicator(df["Close"], window=14).rsi() 63 | 64 | # MACD 65 | macd = ta.trend.MACD(df["Close"]) 66 | df["MACD"] = macd.macd() 67 | df["MACD_signal"] = macd.macd_signal() 68 | 69 | # EMAs 70 | df["EMA20"] = df["Close"].ewm(span=20).mean() 71 | df["EMA50"] = df["Close"].ewm(span=50).mean() 72 | df["EMA200"] = 
df["Close"].ewm(span=200).mean() 73 | 74 | # ATR normalizado 75 | atr = ta.volatility.AverageTrueRange( 76 | high=df["High"], low=df["Low"], close=df["Close"], window=20 77 | ) 78 | df["ATR_norm"] = atr.average_true_range() / df["Close"] 79 | 80 | return df 81 | 82 | 83 | # === Función para etiquetar el tipo de mercado en todo el histórico === 84 | def label_market_types(df): 85 | labels = [] 86 | for i in range(len(df)): 87 | sub = df.iloc[:i+1] # dataset hasta la vela actual 88 | if len(sub) < 50: # esperar al menos 50 velas para cálculos 89 | labels.append(None) 90 | continue 91 | labels.append(Detect_Market_Type(sub)) 92 | df["market_type"] = labels 93 | return df 94 | 95 | 96 | # === Función principal de análisis === 97 | def analyze_market_statistics(df): 98 | df = add_indicators(df) 99 | df = label_market_types(df) 100 | 101 | stats = df.groupby("market_type")[["RSI", "MACD", "ATR_norm", "EMA20", "EMA50", "EMA200"]].agg( 102 | ["mean", "min", "max", "std"] 103 | ) 104 | 105 | return stats 106 | 107 | 108 | # === Uso === 109 | # Suponiendo que df es tu DataFrame con columnas: ["Open","High","Low","Close","Volume"] 110 | # Ejemplo: 111 | df = pd.read_csv("BTCUSDT.csv") # tu dataset de 1 año 112 | stats = analyze_market_statistics(df) 113 | print(stats) 114 | -------------------------------------------------------------------------------- /megatronmod_strategy.py: -------------------------------------------------------------------------------- 1 | # Megatronmod Strategy - All in One 2 | # Created by: Horacio Oscar Fanelli - Pantersxx3 and NokerPlay 3 | # This mod can be used only with: 4 | # https://github.com/pantersxx3/Binance-Bot 5 | # 6 | # No future support offered, use this script at own risk - test before using real funds 7 | # If you lose money using this MOD (and you will at some point) you've only got yourself to blame! 
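# How this module is wired in: megatronmod.py imports this file as MS and calls
# MS.buy(analysis, CLOSE, pair) and MS.sell(analysis, CLOSE, pair) on each cycle,
# where analysis is the candle DataFrame returned by MF.get_analysis(), CLOSE is
# the latest close price and pair is the symbol being evaluated; both functions
# return a plain boolean signal.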
8 | 9 | #Inficators avaibles: 10 | #MF.Crossover, MF.Crossunder, MF.Cross, MF.Ichimoku, MF.Bollinger MF.Bands, MF.Supertrend, MF.Momentum, MF.Hikinashi 11 | #MF.Macd, MF.Cci, MF.SL, MF.TP, MF.Bought_at, MF.Zigzag, MF.Ema, MF.Sma, MF.Stochastic, MF.Rsi, MF.Wma, MF.Hma 12 | 13 | import megatronmod_functions as MF 14 | from datetime import datetime 15 | import sys 16 | import os 17 | import random 18 | 19 | def buy(Data, CLOSE, pair): 20 | try: 21 | # HOST = "localhost" 22 | # PORT = 10000 23 | # tn = telnetlib.Telnet(HOST, PORT) 24 | 25 | buySignal = False 26 | # H, L = MF.Pivots_Hl(Data) 27 | # buySignal = MF.Ema(Data, 9) < MF.Ema(Data, 21) and MF.Rsi(Data, 14) < 30 and CLOSE < MF.Ema(Data, 21) and MF.Macd_Ind(Data, 12, 26, 9) < -1.05 28 | # buySignal = MF.Rsi(Data, 14) < 30 and MF.check_volume(Data) 29 | # buySignal = MF.Rsi(Data, 14) < 30 and MF.check_volume(Data) and MF.Ema(Data, 50) < MF.Ema(Data, 200) 30 | # buySignal = MF.Rsi(Data, 14) < 30 and MF.check_volume(Data) and MF.Macd_Ind(Data, 12, 26, 9) < -1.5 and MF.Ema(Data, 50) < MF.Ema(Data, 200) 31 | # buySignal = MF.Rsi(Data, 14) < 30 and MF.check_volume(Data) 32 | valor_rsi_sobrecompra, valor_rsi_sobreventa, valor_macd_buy, valor_macd_venta = MF.calcular_rangos_dinamicos_macd_rsi(Data, 14, 12, 26, 9, 70, 30, 30, 30) 33 | #print(valor_rsi_sobrecompra, valor_rsi_sobreventa, valor_macd_buy, valor_macd_venta) 34 | values = MF.guardar_rangos_dinamicos(pair, 15, valor_rsi_sobrecompra, valor_rsi_sobreventa, valor_macd_buy, valor_macd_venta) 35 | valor_rsi_sobrecompra = values["rsi_over"] 36 | #valor_rsi_sobreventa = values["rsi_under"] 37 | valor_macd_buy = values["macd_buy"] 38 | #valor_macd_venta = values["macd_sell"] 39 | buySignal = MF.Rsi(Data, 14) < valor_rsi_sobrecompra and MF.Check_Volume(Data) and MF.Macd_Ind(Data, 12, 26, 9) < valor_macd_buy and MF.Ema(Data, 50) < MF.Ema(Data, 200) and MF.Low_Volatility(Data, CLOSE) 40 | # buySignal = MF.Ema(Data, 9) < MF.Ema(Data, 21) and MF.Rsi(Data, 14) < 30 and CLOSE < MF.Ema(Data, 21) 41 | # buySignal = MF.spread_strategy(0.01, 0.07, Data) == 1 and CLOSE > MF.Ema(Data, 200) and (CLOSE < MF.B(Data) or CLOSE <= round(MF.B(Data) - ((0.1 * MF.B(Data))/100), 5)) 42 | # buySignal = MF.Rsi(Data, 14) < 50 and L <= CLOSE and MF.Macd_Ind(Data, 12, 26, 9) < -0.95 43 | except Exception as e: 44 | exc_type, exc_obj, exc_tb = sys.exc_info() 45 | print(e) 46 | print('Buy Error on line ' + str(exc_tb.tb_lineno)) 47 | pass 48 | return buySignal 49 | 50 | 51 | def sell(Data, CLOSE, pair): 52 | try: 53 | sellSignal = False 54 | B = MF.Bought_at(pair) 55 | # sellSignal = MF.Ema(Data, 9) > MF.Ema(Data, 21) and MF.Rsi(Data, 14) > 70 and CLOSE > MF.Ema(Data, 21)and MF.Macd_Ind(Data, 12, 26, 9) > 0.85 56 | # sellSignal = MF.Rsi(Data, 14) > 70 and MF.check_volume(Data) and MF.Ema(Data, 50) > MF.Ema(Data, 200) 57 | # sellSignal = bool(MF.Rsi(Data, 14) > 70 and MF.check_volume(Data) and MF.Macd_Ind(Data, 12, 26, 9) > 0.85 and MF.Ema(Data, 50) > MF.Ema(Data, 200)) or MF.Sl(pair, CLOSE, 4) 58 | # sellSignal = MF.Rsi(Data, 14) > 70 and MF.check_volume(Data) or MF.Sl(pair, CLOSE, 4) 59 | # sellSignal = MF.Rsi(Data, 14) > 70 and MF.check_volume(Data) and MF.Macd_Ind(Data, 12, 26, 9) > 0.85 and MF.Ema(Data, 50) > MF.Ema(Data, 200) and MF.low_volatility(Data, CLOSE) or MF.Sl(pair, CLOSE, 4) 60 | valor_rsi_sobrecompra, valor_rsi_sobreventa, valor_macd_buy, valor_macd_venta = MF.calcular_rangos_dinamicos_macd_rsi(Data, 14, 12, 26, 9, 70, 30, 30, 30) 61 | #print(valor_rsi_sobrecompra, valor_rsi_sobreventa, valor_macd_buy, 
valor_macd_venta) 62 | values = MF.guardar_rangos_dinamicos(pair, 15, valor_rsi_sobrecompra, valor_rsi_sobreventa, valor_macd_buy, valor_macd_venta) 63 | #valor_rsi_sobrecompra = values["rsi_over"] 64 | valor_rsi_sobreventa = values["rsi_under"] 65 | #valor_macd_buy = values["macd_buy"] 66 | valor_macd_venta = values["macd_sell"] 67 | sellSignal = MF.Rsi(Data, 14) > valor_rsi_sobreventa and MF.Check_Volume(Data) and MF.Macd_Ind(Data, 12, 26, 9) > valor_macd_venta and MF.Ema(Data, 50) > MF.Ema(Data, 200) and MF.Low_Volatility(Data, CLOSE) 68 | # H, L = MF.Pivots_Hl(Data) 69 | # sellSignal = MF.Rsi(Data, 14) > 50 and H >= CLOSE and MF.Macd_Ind(Data, 12, 26, 9) > 1 or MF.Sl(pair, CLOSE, 1) 70 | except Exception as e: 71 | print(e) 72 | exc_type, exc_obj, exc_tb = sys.exc_info() 73 | print('Sell Error on line ' + str(exc_tb.tb_lineno)) 74 | pass 75 | return sellSignal 76 | -------------------------------------------------------------------------------- /languages_bot.py: -------------------------------------------------------------------------------- 1 | MSG1 = { 2 | "es" : "Excepción en función", 3 | "en" : "Exception in function" 4 | } 5 | 6 | MSG2 = { 7 | "es" : "Error en linea", 8 | "en" : "Error on line" 9 | } 10 | 11 | MSG3 = { 12 | "es" : "Descargando datos", 13 | "en" : "Downloading data" 14 | } 15 | 16 | MSG4 = { 17 | "es" : "Proceso Finalizado", 18 | "en" : "Ended process" 19 | } 20 | 21 | MSG5 = { 22 | "es" : "BOT", 23 | "en" : "BOT" 24 | } 25 | 26 | MSG6 = { 27 | "es" : "Fecha de inicio", 28 | "en" : "Start date" 29 | } 30 | 31 | MSG7 = { 32 | "es" : "Posicion actual", 33 | "en" : "Actual position" 34 | } 35 | 36 | MSG8 = { 37 | "es" : "se establecieron a partir de monedas específicas", 38 | "en" : "were set from specific currencies" 39 | } 40 | 41 | MSG9 = { 42 | "es" : "No se puede continuar porque no hay monedas en el rango seleccionado, cambie la configuración y vuelva a iniciar el bot", 43 | "en" : "Cannot continue because there are no coins in the selected range, change the settings and start the bot again" 44 | } 45 | 46 | MSG10 = { 47 | "es" : "Guardando", 48 | "en" : "Saving" 49 | } 50 | 51 | MSG11 = { 52 | "es" : "monedas a", 53 | "en" : "coins to" 54 | } 55 | 56 | MSG12 = { 57 | "es" : "Ya existe una lista creada recientemente, si desea crear una nueva lista, detenga el bot y elimine la anterior.", 58 | "en" : "There is already a recently created list, if you want to create a new list, stop the bot and delete the previous one." 
59 | } 60 | 61 | MSG13 = { 62 | "es" : "si crea una nueva lista al continuar una sesión anterior, puede que no coincida con la anterior y dé errores", 63 | "en" : "if you create a new list when continuing a previous session, it may not coincide with the previous one and give errors" 64 | } 65 | 66 | MSG14 = { 67 | "es" : "REMEMBER", 68 | "en" : "RECUERDA" 69 | } 70 | 71 | MSG15 = { 72 | "es" : "Simbolos", 73 | "en" : "Symbol" 74 | } 75 | 76 | MSG16 = { 77 | "es" : "Comprado a", 78 | "en" : "Bought At" 79 | } 80 | 81 | MSG17 = { 82 | "es" : "Ahora en", 83 | "en" : "Now At" 84 | } 85 | 86 | MSG18 = { 87 | "es" : "Cambio", 88 | "en" : "Change" 89 | } 90 | 91 | MSG19 = { 92 | "es" : "Ganancia", 93 | "en" : "Profit" 94 | } 95 | 96 | MSG20 = { 97 | "es" : "Tiempo de Espera", 98 | "en" : "Time Held" 99 | } 100 | 101 | MSG21 = { 102 | "es" : "Volumen", 103 | "en" : "Volume" 104 | } 105 | 106 | MSG22 = { 107 | "es" : "COMIENZO", 108 | "en" : "STARTED" 109 | } 110 | 111 | MSG23 = { 112 | "es" : "CORRIENDO POR", 113 | "en" : "RUNNING FOR" 114 | } 115 | 116 | MSG24 = { 117 | "es" : "COMPRAS PAUSADAS", 118 | "en" : "BUYING PAUSE" 119 | } 120 | 121 | MSG25 = { 122 | "es" : "MODO TEST", 123 | "en" : "TEST MODE" 124 | } 125 | 126 | MSG26 = { 127 | "es" : "MODO BACKTEST", 128 | "en" : "BACKTEST MODE" 129 | } 130 | 131 | MSG27 = { 132 | "es" : "BILLETERA BINANCE", 133 | "en" : "BINANCE WALLET" 134 | } 135 | 136 | MSG28 = { 137 | "es" : "RETENCIONES ACTUALES", 138 | "en" : "CURRENT HOLDS" 139 | } 140 | 141 | MSG29 = { 142 | "es" : "GANO", 143 | "en" : "WIN" 144 | } 145 | 146 | MSG30 = { 147 | "es" : "TRANSACCIONES TOTALES", 148 | "en" : "TOTAL TRADES" 149 | } 150 | 151 | MSG31 = { 152 | "es" : "PERDIO", 153 | "en" : "LOSS" 154 | } 155 | 156 | MSG32 = { 157 | "es" : "GANADO", 158 | "en" : "WON" 159 | } 160 | 161 | MSG33 = { 162 | "es" : "PENDIENTE", 163 | "en" : "PENDING" 164 | } 165 | 166 | MSG34 = { 167 | "es" : "No tiene saldo suficiente para operar en Binance. Importe mínimo 10 USDT!", 168 | "en" : "You do not have enough balance to trade on Binance. Minimum amount 10 USDT!" 169 | } 170 | 171 | MSG35 = { 172 | "es" : "Creando lista volátil, espera un momento (3 minutos aproximadamente)", 173 | "en" : "Creating volatile list, wait a moment(3 minutes approximately)" 174 | } 175 | 176 | MSG36 = { 177 | "es" : "sesión", 178 | "en" : "session" 179 | } 180 | 181 | MSG37 = { 182 | "es" : "perdido en esta ", 183 | "en" : "won in this " 184 | } 185 | 186 | MSG38 = { 187 | "es" : "ganado en esta sesion", 188 | "en" : "loss in this session" 189 | } 190 | 191 | MSG39 = { 192 | "es" : "MONEDAS GUARDADAS", 193 | "en" : "SAVED COINS" 194 | } 195 | 196 | MSG40 = { 197 | "es" : "FINAL", 198 | "en" : "END" 199 | } 200 | 201 | MSG41 = { 202 | "es" : "Presione Ctrl-C para detener el script y abrir el menu.", 203 | "en" : "Press Ctrl-C to stop the script and open menu." 204 | } 205 | 206 | MSG42 = { 207 | "es" : "El modo de prueba está deshabilitado en la configuración, estás usando fondos _REALES_.", 208 | "en" : "Test mode is disabled in the configuration, you are using _LIVE_ funds." 209 | } 210 | 211 | MSG43 = { 212 | "es" : "¡Esperar 10 segundos antes de operar en vivo como medida de seguridad!", 213 | "en" : "Waiting 10 seconds before live trading as a security measure!" 
214 | } 215 | 216 | MSG42 = { 217 | "es" : "", 218 | "en" : "" 219 | } 220 | 221 | MSG43 = { 222 | "es" : "", 223 | "en" : "" 224 | } 225 | 226 | MSG44 = { 227 | "es" : "", 228 | "en" : "" 229 | } 230 | 231 | MSG45 = { 232 | "es" : "", 233 | "en" : "" 234 | } 235 | 236 | MSG46 = { 237 | "es" : "", 238 | "en" : "" 239 | } 240 | 241 | MSG47 = { 242 | "es" : "", 243 | "en" : "" 244 | } 245 | 246 | MSG48 = { 247 | "es" : "", 248 | "en" : "" 249 | } 250 | 251 | MSG49 = { 252 | "es" : "", 253 | "en" : "" 254 | } 255 | 256 | MSG50 = { 257 | "es" : "", 258 | "en" : "" 259 | } 260 | 261 | MSG51 = { 262 | "es" : "", 263 | "en" : "" 264 | } 265 | 266 | MSG52 = { 267 | "es" : "", 268 | "en" : "" 269 | } 270 | 271 | MSG53 = { 272 | "es" : "", 273 | "en" : "" 274 | } 275 | -------------------------------------------------------------------------------- /config.yml: -------------------------------------------------------------------------------- 1 | # These options apply to how the script will operate. 2 | script_options: 3 | LANGUAGE: 'es' 4 | # Switch between testnet and mainnet 5 | # Setting this to False will use REAL funds, use at your own risk 6 | #MODES: (ONLINE, ONLINETESNET), (TESTMODE, BACKTESTING) 7 | MODE: "BACKTESTING" 8 | BACKTESTING_MODE_TIME_START: "01/01/23 00:00:00" #"01/10/21 00:00:00" # 9 | BACKTESTING_MODE_TIME_END: "31/12/23 00:00:00" #"01/10/22 00:00:00" # 10 | BOT_TIMEFRAME: "1m" 11 | LOG_TRADES: True 12 | JSON_REPORT: 'report.json' 13 | MICROSECONDS: 0.3 14 | LOG_FILE: 'log.txt' 15 | TRADES_LOG_FILE: 'trades.csv' 16 | TRADES_GRAPH: 'graphics.html' 17 | TRADES_INDICATORS: 'indicator.csv' 18 | #USE_TRADES_INDICATORS: False 19 | #FILE_SYMBOL_INFO: 'symbol.info' 20 | #if it is empty no information will be saved 21 | #HISTORY_LOG_FILE: '' #'history.html' 22 | COINS_BOUGHT: 'coins_bought.json' 23 | BOT_STATS: 'bot_stats.json' 24 | PRINT_TABLE_COMMISSIONS: False 25 | USE_VOLATILE_METOD: False 26 | DEBUG: True 27 | SILENT_MODE: True 28 | #through telnet it is possible to obtain the value of this or that variable in a function 29 | #REMOTE_INSPECTOR_BOT_PORT: 0 #9998 30 | #if 0 remote server is disabled 31 | #REMOTE_INSPECTOR_MEGATRONMOD_PORT: 0 #9999 32 | #all binance requests from our bot will be proxied 33 | PROXY_HTTP: '' #'http://localhost:8090' 34 | PROXY_HTTPS: '' 35 | # Set this to true if you are accessing binance from within the United States of America 36 | # Need to change TLD 37 | AMERICAN_USER: False 38 | BUY_PAUSED: False 39 | 40 | # These options apply to the trading methods the script executes 41 | trading_options: 42 | PAIR: ["BTC"] 43 | # select your base currency to use for trading (trade for example USDT pairs) 44 | PAIR_WITH: USDT #FDUSD 45 | #When activating this option, as long as the TRADE_SLOTS option is 1, 46 | #it will be bought with all the capital in the wallet. 47 | #The method works.... 48 | COMPOUND_INTEREST: False 49 | # Total amount per trade (your base currency balance must be at least TRADE_SLOTS * TRADE_TOTAL) 50 | # Binance uses a minimum of 10 USDT per trade, add a bit extra to enable selling if the price drops. 51 | # Recommended: no less than 12 USDT. Suggested: 15 or more. 52 | TRADE_TOTAL: 1000 53 | # Maximum number of trade 'slots' at any time (your USDT balance must be at least TRADE_SLOTS * TRADE_TOTAL) 54 | TRADE_SLOTS: 1 55 | # EX_PAIRS is a list of fiat currencies and margin symbols that I am excluding. 56 | # Anything added here will be excluded from coin withdrawal and will not be sold. 
57 | EXCLUDE_PAIRS: ['PAX', 'EUR', 'GBP', 'JPY', 'USD', 'AUD', 'DOWN', 'UP', 'ONE', 'USDC', 'SHIB', 'VET', 'AXS', 'NEAR', 'GRT', 'CHR', 'BUSD', 'TUSD', 'LUNA', 'UST', 'BTTC', 'USDP', 'POLY'] 58 | # define in % when to sell a coin that's not making a profit. 59 | STOP_LOSS: 0.5 60 | # define in % when to take profit on a profitable coin. default 0.25 61 | TAKE_PROFIT: 1 62 | # Use custom tickers.txt list for filtering pairs. 63 | # Name of custom tickers list 64 | #CUSTOM_LIST: True 65 | #TICKERS_LIST: 'tickers.txt' 66 | # whether to use trailing stop loss or not; default is True 67 | # when hit TAKE_PROFIT, move STOP_LOSS to TRAILING_STOP_LOSS percentage points below TAKE_PROFIT hence locking in profit 68 | # when hit TAKE_PROFIT, move TAKE_PROFIT up by TRAILING_TAKE_PROFIT percentage points 69 | # NOTE -SELL_ON_SIGNAL_ONLY will override USE_TRAILING_STOP_LOSS 70 | USE_TRAILING_STOP_LOSS: False 71 | TRAILING_STOP_LOSS: .1 72 | TRAILING_TAKE_PROFIT: .1 73 | # Following are used to override SL, TP, TSL & TTP & SELL_ON_SIGNAL_ONLY in the case of a market crash 74 | # when hit SESSION_TAKE_PROFIT %, sell all coins and stop bot 75 | # when hit SESSION_STOP_LOSS %, sell all coins and stop bot. 76 | # Note, SESSION_STOP_LOSS needs to be a NEGATIVE number i.e. -2 77 | SESSION_TPSL_OVERRIDE: False 78 | SESSION_TAKE_PROFIT: 0.2 79 | SESSION_STOP_LOSS: -1 80 | # Let a signalling module control sell of coin 81 | # NOTE - If USE_TRAILING_STOP_LOSS: True then this needs to be False 82 | SELL_ON_SIGNAL_ONLY: True 83 | # Trading fee in % per trade. 84 | # If using 0.075% (using BNB for fees) you must have BNB in your account to cover trading fees. 85 | # If using BNB for fees, it MUST be enabled in your Binance 'Dashboard' page (checkbox). 86 | TRADING_FEE: 0.075 87 | #sells the percentage that is placed on it, this serves to save a part of what was purchased. 88 | #If get error APIError(code=-1013), change percentage 89 | #sell what you bought with TRADE_TOTAL and save the profits. 90 | SELL_PART: False 91 | # Discord integration 92 | # Used to push alerts, messages etc to a discord channel 93 | MSG_DISCORD: False 94 | #restart all modules every hour 95 | RESTART_MODULES: False 96 | STATIC_MAIN_INFO: False 97 | DISABLE_TIMESTAMPS: True 98 | SHOW_INITIAL_CONFIG: False 99 | #sell the currency that was unsold so many minutes ago.(values in minutes) 100 | MAX_HOLDING_TIME: 0 101 | SHOW_TABLE_COINS_BOUGHT: True 102 | SORT_TABLE_BY: "Time Held" 103 | REVERSE_SORT: False 104 | #Enables the use of currencies with greater or lesser volume 105 | #This method is only compatible in ONLINE and ONLINETESNET mode 106 | USE_MOST_VOLUME_COINS: False 107 | #the list is updated every n number of minutes. by default it is updated every 24 hours (1440 minutes). 108 | #Sweethackercasper's idea, thanks mate. 109 | UPDATE_MOST_VOLUME_COINS: 1440 110 | #It should not be touched here, used as record. 111 | VOLATILE_VOLUME: volatile_volume_17-09-2024(00_02_21) 112 | #idea from my psychologist, thanks Sebastian V. 113 | #Two different currencies can be used as a range (within the market table in Binance ordered from highest to lowest) 114 | #https://www.binance.com/es-AR/markets/spot_margin-USDT sort by volume 115 | COINS_MAX_VOLUME: BTC 116 | COINS_MIN_VOLUME: TIA 117 | #if the losses are equal to or greater than the established percentage, the bot will stop completely. if it is 0 it is deactivated. 
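#Illustrative example: PANIC_STOP: 5 would stop the bot completely once losses reach 5%; the default of 0 below keeps this check disabled.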
118 | PANIC_STOP: 0 119 | 120 | SIGNALLING_MODULES: 121 | # NOTE: Only use the "os_xxxxxxxxx" or "nigec_xxxxxxxxx" etc signal/pause modules with this fork as 122 | # the default ones WILL NOT work due to customisations for my specific purposes 123 | # 124 | - megatronmod 125 | -------------------------------------------------------------------------------- /megatronmod.py: -------------------------------------------------------------------------------- 1 | # Megatronmod Strategy - All in One 2 | # Created by: Horacio Oscar Fanelli - Pantersxx3 and NokerPlay 3 | # This mod can be used only with: 4 | # https://github.com/pantersxx3/Binance-Bot 5 | # 6 | # No future support offered, use this script at own risk - test before using real funds 7 | # If you lose money using this MOD (and you will at some point) you've only got yourself to blame! 8 | 9 | import os 10 | import sys 11 | import json 12 | import glob 13 | import math 14 | import time 15 | import socket 16 | import threading 17 | import pandas as pd 18 | import pandas_ta as ta 19 | import megatronmod_strategy as MS 20 | import megatronmod_functions as MF 21 | from datetime import date, datetime, timedelta 22 | from helpers.parameters import parse_args, load_config 23 | import traceback 24 | 25 | # Diccionario para almacenar las variables locales de cada función 26 | variables_funciones = {} 27 | 28 | global config_file, creds_file, parsed_creds, parsed_config, USE_MOST_VOLUME_COINS, PAIR_WITH, SELL_ON_SIGNAL_ONLY, TEST_MODE, LOG_FILE 29 | global COINS_BOUGHT, EXCHANGE, SCREENER, STOP_LOSS, TAKE_PROFIT, TRADE_SLOTS, BACKTESTING_MODE, BACKTESTING_MODE_TIME_START, SIGNAL_NAME 30 | global access_key, secret_key, client, txcolors, bought, timeHold, ACTUAL_POSITION, args, BACKTESTING_MODE_TIME_START, USE_MOST_VOLUME_COINS 31 | global TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES, LANGUAGE, BOT_TIMEFRAME 32 | 33 | class txcolors: 34 | BUY = '\033[92m' 35 | WARNING = '\033[93m' 36 | SELL_LOSS = '\033[91m' 37 | SELL_PROFIT = '\033[32m' 38 | DIM = '\033[2m\033[35m' 39 | Red = '\033[31m' 40 | DEFAULT = '\033[39m' 41 | 42 | DEFAULT_CONFIG_FILE = 'config.yml' 43 | SIGNAL_NAME = 'MEGATRONMOD' 44 | SIGNAL_FILE_BUY = 'signals/' + SIGNAL_NAME + '.buy' 45 | SIGNAL_FILE_SELL ='signals/' + SIGNAL_NAME + '.sell' 46 | 47 | # Settings 48 | args = parse_args() 49 | config_file = args.config if args.config else DEFAULT_CONFIG_FILE 50 | parsed_config = load_config(config_file) 51 | 52 | global TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES, MODE, LANGUAGE 53 | 54 | PAIR = parsed_config['trading_options']['PAIR'] 55 | PAIR_WITH = parsed_config['trading_options']['PAIR_WITH'] 56 | TRADE_SLOTS = parsed_config['trading_options']['TRADE_SLOTS'] 57 | MODE = parsed_config['script_options']['MODE'] 58 | SELL_ON_SIGNAL_ONLY = parsed_config['trading_options']['SELL_ON_SIGNAL_ONLY'] 59 | LOG_FILE = parsed_config['script_options'].get('LOG_FILE') 60 | USE_MOST_VOLUME_COINS = parsed_config['trading_options']['USE_MOST_VOLUME_COINS'] 61 | LANGUAGE = parsed_config['script_options']['LANGUAGE'] 62 | #REMOTE_INSPECTOR_MEGATRONMOD_PORT = parsed_config['script_options']['REMOTE_INSPECTOR_MEGATRONMOD_PORT'] 63 | BOT_TIMEFRAME = parsed_config['script_options']['BOT_TIMEFRAME'] 64 | #USE_SIGNALLING_MODULES = False if BACKTESTING_MODE else True 65 | 66 | MICROSECONDS = 2 67 | 68 | 69 | def register_func_name(function_name, items): 70 | global variables_funciones 71 | variables_funciones[function_name] = {k: v for k, v in 
items} 72 | 73 | def convertir_a_str(value): 74 | if isinstance(value, dict): 75 | return str(value) 76 | elif isinstance(value, list): 77 | return str(value) 78 | elif isinstance(value, pd.DataFrame): 79 | return value.to_string() # Convierte el DataFrame a texto legible 80 | else: 81 | return str(value) 82 | 83 | # def handle_client(client_socket): 84 | # try: 85 | # global variables_funciones 86 | # while True: 87 | # request = client_socket.recv(1024).decode().strip() 88 | # parts = request.split(".") 89 | # if len(parts) == 2: 90 | # funcion = parts[0] 91 | # variable = parts[1] 92 | 93 | # if variable == "all_val": 94 | # all_vars = "\n".join([f"{k}: {convertir_a_str(v)}" for k, v in variables_funciones[funcion].items()]) 95 | # response = f"{funcion}:\n {all_vars}\n " 96 | # else: 97 | # if funcion in variables_funciones and variable in variables_funciones[funcion]: 98 | # response = f"{funcion}.{variable}: {variables_funciones[funcion][variable]}\n" 99 | # else: 100 | # response = f"Variable {variable} no encontrada en la función {funcion}\n" 101 | # else: 102 | # response = "Comando no reconocido. Use 'funcion.variable'\n" 103 | 104 | # client_socket.send(response.encode()) 105 | 106 | # except Exception as e: 107 | # MF.write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 108 | # exc_type, exc_obj, exc_tb = sys.exc_info() 109 | # MF.write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 110 | # pass 111 | 112 | # def start_telnet_server(): 113 | # if REMOTE_INSPECTOR_MEGATRONMOD_PORT > 0: 114 | # server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 115 | # server.bind(('0.0.0.0', REMOTE_INSPECTOR_MEGATRONMOD_PORT)) # Escucha en todas las interfaces en el puerto 9999 116 | # server.listen(5) 117 | # print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT} Servidor Telnet: escuchando en el puerto 9999') 118 | 119 | # while True: 120 | # client_socket, addr = server.accept() 121 | # print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT} Servidor Telnet: Conexión aceptada desde {addr}') 122 | 123 | # # Crear un hilo separado para manejar la conexión 124 | # client_handler = threading.Thread(target=handle_client, args=(client_socket,)) 125 | # client_handler.start() 126 | 127 | def analyze(d, pairs, buy=True, position = {}): 128 | try: 129 | global TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES, MODE, LANGUAGE 130 | 131 | signal_coins1 = [] 132 | signal_coins2 = [] 133 | analysis = {} 134 | buySignal00 = False 135 | sellSignal00 = False 136 | position2 = 0 137 | 138 | from Boot import set_correct_mode 139 | TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES = set_correct_mode(LANGUAGE, MODE, True) 140 | 141 | if TEST_MODE: 142 | file_prefix = 'test_' 143 | else: 144 | file_prefix = 'live_' 145 | 146 | #print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Analyzing {len(pairs)} coins...{txcolors.DEFAULT}') 147 | 148 | for pair in pairs: 149 | if BACKTESTING_MODE: 150 | if len(position) > 0: 151 | position2 = position[pair] #MF.read_position_csv(pair) 152 | if not os.path.exists(pair + '.csv'): 153 | print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Whaiting for Download Data...{txcolors.DEFAULT}') 154 | if USE_SIGNALLING_MODULES: 155 | while not os.path.exists(pair + '.csv'): 156 | time.sleep(0.5) #Wait for download 157 | else: 158 | 
print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Data file not found. Whaiting for Download Data...{txcolors.DEFAULT}') 159 | 160 | analysis = MF.get_analysis(d, BOT_TIMEFRAME, pair, position2, 300) 161 | 162 | if not analysis.empty: 163 | CLOSE = float(analysis['Close'].iloc[-1]) #round(float(analysis['Close'].iloc[-1]),6) 164 | #OPEN_1MIN = round(float(analysis['Open'].iloc[-1]),6) 165 | #CLOSE_ANT = round(float(analysis['Close'].iloc[-2]),6) 166 | time1 = 0 167 | #TIME_1M = analysis['time'].iloc[-1] 168 | #time1 = int(TIME_1M)/1000 169 | #time_1MIN = datetime.fromtimestamp(int(time1)).strftime("%d/%m/%y %H:%M:%S") 170 | #buySignal00 = MS.buy(analysis, CLOSE, pair) 171 | #sellSignal00 = MS.sell(analysis, CLOSE, pair) 172 | 173 | 174 | 175 | if buy: 176 | bought_at, timeHold, coins_bought = MF.load_json(pair) 177 | if coins_bought < TRADE_SLOTS and bought_at == 0: 178 | if MS.buy(analysis, CLOSE, pair): 179 | signal_coins1.append({ 'time': position2, 'symbol': pair, 'price': CLOSE}) 180 | #MF.write_log(f'BUY {CLOSE} {position2}', LOG_FILE, False, False) 181 | if USE_SIGNALLING_MODULES: 182 | #print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Buy signal detected...{txcolors.DEFAULT}') 183 | with open(SIGNAL_FILE_BUY,'w+') as f: 184 | f.write(pair + '\n') 185 | #break 186 | 187 | if SELL_ON_SIGNAL_ONLY: 188 | bought_at, timeHold, coins_bought = MF.load_json(pair) 189 | if float(bought_at) != 0 and float(coins_bought) != 0 and float(CLOSE) != 0: 190 | sellSignal00 = MS.sell(analysis, CLOSE, pair) 191 | analysis = {} 192 | if sellSignal00 and float(bought_at) != 0: 193 | signal_coins2.append({ 'time': position2, 'symbol': pair, 'price': CLOSE}) 194 | #print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Sell signal detected...{txcolors.DEFAULT}') 195 | #MF.write_log(f'SELL {CLOSE} {bought_at} {position2}', LOG_FILE, False, False) 196 | if USE_SIGNALLING_MODULES: 197 | with open(SIGNAL_FILE_SELL,'w+') as f: 198 | f.write(pair + '\n') 199 | #break 200 | d = {} 201 | register_func_name("analyze", locals().items()) 202 | except Exception as e: 203 | MF.write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 204 | exc_type, exc_obj, exc_tb = sys.exc_info() 205 | MF.write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 206 | pass 207 | return signal_coins1, signal_coins2 208 | 209 | def timeframe_to_seconds(timeframe): 210 | multipliers = { 211 | 's': 1, # segundos 212 | 'm': 60, # minutos 213 | 'h': 3600, # horas 214 | 'd': 86400, # días 215 | 'w': 604800, # semanas 216 | } 217 | unit = timeframe[-1] 218 | value = int(timeframe[:-1]) 219 | if unit in multipliers: 220 | return value * multipliers[unit] 221 | else: 222 | return 0 223 | 224 | def do_work(): 225 | try: 226 | global TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES, MODE, LANGUAGE, BOT_TIMEFRAME 227 | signalcoins1 = [] 228 | signalcoins2 = [] 229 | pairs = [] 230 | dataBuy = {} 231 | dataSell = {} 232 | 233 | from Boot import set_correct_mode 234 | TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES = set_correct_mode(LANGUAGE, MODE, True) 235 | 236 | #telnet_thread = threading.Thread(target=start_telnet_server) 237 | #telnet_thread.daemon = True # El hilo se detendrá si el programa principal termina 238 | #telnet_thread.start() 239 | 240 | if USE_MOST_VOLUME_COINS == True: 241 | TICKERS = 'volatile_volume_' + str(date.today()) + 
'.txt' 242 | for line in open(TICKERS): 243 | pairs=[line.strip() + PAIR_WITH for line in open(TICKERS)] 244 | else: 245 | for pair in PAIR: 246 | pairs.append(pair + PAIR_WITH) 247 | 248 | while True: 249 | #if not threading.main_thread().is_alive(): exit() 250 | if os.path.exists("signal.sig"): 251 | print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Exit...{txcolors.DEFAULT}') 252 | os.remove("signal.sig") 253 | sys.exit(0) 254 | #print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Analyzing {len(pairs)} coins...{txcolors.DEFAULT}') 255 | # if BACKTESTING_MODE: 256 | # while os.path.exists('ok.ok'): 257 | # time.sleep(0.001) #do_work 258 | # signalcoins1, signalcoins2 = analyze(pd.DataFrame([]), pairs, True) 259 | # with open('ok.ok','w') as f: 260 | # f.write('1') 261 | # else: 262 | signalcoins1, signalcoins2 = analyze(pd.DataFrame([]), pairs, True) 263 | #time.sleep(0.001) #do_work 264 | DISABLE_WAIT = False 265 | if not DISABLE_WAIT: 266 | if "s" in BOT_TIMEFRAME: 267 | time.sleep(timeframe_to_seconds(BOT_TIMEFRAME)) 268 | else: 269 | current_time = time.localtime() 270 | seconds_until_next_minute = timeframe_to_seconds(BOT_TIMEFRAME) - current_time.tm_sec 271 | #print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}Esperando {seconds_until_next_minute} segundos hasta el siguiente analisis...') 272 | time.sleep(seconds_until_next_minute) 273 | 274 | register_func_name("do_work", locals().items()) 275 | except Exception as e: 276 | MF.write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: do_work(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 277 | exc_type, exc_obj, exc_tb = sys.exc_info() 278 | MF.write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 279 | pass 280 | #except KeyboardInterrupt as ki: 281 | #pass -------------------------------------------------------------------------------- /analisis_test_trades.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from prettytable import PrettyTable 3 | import glob 4 | import sys 5 | import os 6 | import argparse 7 | import operator 8 | import numpy as np 9 | import re 10 | import textwrap 11 | 12 | def read_csv_with_comments(file_path): 13 | """ 14 | Lee un archivo CSV ignorando las líneas que empiezan con # 15 | """ 16 | try: 17 | # Método 1: Usar comment parameter 18 | df = pd.read_csv(file_path, sep=',', comment='#') 19 | return df 20 | except Exception: 21 | # Método 2: Filtrar manualmente las líneas 22 | try: 23 | with open(file_path, 'r', encoding='utf-8') as file: 24 | lines = [line for line in file if not line.strip().startswith('#')] 25 | 26 | from io import StringIO 27 | csv_string = ''.join(lines) 28 | df = pd.read_csv(StringIO(csv_string), sep=',') 29 | return df 30 | except Exception: 31 | # Método 3: Usar on_bad_lines='skip' (pandas >= 1.3.0) 32 | try: 33 | df = pd.read_csv(file_path, sep=',', on_bad_lines='skip') 34 | return df 35 | except Exception: 36 | # Método 4: Para versiones más antiguas de pandas 37 | df = pd.read_csv(file_path, sep=',', error_bad_lines=False, warn_bad_lines=True) 38 | return df 39 | 40 | def extract_strategy_info(file_path): 41 | """ 42 | Extrae la información de las estrategias buySignal y sellSignal de los comentarios del archivo 43 | """ 44 | buy_signal = "No encontrado" 45 | sell_signal = "No encontrado" 46 | 47 | try: 48 | with open(file_path, 'r', encoding='utf-8') as file: 49 | lines = file.readlines() 50 | 51 | for line 
in lines: 52 | if line.strip().startswith('#'): 53 | # Buscar patrones de buySignal y sellSignal 54 | if 'buySignal' in line: 55 | # Extraer la parte después del = 56 | match = re.search(r'buySignal\s*=\s*(.+)', line) 57 | if match: 58 | buy_signal = match.group(1).strip() 59 | 60 | elif 'sellSignal' in line: 61 | # Extraer la parte después del = 62 | match = re.search(r'sellSignal\s*=\s*(.+)', line) 63 | if match: 64 | sell_signal = match.group(1).strip() 65 | 66 | # Si ya encontramos ambas señales, salir del bucle 67 | if buy_signal != "No encontrado" and sell_signal != "No encontrado": 68 | break 69 | 70 | except Exception as e: 71 | print(f"Error al leer estrategias del archivo {file_path}: {e}") 72 | 73 | return buy_signal, sell_signal 74 | 75 | def wrap_text(text, width=30): 76 | """Envuelve el texto en múltiples líneas para que no sea demasiado ancho""" 77 | if text == "No encontrado": 78 | return text 79 | return '\n'.join(textwrap.wrap(text, width=width)) 80 | 81 | def calculate_pl_ratio(ganancias, perdidas): 82 | """Calcula el ratio Ganancia/Pérdida""" 83 | if perdidas == 0: 84 | return float('inf') if ganancias > 0 else 0 85 | return abs(ganancias / perdidas) 86 | 87 | def calculate_sharpe_ratio(returns, risk_free_rate=0.0): 88 | """Calcula el Sharpe Ratio""" 89 | if len(returns) == 0: 90 | return 0 91 | 92 | excess_returns = returns - risk_free_rate 93 | if np.std(excess_returns) == 0: 94 | return 0 95 | 96 | return np.mean(excess_returns) / np.std(excess_returns) 97 | 98 | def calculate_max_drawdown(cumulative_returns): 99 | """Calcula el máximo drawdown""" 100 | if len(cumulative_returns) == 0: 101 | return 0 102 | 103 | peak = cumulative_returns[0] 104 | max_drawdown = 0 105 | 106 | for value in cumulative_returns: 107 | if value > peak: 108 | peak = value 109 | drawdown = (peak - value) / peak 110 | if drawdown > max_drawdown: 111 | max_drawdown = drawdown 112 | 113 | return max_drawdown 114 | 115 | parser = argparse.ArgumentParser(description='Analizador de operaciones de trading desde archivos CSV.') 116 | parser.add_argument('archivos', nargs='*', help='Los archivos CSV a analizar. Si no se especifican, se mostrará un menú.') 117 | parser.add_argument('-g', '--ganancia', action='store_true', help='Muestra la ganancia total.') 118 | parser.add_argument('-co', '--cantidad_operaciones', action='store_true', help='Muestra la cantidad de operaciones.') 119 | parser.add_argument('-p', '--perdida', action='store_true', help='Muestra la perdida total.') 120 | parser.add_argument('-pg', '--porcentaje_ganancia', action='store_true', help='Muestra el porcentaje de ganancia.') 121 | parser.add_argument('-pr', '--promedio_horas', action='store_true', help='Muestra el promedio de horas de hold.') 122 | parser.add_argument('-pa', '--promedio_ganancia', action='store_true', help='Muestra la ganancia promedio.') 123 | parser.add_argument('-c', '--comision', action='store_true', help='Muestra la comisión total.') 124 | parser.add_argument('-gm', '--ganancia_mas_alto', action='store_true', help='Muestra la ganancia más alta.') 125 | parser.add_argument('-sortby', '--ordenar_por', type=str, help='Ordena los resultados por una métrica. 
Ej: -sortby ganancia') 126 | parser.add_argument('-desc', '--descendente', action='store_true', help='Ordena los resultados de forma descendente.') 127 | parser.add_argument("--plratio", action="store_true", help="Mostrar ratio Ganancia/Pérdida") 128 | parser.add_argument("--sharpe", action="store_true", help="Mostrar Sharpe ratio") 129 | parser.add_argument("--drawdown", action="store_true", help="Mostrar Drawdown máximo") 130 | parser.add_argument("--estrategia", action="store_true", help="Mostrar estrategias de compra y venta") 131 | args = parser.parse_args() 132 | 133 | 134 | if args.archivos: 135 | if args.archivos == ["*"]: 136 | patron = os.path.join("./", f'*.csv') 137 | archivos_seleccionados = glob.glob(patron) 138 | else: 139 | archivos_seleccionados = args.archivos 140 | else: 141 | patron = os.path.join("./", f'*.csv') 142 | files = glob.glob(patron) 143 | 144 | if not files: 145 | print("No se encontraron archivos CSV en el directorio.") 146 | sys.exit() 147 | 148 | for i, file in enumerate(files): 149 | print(f"[{i+1}] {file}") 150 | 151 | print("\nSelecciona los archivos que deseas analizar (ej: 1,3,4):") 152 | seleccion_str = input("Tu selección: ") 153 | try: 154 | indices_seleccionados = [int(x.strip()) - 1 for x in seleccion_str.split(',')] 155 | archivos_seleccionados = [files[i] for i in indices_seleccionados if 0 <= i < len(files)] 156 | except (ValueError, IndexError): 157 | print("Entrada no válida. Se analizarán todos los archivos por defecto.") 158 | archivos_seleccionados = files 159 | 160 | if not archivos_seleccionados: 161 | print("No se seleccionaron archivos. Saliendo.") 162 | sys.exit() 163 | 164 | mapeo_columnas = { 165 | 'cantidad_operaciones': 'Cantidad de operaciones', 166 | 'ganancia': 'Total', 167 | 'perdida': 'Perdio', 168 | 'porcentaje_ganancia': 'Porcentaje Ganancia', 169 | 'promedio_horas': 'Promedio (horas)', 170 | 'promedio_ganancia': 'Ganancias Promedio', 171 | 'comision': 'Commission', 172 | 'ganancia_mas_alto': 'Ganancia más alto (Sell)', 173 | 'plratio': 'PL Ratio', 174 | 'sharpe': 'Sharpe Ratio', 175 | 'drawdown': 'Max Drawdown', 176 | 'buy_signal': 'Buy Signal', 177 | 'sell_signal': 'Sell Signal' 178 | } 179 | 180 | opciones_seleccionadas = any([args.ganancia, args.cantidad_operaciones, args.perdida, args.porcentaje_ganancia, 181 | args.promedio_horas, args.promedio_ganancia, args.comision, args.ganancia_mas_alto, 182 | args.plratio, args.sharpe, args.drawdown, args.estrategia]) 183 | 184 | if opciones_seleccionadas: 185 | resultados_por_archivo = [] 186 | for file in archivos_seleccionados: 187 | try: 188 | # Extraer información de la estrategia primero 189 | buy_signal, sell_signal = extract_strategy_info(file) 190 | 191 | df = read_csv_with_comments(file) 192 | df["Datetime"] = pd.to_datetime(df["Datetime"]) 193 | 194 | order_ids_to_remove = df[(df['Profit $ USDT'] == 0.0) & (df['Type'] == 'Sell')]['OrderID'].unique() 195 | df = df[~df['OrderID'].isin(order_ids_to_remove)] 196 | 197 | if len(df) > 0 and "Buy" == df.iloc[-1]["Type"]: 198 | df.drop(df.index[-1], inplace=True) 199 | 200 | buys = df[df['Type'] == "Buy"][['OrderID', 'Datetime']] 201 | sells = df[df['Type'] == "Sell"][['OrderID', 'Datetime', 'Profit $ USDT']] 202 | 203 | merged_trades = pd.merge(buys, sells, on='OrderID', suffixes=('_Buy', '_Sell')) 204 | merged_trades["Hold Time"] = (merged_trades["Datetime_Sell"] - merged_trades["Datetime_Buy"]).dt.total_seconds() / 3600 205 | unique_hold_times = merged_trades.drop_duplicates(subset='OrderID', keep='first') 206 | 
hold_time_series = unique_hold_times.set_index('OrderID')['Hold Time'] 207 | 208 | df["Hold Time"] = pd.NA 209 | df.loc[df['Type'] == 'Sell', "Hold Time"] = df.loc[df['Type'] == 'Sell', 'OrderID'].map(hold_time_series) 210 | df["Hold Time"] = pd.to_numeric(df["Hold Time"], errors='coerce') 211 | 212 | max_hold = df["Hold Time"].max() 213 | promedio_hold = df["Hold Time"].mean() 214 | sells_profits = df[df['Type'] == "Sell"]["Profit $ USDT"] 215 | max_price = sells_profits.max() 216 | avg_profit = sells_profits.mean() 217 | total_profit = sells_profits.sum() 218 | ganado = sells_profits[sells_profits > 0].sum() 219 | perdido = sells_profits[sells_profits < 0].sum() 220 | Commission = df["Commission"].sum() 221 | ganancia_percent = (total_profit / ganado) * 100 if ganado > 0 else 0.0 222 | 223 | # Calcular nuevas métricas 224 | pl_ratio = calculate_pl_ratio(ganado, perdido) 225 | 226 | # Calcular Sharpe Ratio (usando returns diarios aproximados) 227 | daily_returns = [] 228 | if len(merged_trades) > 0: 229 | merged_trades = merged_trades.sort_values('Datetime_Sell') 230 | daily_returns = merged_trades['Profit $ USDT'].values 231 | sharpe_ratio = calculate_sharpe_ratio(daily_returns) 232 | else: 233 | sharpe_ratio = 0 234 | 235 | # Calcular Drawdown máximo 236 | cumulative_returns = np.cumsum(daily_returns) if len(daily_returns) > 0 else [0] 237 | max_drawdown = calculate_max_drawdown(cumulative_returns) 238 | 239 | resultados_archivo = { 240 | 'archivo': os.path.basename(file), 241 | 'Cantidad de operaciones': len(merged_trades), 242 | 'Total': round(total_profit, 2), 243 | 'Perdio': round(perdido, 2), 244 | 'Porcentaje Ganancia': round(ganancia_percent, 2), 245 | 'Promedio (horas)': round(promedio_hold, 2), 246 | 'Ganancias Promedio': str(round(avg_profit, 2)) + " USDT", 247 | 'Commission': round(Commission, 2), 248 | 'Ganancia más alto (Sell)': round(max_price, 2), 249 | 'PL Ratio': round(pl_ratio, 2), 250 | 'Sharpe Ratio': round(sharpe_ratio, 2), 251 | 'Max Drawdown': round(max_drawdown * 100, 2), # Convertir a porcentaje 252 | 'Buy Signal': wrap_text(buy_signal, 40), # Texto envuelto 253 | 'Sell Signal': wrap_text(sell_signal, 40) # Texto envuelto 254 | } 255 | resultados_por_archivo.append(resultados_archivo) 256 | 257 | except FileNotFoundError: 258 | print(f"Error: El archivo '{file}' no fue encontrado. Ignorando.") 259 | except Exception as e: 260 | print(f"Ocurrió un error al procesar el archivo '{file}': {e}. Ignorando.") 261 | exc_type, exc_obj, exc_tb = sys.exc_info() 262 | print('Error on line ' + str(exc_tb.tb_lineno)) 263 | 264 | if args.ordenar_por: 265 | clave_ordenamiento = mapeo_columnas.get(args.ordenar_por) 266 | if clave_ordenamiento: 267 | try: 268 | resultados_por_archivo.sort(key=operator.itemgetter(clave_ordenamiento), reverse=args.descendente) 269 | except KeyError: 270 | print(f"Advertencia: No se puede ordenar por '{args.ordenar_por}'. La columna no existe en los resultados. 
Se mostrará sin ordenar.") 271 | 272 | my_table = PrettyTable() 273 | my_table.title = 'Informe de Análisis de Trades' 274 | my_table.border = True 275 | my_table.align = "c" 276 | my_table.valign = "m" 277 | my_table.hrules = True # Agregar líneas horizontales entre filas 278 | 279 | columnas = ['Archivo'] 280 | if args.cantidad_operaciones: columnas.append(mapeo_columnas['cantidad_operaciones']) 281 | if args.ganancia: columnas.append(mapeo_columnas['ganancia']) 282 | if args.perdida: columnas.append(mapeo_columnas['perdida']) 283 | if args.porcentaje_ganancia: columnas.append(mapeo_columnas['porcentaje_ganancia']) 284 | if args.promedio_horas: columnas.append(mapeo_columnas['promedio_horas']) 285 | if args.promedio_ganancia: columnas.append(mapeo_columnas['promedio_ganancia']) 286 | if args.comision: columnas.append(mapeo_columnas['comision']) 287 | if args.ganancia_mas_alto: columnas.append(mapeo_columnas['ganancia_mas_alto']) 288 | if args.plratio: columnas.append(mapeo_columnas['plratio']) 289 | if args.sharpe: columnas.append(mapeo_columnas['sharpe']) 290 | if args.drawdown: columnas.append(mapeo_columnas['drawdown']) 291 | if args.estrategia: 292 | columnas.append(mapeo_columnas['buy_signal']) 293 | columnas.append(mapeo_columnas['sell_signal']) 294 | 295 | my_table.field_names = columnas 296 | 297 | for resultado in resultados_por_archivo: 298 | fila = [resultado['archivo']] 299 | if args.cantidad_operaciones: fila.append(resultado[mapeo_columnas['cantidad_operaciones']]) 300 | if args.ganancia: fila.append(resultado[mapeo_columnas['ganancia']]) 301 | if args.perdida: fila.append(resultado[mapeo_columnas['perdida']]) 302 | if args.porcentaje_ganancia: fila.append(resultado[mapeo_columnas['porcentaje_ganancia']]) 303 | if args.promedio_horas: fila.append(resultado[mapeo_columnas['promedio_horas']]) 304 | if args.promedio_ganancia: fila.append(resultado[mapeo_columnas['promedio_ganancia']]) 305 | if args.comision: fila.append(resultado[mapeo_columnas['comision']]) 306 | if args.ganancia_mas_alto: fila.append(resultado[mapeo_columnas['ganancia_mas_alto']]) 307 | if args.plratio: fila.append(resultado[mapeo_columnas['plratio']]) 308 | if args.sharpe: fila.append(resultado[mapeo_columnas['sharpe']]) 309 | if args.drawdown: fila.append(resultado[mapeo_columnas['drawdown']]) 310 | if args.estrategia: 311 | fila.append(resultado[mapeo_columnas['buy_signal']]) 312 | fila.append(resultado[mapeo_columnas['sell_signal']]) 313 | 314 | my_table.add_row(fila) 315 | 316 | print(my_table.get_string()) 317 | else: 318 | my_table = PrettyTable() 319 | my_table.format = True 320 | my_table.border = True 321 | my_table.align = "c" 322 | my_table.valign = "m" 323 | my_table.left_padding_width = 1 324 | my_table.right_padding_width = 1 325 | my_table.hrules = True # Agregar líneas horizontales entre filas 326 | my_table.title = f'Informe' 327 | # Agregar las nuevas métricas a la tabla completa 328 | my_table.add_column("Data-Archivo", [ 329 | "Cantidad de operaciones", "Total", "Gano", "Perdio", "Porcentaje Ganancia", 330 | "Ganancia más alto (Sell)", "Ganancia más bajo (Sell)", "Máximo (horas)", 331 | "Mínimo (horas)", "Promedio (horas)", "Ganancias Promedio", "Commission", 332 | "PL Ratio", "Sharpe Ratio", "Max Drawdown %", 333 | "0 to 1", "1.01 to 2", "2.01 to 3", "3.01 to 4", "4.01 to 5", 334 | "-1 to 0", "-2 to -1.01", "-3 to -2.01", "-4 to -3.01", "-5 to -4.01", 335 | "Mes 1","Mes 2","Mes 3","Mes 4","Mes 5","Mes 6","Mes 7","Mes 8","Mes 9","Mes 10","Mes 11","Mes 12" 336 | #"Buy Signal", "Sell 
Signal" 337 | ]) 338 | 339 | for file in archivos_seleccionados: 340 | try: 341 | # Extraer información de la estrategia 342 | buy_signal, sell_signal = extract_strategy_info(file) 343 | 344 | df = read_csv_with_comments(file) 345 | df["Datetime"] = pd.to_datetime(df["Datetime"]) 346 | 347 | order_ids_to_remove = df[(df['Profit $ USDT'] == 0.0) & (df['Type'] == 'Sell')]['OrderID'].unique() 348 | df = df[~df['OrderID'].isin(order_ids_to_remove)] 349 | 350 | if len(df) > 0 and "Buy" == df.iloc[-1]["Type"]: 351 | df.drop(df.index[-1], inplace=True) 352 | 353 | buys = df[df['Type'] == "Buy"][['OrderID', 'Datetime']] 354 | sells = df[df['Type'] == "Sell"][['OrderID', 'Datetime', 'Profit $ USDT']] 355 | 356 | merged_trades = pd.merge(buys, sells, on='OrderID', suffixes=('_Buy', '_Sell')) 357 | merged_trades["Hold Time"] = (merged_trades["Datetime_Sell"] - merged_trades["Datetime_Buy"]).dt.total_seconds() / 3600 358 | unique_hold_times = merged_trades.drop_duplicates(subset='OrderID', keep='first') 359 | hold_time_series = unique_hold_times.set_index('OrderID')['Hold Time'] 360 | 361 | df["Hold Time"] = pd.NA 362 | df.loc[df['Type'] == 'Sell', "Hold Time"] = df.loc[df['Type'] == 'Sell', 'OrderID'].map(hold_time_series) 363 | df["Hold Time"] = pd.to_numeric(df["Hold Time"], errors='coerce') 364 | 365 | max_hold = df["Hold Time"].max() 366 | min_hold = df["Hold Time"].min() 367 | promedio_hold = df["Hold Time"].mean() 368 | 369 | sells_profits = df[df['Type'] == "Sell"]["Profit $ USDT"] 370 | max_price = sells_profits.max() 371 | min_price = sells_profits.min() 372 | avg_profit = sells_profits.mean() 373 | 374 | total_profit = sells_profits.sum() 375 | 376 | ganado = sells_profits[sells_profits > 0].sum() 377 | perdido = sells_profits[sells_profits < 0].sum() 378 | 379 | Commission = df["Commission"].sum() 380 | 381 | if ganado > 0: 382 | ganancia_percent = (total_profit / ganado) * 100 383 | else: 384 | ganancia_percent = 0.0 385 | 386 | # Calcular nuevas métricas 387 | pl_ratio = calculate_pl_ratio(ganado, perdido) 388 | 389 | # Calcular Sharpe Ratio 390 | daily_returns = [] 391 | if len(merged_trades) > 0: 392 | merged_trades = merged_trades.sort_values('Datetime_Sell') 393 | daily_returns = merged_trades['Profit $ USDT'].values 394 | sharpe_ratio = calculate_sharpe_ratio(daily_returns) 395 | else: 396 | sharpe_ratio = 0 397 | 398 | # Calcular Drawdown máximo 399 | cumulative_returns = np.cumsum(daily_returns) if len(daily_returns) > 0 else [0] 400 | max_drawdown = calculate_max_drawdown(cumulative_returns) 401 | 402 | profit = sells_profits 403 | ranges = [ 404 | (0, 1), (1.01, 2), (2.01, 3), (3.01, 4), (4.01, 5), 405 | (-1, 0), (-2, -1.01), (-3, -2.01), (-4, -3.01), (-5, -4.01) 406 | ] 407 | range_counts = {} 408 | for low, high in ranges: 409 | if low < high: 410 | count = profit[(profit > low) & (profit <= high)].count() 411 | else: 412 | count = profit[(profit < low) & (profit >= high)].count() 413 | range_counts[f"{low} to {high}"] = count 414 | 415 | rango = [] 416 | rango = ["No Data"] * 10 417 | c = 0 418 | for k, v in range_counts.items(): 419 | rango[c] = f"{v}" 420 | c = c + 1 421 | 422 | df["Month"] = df["Datetime"].dt.to_period("M") 423 | ops_por_mes = df.groupby("Month").size() 424 | ops_por_mes_desc = ops_por_mes.sort_index(ascending=True) 425 | ganancia_por_mes = df.groupby("Month")['Profit $ USDT'].apply(lambda x: x[x > 0].sum()) 426 | perdida_por_mes = df.groupby("Month")['Profit $ USDT'].apply(lambda x: x[x < 0].sum()) 427 | 428 | meses_con_ganancia = {} 429 | for mes_period, 
num_ops in ops_por_mes.items(): 430 | ganancia = ganancia_por_mes.get(mes_period, 0) 431 | perdida = perdida_por_mes.get(mes_period, 0) 432 | meses_con_ganancia[mes_period] = f"{num_ops} ({round(ganancia, 2)}/{round(perdida, 2)})" 433 | 434 | meses = ["No Data"] * 13 435 | meses_ordenados = sorted(meses_con_ganancia.keys()) 436 | for mes_periodo in meses_ordenados: 437 | mes_numero = mes_periodo.month 438 | meses[mes_numero] = meses_con_ganancia[mes_periodo] 439 | 440 | file_name_display = os.path.basename(file)[:15] + "..." if len(os.path.basename(file)) > 15 else os.path.basename(file) 441 | my_table.add_column(file_name_display, [ 442 | len(merged_trades), round(total_profit,2), round(ganado,2), round(perdido,2), 443 | round(ganancia_percent,2), round(max_price,2), round(min_price,2), 444 | round(max_hold,2), round(min_hold,4), round(promedio_hold,2), 445 | str(round(avg_profit,2)) + " USDT", round(Commission,2), 446 | round(pl_ratio, 2), round(sharpe_ratio, 2), round(max_drawdown * 100, 2), 447 | rango[0], rango[1], rango[2], rango[3], rango[4], rango[5], 448 | rango[6], rango[7], rango[8], rango[9], 449 | meses[1], meses[2], meses[3], meses[4], meses[5], meses[6], 450 | meses[7], meses[8], meses[9], meses[10], meses[11], meses[12] 451 | #wrap_text(buy_signal, 25), 452 | #wrap_text(sell_signal, 25) 453 | ]) 454 | except FileNotFoundError: 455 | print(f"Error: El archivo '{file}' no fue encontrado. Ignorando.") 456 | except pd.errors.EmptyDataError: 457 | print(f"Error: El archivo '{file}' está vacío. Ignorando.") 458 | except Exception as e: 459 | print(f"Ocurrió un error al procesar el archivo '{file}': {e}. Ignorando.") 460 | exc_type, exc_obj, exc_tb = sys.exc_info() 461 | print('Error on line ' + str(exc_tb.tb_lineno)) 462 | 463 | print(my_table.get_string()) -------------------------------------------------------------------------------- /megatronmod_functions.py: -------------------------------------------------------------------------------- 1 | # Megatronmod Strategy - All in One 2 | # Created by: Horacio Oscar Fanelli - Pantersxx3 and NokerPlay 3 | # This mod can be used only with: 4 | # https://github.com/pantersxx3/Binance-Bot 5 | # 6 | # No future support offered, use this script at own risk - test before using real funds 7 | # If you lose money using this MOD (and you will at some point) you've only got yourself to blame! 
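#
# Illustrative sketch only (not part of the original sources): the rough shape of the
# trades CSV that analisis_test_trades.py above and the read_*_value() helpers in this
# module assume. The column names are taken from the column_names list used by
# read_volume_value()/read_commission_value(); the "# buySignal =" / "# sellSignal ="
# header comments are what extract_strategy_info() parses with its regexes. The file
# name, the signal expressions and the sample rows below are invented, and whether a
# header row is present may differ between the live/test trades.csv and the backtest CSVs.
#
#   test_trades.csv (example):
#     # buySignal = Crossover(EMA_FAST, EMA_SLOW) and Rsi(DATA, 14) < 30
#     # sellSignal = Tp(coin, CLOSE, TAKE_PROFIT) or Sl(coin, CLOSE, STOP_LOSS)
#     Datetime, OrderID, Type, Coin, Volume, Buy Price, Amount of Buy USDT, Sell Price, Amount of Sell USDT, Sell Reason, Profit $ USDT, Commission
#     2024-01-05 10:15:00, 1, Buy, ETH, 0.5, 2200.0, 1100.0, 0, 0, , 0, 1.1
#     2024-01-05 14:30:00, 1, Sell, ETH, 0.5, 2200.0, 1100.0, 2255.0, 1127.5, TP, 27.5, 1.12
#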
8 | 9 | #Crossover, Crossunder, Cross, Ichimoku, Bollinger Bands, Supertrend, Momentum, Hikinashi 10 | #Macd, Cci, SL, TP, Bought_at, Zigzag, Ema, Sma, Stochastic, Rsi, Wma, Hma 11 | 12 | from tradingview_ta import TA_Handler, Interval, Exchange 13 | from binance.client import Client, BinanceAPIException 14 | from helpers.parameters import parse_args, load_config 15 | 16 | from datetime import date, datetime, timedelta 17 | from collections import defaultdict 18 | import pandas_ta as ta #pta 19 | import pandas as pd 20 | from ta.trend import SMAIndicator, EMAIndicator, CCIIndicator, MACD, ADXIndicator 21 | from ta.momentum import RSIIndicator 22 | from ta.volatility import BollingerBands, AverageTrueRange 23 | from ta.volume import OnBalanceVolumeIndicator, VolumeWeightedAveragePrice 24 | import threading 25 | import os 26 | import sys 27 | import glob 28 | import math 29 | import time 30 | import ccxt 31 | import re 32 | import json 33 | import numpy as np 34 | import random 35 | 36 | # Load creds modules 37 | from helpers.handle_creds import ( 38 | load_correct_creds, load_discord_creds 39 | ) 40 | 41 | global config_file, creds_file, parsed_creds, parsed_config, USE_MOST_VOLUME_COINS, PAIR_WITH, SELL_ON_SIGNAL_ONLY, TEST_MODE, LOG_FILE 42 | global COINS_BOUGHT, EXCHANGE, SCREENER, STOP_LOSS, TAKE_PROFIT, TRADE_SLOTS, BACKTESTING_MODE, BACKTESTING_MODE_TIME_START, SIGNAL_NAME 43 | global access_key, secret_key, client, txcolors, bought, timeHold, ACTUAL_POSITION, args, TEST_MODE, BACKTESTING_MODE 44 | global USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES, LANGUAGE, MAX_HOLDING_TIME 45 | 46 | 47 | class txcolors: 48 | BUY = '\033[92m' 49 | WARNING = '\033[93m' 50 | SELL_LOSS = '\033[91m' 51 | SELL_PROFIT = '\033[32m' 52 | DIM = '\033[2m\033[35m' 53 | Red = '\033[31m' 54 | DEFAULT = '\033[39m' 55 | 56 | DEFAULT_CONFIG_FILE = 'config.yml' 57 | DEFAULT_CREDS_FILE = 'creds.yml' 58 | 59 | # Settings 60 | args = parse_args() 61 | config_file = args.config if args.config else DEFAULT_CONFIG_FILE 62 | creds_file = args.creds if args.creds else DEFAULT_CREDS_FILE 63 | parsed_creds = load_config(creds_file) 64 | parsed_config = load_config(config_file) 65 | access_key, secret_key = load_correct_creds(parsed_creds) 66 | 67 | LANGUAGE = parsed_config['script_options']['LANGUAGE'] 68 | USE_MOST_VOLUME_COINS = parsed_config['trading_options']['USE_MOST_VOLUME_COINS'] 69 | #BACKTESTING_MODE_TIME_START = parsed_config['script_options']['BACKTESTING_MODE_TIME_START'] 70 | PAIR_WITH = parsed_config['trading_options']['PAIR_WITH'] 71 | SELL_ON_SIGNAL_ONLY = parsed_config['trading_options']['SELL_ON_SIGNAL_ONLY'] 72 | MODE = parsed_config['script_options']['MODE'] 73 | LOG_FILE = parsed_config['script_options'].get('LOG_FILE') 74 | COINS_BOUGHT = parsed_config['script_options'].get('COINS_BOUGHT') 75 | STOP_LOSS = parsed_config['trading_options']['STOP_LOSS'] 76 | TAKE_PROFIT = parsed_config['trading_options']['TAKE_PROFIT'] 77 | TRADES_GRAPH = parsed_config['script_options'].get('TRADES_GRAPH') 78 | TRADES_INDICATORS = parsed_config['script_options'].get('TRADES_INDICATORS') 79 | TRADE_SLOTS = parsed_config['trading_options']['TRADE_SLOTS'] 80 | #BACKTESTING_MODE = parsed_config['script_options']['BACKTESTING_MODE'] 81 | BACKTESTING_MODE_TIME_START = parsed_config['script_options']['BACKTESTING_MODE_TIME_START'] 82 | MAX_HOLDING_TIME = parsed_config['trading_options']['MAX_HOLDING_TIME'] 83 | MICROSECONDS = 2 84 | 85 | #ACTUAL_POSITION = 0 86 | SIGNAL_NAME = 'MEGATRONMOD' 87 | #CREATE_BUY_SELL_FILES = 
True 88 | #DEBUG = True 89 | EXCHANGE = 'BINANCE' 90 | SCREENER = 'CRYPTO' 91 | 92 | #JSON_FILE_BOUGHT = SIGNAL_NAME + '.json' 93 | 94 | def write_log(logline, LOGFILE = LOG_FILE, show = True, time = False): 95 | try: 96 | from Boot import set_correct_mode 97 | TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES = set_correct_mode(LANGUAGE, MODE, True) 98 | if TEST_MODE: 99 | file_prefix = 'test_' 100 | else: 101 | file_prefix = 'live_' 102 | with open(file_prefix + LOGFILE,'a') as f: 103 | ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])') 104 | result = ansi_escape.sub('', logline) 105 | if show: print(f'{logline}') 106 | if time: 107 | timestamp = datetime.now().strftime('%Y-%d-%m %H:%M:%S') + ',' 108 | else: 109 | timestamp = '' 110 | f.write(timestamp + result + '\n') 111 | except Exception as e: 112 | print(f'{txcolors.DEFAULT}{SIGNAL_NAME} write_log: Exception in function: {e}{txcolors.DEFAULT}') 113 | exc_type, exc_obj, exc_tb = sys.exc_info() 114 | print('Error on line ' + str(exc_tb.tb_lineno)) 115 | exit(1) 116 | 117 | def read_position_csv(coin): 118 | try: 119 | pos1 = 0 120 | if os.path.exists(coin + '.position'): 121 | f = open(coin + '.position', 'r') 122 | pos1 = int(f.read().replace('.0', '')) 123 | f.close() 124 | else: 125 | pos1 = -1 126 | #os.remove(coin + '.position') 127 | except Exception as e: 128 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: read_position_csv(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 129 | exc_type, exc_obj, exc_tb = sys.exc_info() 130 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 131 | pass 132 | return pos1 133 | 134 | def get_analysis(d, tf, p, position1=0, num_records=1000): 135 | try: 136 | global BACKTESTING_MODE, BACKTESTING_MODE_TIME_START 137 | from Boot import set_correct_mode 138 | TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES = set_correct_mode(LANGUAGE, MODE, True) 139 | c = pd.DataFrame([]) 140 | e = 0 141 | if BACKTESTING_MODE: 142 | if position1 > 0: 143 | if d.empty: 144 | d = pd.read_csv(p + '.csv') 145 | d.columns = ['time', 'Open', 'High', 'Low', 'Close', 'Volume'] 146 | d['Close'] = d['Close'].astype(float) 147 | else: 148 | d['Close'] = d['Close'].astype(float) 149 | #c = d.query('time < @position1').tail(num_records) 150 | idx = d["time"].searchsorted(position1) 151 | c = d.iloc[max(0, idx - num_records):idx] 152 | inttime = int(position1)/1000 153 | position2 = c['time'].iloc[0] 154 | #print(f'{txcolors.SELL_PROFIT}{SIGNAL_NAME}: {txcolors.DEFAULT}{BACKTESTING_MODE_TIME_START} - Posicion actual {datetime.fromtimestamp(inttime).strftime("%d/%m/%y %H:%M:%S")} - {position2} - {position1}...{txcolors.DEFAULT}') 155 | d = pd.DataFrame([]) 156 | else: 157 | client = Client(access_key, secret_key) 158 | if "m" in tf: 159 | back = 'min ago UTC' 160 | elif "h" in tf: 161 | back = 'hour ago UTC' 162 | elif "d" in tf: 163 | back = 'day ago UTC' 164 | elif "w" in tf: 165 | back = 'week ago UTC' 166 | elif "M" in tf: 167 | back = 'month ago UTC' 168 | 169 | klines = client.get_historical_klines(symbol=p, interval=tf, start_str=str(num_records) + back, limit=num_records) 170 | c = pd.DataFrame(klines) 171 | c.columns = ['time', 'Open', 'High', 'Low', 'Close', 'Volume', 'CloseTime', 'QuoteAssetVolume', 'Trades', 'TakerBuyBase', 'TakerBuyQuote', 'Ignore'] 172 | c = c.drop(c.columns[[6, 7, 8, 9, 10, 11]], axis=1) 173 | c['time'] = pd.to_datetime(c['time'], unit='ms') 174 
| c['Close'] = c['Close'].astype(float) 175 | #print(c) 176 | except Exception as e: 177 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: get_analysis(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 178 | exc_type, exc_obj, exc_tb = sys.exc_info() 179 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, True) 180 | return c 181 | 182 | 183 | def Crossunder(arr1, arr2): 184 | CrossUnder = 0 185 | if not arr1 == None or not arr2 == None: 186 | if arr1 != arr2: 187 | if arr1 > arr2 and arr2 < arr1: 188 | CrossUnder = True 189 | else: 190 | CrossUnder = False 191 | else: 192 | CrossUnder = False 193 | return CrossUnder 194 | 195 | def Crossover(arr1, arr2): 196 | CrossOver = 0 197 | if not arr1 == None or not arr2 == None: 198 | if arr1 != arr2: 199 | if arr1 < arr2 and arr2 > arr1: 200 | CrossOver = True 201 | else: 202 | CrossOver = False 203 | else: 204 | CrossOver = False 205 | return CrossOver 206 | 207 | def Cross(arr1, arr2): 208 | if round(arr1, 8) == round(arr2, 8): 209 | Cross = True 210 | else: 211 | Cross = False 212 | return Cross 213 | 214 | def isNone(var): 215 | if var == None: 216 | r = 0 217 | else: 218 | r = var 219 | return r 220 | 221 | def read_volume_value(coin, type): 222 | try: 223 | if TEST_MODE: 224 | file_prefix = 'test_' 225 | else: 226 | file_prefix = 'live_' 227 | 228 | if os.path.exists(file_prefix + 'trades.csv'): 229 | column_names = ["Datetime", "OrderID", "Type", "Coin", "Volume", "Buy Price", "Amount of Buy USDT", "Sell Price", "Amount of Sell USDT", "Sell Reason", "Profit $ USDT", "Commission"] 230 | df = pd.read_csv(file_prefix + 'trades.csv', delimiter=',', names=column_names) 231 | #Datetime, OrderID, Type, Coin, Volume, Buy Price, Amount of Buy USDT, Sell Price, Amount of Sell USDT, Sell Reason, Profit $ USDT 232 | filtered = df[(df["Type"] == " " + type) & (df["Coin"] == " " + coin.replace(PAIR_WITH, ""))] 233 | if len(filtered) > 1: 234 | return round(float(filtered["Volume"].iloc[-1]),8) 235 | elif len(filtered) == 1: 236 | return round(float(filtered["Volume"].iloc[0]), 8) 237 | else: 238 | return 0.0 239 | else: 240 | return 0.0 241 | except Exception as e: 242 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: read_comission_value(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 243 | exc_type, exc_obj, exc_tb = sys.exc_info() 244 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 245 | 246 | def read_commission_value(coin, type): 247 | try: 248 | if TEST_MODE: 249 | file_prefix = 'test_' 250 | else: 251 | file_prefix = 'live_' 252 | 253 | if os.path.exists(file_prefix + 'trades.csv'): 254 | column_names = ["Datetime", "OrderID", "Type", "Coin", "Volume", "Buy Price", "Amount of Buy USDT", "Sell Price", "Amount of Sell USDT", "Sell Reason", "Profit $ USDT", "Commission"] 255 | df = pd.read_csv(file_prefix + 'trades.csv', names=column_names) 256 | #Datetime, OrderID, Type, Coin, Volume, Buy Price, Amount of Buy USDT, Sell Price, Amount of Sell USDT, Sell Reason, Profit $ USDT 257 | filtered = df[(df["Type"] == " " + type) & (df["Coin"] == " " + coin.replace(PAIR_WITH, ""))] 258 | if len(filtered) > 1: 259 | return round(float(filtered["Commission"].iloc[-1]), 8) 260 | elif len(filtered) == 1: 261 | return round(float(filtered["Commission"].iloc[0]), 8) 262 | else: 263 | return 0.0 264 | else: 265 | return 0.0 266 | except Exception as e: 267 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: 
{txcolors.SELL_LOSS} - Exception: read_commission_value(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 268 | exc_type, exc_obj, exc_tb = sys.exc_info() 269 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 270 | 271 | def read_sell_value(coin): 272 | try: 273 | if TEST_MODE: 274 | file_prefix = 'test_' 275 | else: 276 | file_prefix = 'live_' 277 | 278 | if os.path.exists(file_prefix + 'trades.csv'): 279 | column_names = ["Datetime", "OrderID", "Type", "Coin", "Volume", "Buy Price", "Amount of Buy USDT", "Sell Price", "Amount of Sell USDT", "Sell Reason", "Profit $ USDT", "Commission"] 280 | df = pd.read_csv(file_prefix + 'trades.csv', names=column_names) 281 | #Datetime, OrderID, Type, Coin, Volume, Buy Price, Amount of Buy USDT, Sell Price, Amount of Sell USDT, Sell Reason, Profit $ USDT 282 | filtered = df[(df["Type"] ==' Sell') & (df["Coin"] == " " + coin.replace(PAIR_WITH, ""))] 283 | if len(filtered) > 1: 284 | return round(float(filtered["Sell Price"].iloc[-1]), 8) 285 | elif len(filtered) == 1: 286 | return round(float(filtered["Sell Price"].iloc[0]), 8) 287 | else: 288 | return 0.0 289 | else: 290 | return 0.0 291 | except Exception as e: 292 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: read_sell_value(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 293 | exc_type, exc_obj, exc_tb = sys.exc_info() 294 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 295 | 296 | def load_json(p): 297 | try: 298 | from Boot import set_correct_mode 299 | TEST_MODE, BACKTESTING_MODE, USE_TESNET_IN_ONLINEMODE, USE_SIGNALLING_MODULES = set_correct_mode(LANGUAGE, MODE, True) 300 | bought_analysis1MIN = {} 301 | value1 = 0 302 | value2 = 0 303 | value3 = 0 304 | if TEST_MODE: 305 | file_prefix = 'test_' 306 | else: 307 | file_prefix = 'live_' 308 | coins_bought_file_path = file_prefix + COINS_BOUGHT 309 | if os.path.exists(coins_bought_file_path) and os.path.getsize(coins_bought_file_path) > 2: 310 | with open(coins_bought_file_path,'r') as f: 311 | bought_analysis1MIN = json.load(f) 312 | for analysis1MIN in bought_analysis1MIN.keys(): 313 | value3 = value3 + 1 314 | if p in bought_analysis1MIN: 315 | value1 = round(float(bought_analysis1MIN[p]['bought_at']),8) 316 | value2 = round(float(bought_analysis1MIN[p]['time']),8) 317 | bought_analysis1MIN = {} 318 | except Exception as e: 319 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: load_json(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 320 | exc_type, exc_obj, exc_tb = sys.exc_info() 321 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 322 | return value1, value2, value3 323 | 324 | def isfloat(num): 325 | try: 326 | float(num) 327 | return True 328 | except ValueError: 329 | return False 330 | 331 | def print_dic(dic, with_key=False, with_value=True): 332 | try: 333 | str1 = '' 334 | for key, value in dic.items(): 335 | if with_key == False: 336 | if not value == {}: 337 | if isfloat(value): 338 | str1 = str1 + str(round(float(value),8)) + ',' 339 | else: 340 | str1 = str1 + str(value) + ',' 341 | else: 342 | if with_value: 343 | if not value == {}: 344 | if isfloat(value): 345 | str1 = str1 + str(key) + ':' + str(round(float(value),8)) + ',' 346 | else: 347 | str1 = str1 + str(key) + ':' + str(value) + ',' 348 | else: 349 | str1 = str1 + str(key) + ',' 350 | except Exception as e: 351 | 
write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: print_dic(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 352 | exc_type, exc_obj, exc_tb = sys.exc_info() 353 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 354 | return str1[:-1] 355 | 356 | def list_indicators(): 357 | try: 358 | list_indicators = [] 359 | list_indicators = ["Bollinger_Bands", "Cci", "Cross", "Crossover", "Crossunder", "Ema", "Heikinashi", "Hma", "Ichimoku", "Macd", "Momentum", "Rsi", "Sl", "Sma", "Stochastic", "Supertrend", "Tp", "Wma", "Zigzag"] 360 | except Exception as e: 361 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.Red}Exception: list_indicators(): {e}', SIGNAL_NAME + '.log', True, False) 362 | exc_type, exc_obj, exc_tb = sys.exc_info() 363 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 364 | pass 365 | return list_variables 366 | 367 | def defaultdict_from_dict(d): 368 | nd = lambda: defaultdict(nd) 369 | ni = nd() 370 | ni.update(d) 371 | return ni 372 | 373 | def ret_time(df): 374 | TIME_1M = df['time'].iloc[-1] 375 | if not isinstance(TIME_1M, pd._libs.tslibs.timestamps.Timestamp): 376 | time1 = int(TIME_1M)/1000 377 | time_1MIN = datetime.fromtimestamp(int(time1)).strftime("%d/%m/%y %H:%M:%S") 378 | else: 379 | time_1MIN = TIME_1M 380 | return time_1MIN 381 | 382 | # def save_indicator(items): 383 | # try: 384 | 385 | # if TEST_MODE: 386 | # file_prefix = 'test_' 387 | # else: 388 | # file_prefix = 'live_' 389 | 390 | # data_indicator = pd.DataFrame([]) 391 | # csv_indicators = file_prefix + TRADES_INDICATORS 392 | 393 | # for name, myvalue in list(items): 394 | # if name.endswith('_IND'): # or name == 'time_1MIN': 395 | # myvalue = str(myvalue).strip() 396 | # data_indicators = pd.DataFrame([]) 397 | # data_indicators[name] = [myvalue] 398 | 399 | # if not data_indicators.empty: 400 | # data_indicators.to_csv(csv_indicators.replace('.csv', '') + "_" + name + '.csv', mode='a', index=False, header=False) 401 | 402 | # except Exception as e: 403 | # write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.Red}Exception: #save_indicator(): {e}', SIGNAL_NAME + '.log', True, False) 404 | # exc_type, exc_obj, exc_tb = sys.exc_info() 405 | # write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 406 | # pass 407 | 408 | # def save_strategy(items): 409 | # try: 410 | # TRADES_STRATEGY = 'strategy.csv' 411 | # if TEST_MODE: 412 | # file_prefix = 'test_' 413 | # else: 414 | # file_prefix = 'live_' 415 | 416 | # data_strategy = pd.DataFrame([]) #buySignal price sellSignal price result 417 | # csv_strategy = file_prefix + TRADES_STRATEGY 418 | 419 | # for name, myvalue in list(items): 420 | # if name.startswith('buy') or name.startswith('sell'): 421 | # myvalue = str(myvalue).strip() 422 | # data_strategy = pd.DataFrame([]) 423 | # if 'buy' in name: 424 | # data_strategy [name] = [myvalue] 425 | 426 | # if not data_strategy.empty: 427 | # data_strategy.to_csv(csv_strategy, mode='a', index=False, header=False) 428 | 429 | # except Exception as e: 430 | # write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.Red}Exception: #save_indicator(): {e}', SIGNAL_NAME + '.log', True, False) 431 | # exc_type, exc_obj, exc_tb = sys.exc_info() 432 | # write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 433 | # pass 434 | 435 | def Ichimoku(DF_Data, TENKA, KIJUN, SENKU): 436 | df = pd.DataFrame(DF_Data) 437 | df[['spanA', 'spanB', 
'tenkan_sen', 'kijun_sen', 'chikou_span']] = ta.ichimoku(DF_Data['High'], DF_Data['Low'], DF_Data['Close'], TENKA, KIJUN, SENKU) 438 | spanA = round(df['spanA'], 8) 439 | spanB = round(df['spanB'], 8) 440 | tenkan_sen_IND = round(df['tenkan_sen'], 8) 441 | kijun_sen_IND = round(df['kijun_sen'], 8) 442 | chikou_span_IND = round(df['chikou_span'], 8) 443 | #time_1MIN = ret_time(DF_Data) 444 | #save_indicator(locals().items()) 445 | return spanA_IND, spanB_IND, tenkan_sen_IND, kijun_sen_IND, chikou_span_IND 446 | 447 | def Heikinashi(DF_Data): 448 | HEIKINASHI_1M_DATA = pd.DataFrame() 449 | HEIKINASHI_1M_DATA[['ha_open', 'ha_high', 'ha_low', 'ha_Close']] = ta.ha(DF_Data['Open'], DF_Data['High'], DF_Data['Low'], DF_Data['Close']) 450 | HEIKINASHI_OPEN_IND = round(HEIKINASHI_1M_DATA['ha_open'], 8) 451 | HEIKINASHI_HIGH_IND = round(HEIKINASHI_1M_DATA['ha_high'], 8) 452 | HEIKINASHI_LOW_IND = round(HEIKINASHI_1M_DATA['ha_low'], 8) 453 | HEIKINASHI_CLOSE_IND = round(HEIKINASHI_1M_DATA['ha_Close'], 8) 454 | #time_1MIN = ret_time(DF_Data) 455 | #save_indicator(locals().items()) 456 | return HEIKINASHI_OPEN_IND, HEIKINASHI_HIGH_IND, HEIKINASHI_LOW_IND, HEIKINASHI_CLOSE_IND 457 | 458 | def Bollinger_Bands(DF_Data, LENGHT, STD): 459 | df = pd.DataFrame() 460 | df[['lower', 'middle', 'upper', 'bandwidth', 'percentcolumns']] = ta.bbands(DF_Data['Close'], length=LENGHT, std=STD) 461 | B1_IND = round(df['upper'].iloc[-1], 8) 462 | BM_IND = round(df['middle'].iloc[-1], 8) 463 | B2_IND = round(df['lower'].iloc[-1], 8) 464 | #time_1MIN = ret_time(DF_Data) 465 | #save_indicator(locals().items()) 466 | return B1_IND, BM_IND, B2_IND 467 | 468 | def Supertrend(DF_Data, LENGHT, MULT): 469 | df = pd.DataFrame() 470 | df[['supertrend', 'supertrend_direc', 'supertrend_down', 'supertrend_up']] = ta.supertrend(pd.to_numeric(DF_Data['High']), pd.to_numeric(DF_Data['Low']), pd.to_numeric(DF_Data['Close']), length=LENGHT, multiplier=MULT) 471 | SUPERTRENDUP_IND = round(df['supertrend_up'].iloc[-1], 8) 472 | SUPERTRENDDOWN_IND = round(df['supertrend_down'].iloc[-1], 8) 473 | SUPERTREND_IND = round(df['supertrend'].iloc[-1], 8) 474 | #time_1MIN = ret_time(DF_Data) 475 | #save_indicator(locals().items()) 476 | return SUPERTREND_IND, SUPERTRENDDOWN_IND, SUPERTRENDUP_IND 477 | 478 | def Momentum(DF_Data, LENGHT): 479 | MOMENTUM_IND = round(ta.mom(DF_Data['Close'], timeperiod=LENGHT).iloc[-1], 8) 480 | #time_1MIN = ret_time(DF_Data) 481 | #save_indicator(locals().items()) 482 | return MOMENTUM_IND 483 | 484 | def Ema(DF_Data, LENGHT): 485 | EMA_IND = round(ta.ema(DF_Data['Close'], length=LENGHT).iloc[-1], 8) 486 | #time_1MIN = ret_time(DF_Data) 487 | #save_indicator(locals().items()) 488 | return EMA_IND 489 | 490 | def Ema_df(DF_Data, LENGHT): 491 | EMA_IND = ta.ema(DF_Data['Close'], length=LENGHT) 492 | return EMA_IND 493 | 494 | def Sma(DF_Data, LENGHT): 495 | SMA_IND = round(ta.sma(DF_Data['Close'],length=LENGHT).iloc[-1], 8) 496 | #time_1MIN = ret_time(DF_Data) 497 | #save_indicator(locals().items()) 498 | return SMA_IND 499 | 500 | def Sma_df(DF_Data, LENGHT): 501 | SMA_IND = ta.sma(DF_Data['Close'],length=LENGHT) 502 | return SMA_IND 503 | 504 | def Stochastic(DF_Data, LENGHT, K, D): 505 | STOCHK_1M_DATA = pd.DataFrame() 506 | STOCHK_1M_DATA[['k', 'd']] = ta.stoch(DF_Data['High'], DF_Data['Low'], DF_Data['Close'], LENGHT, K, D) 507 | STOCHK_IND = round(STOCHK_1M_DATA['k'].iloc[-1], 8) 508 | STOCHD_IND = round(STOCHK_1M_DATA['d'].iloc[-1], 8) 509 | #time_1MIN = ret_time(DF_Data) 510 | #save_indicator(locals().items()) 511 
| return STOCHK_IND, STOCHD_IND 512 | 513 | def Rsi(DF_Data, LENGHT): 514 | RSI_IND = round(ta.rsi(DF_Data['Close'], LENGHT).iloc[-1], 8) 515 | #time_1MIN = ret_time(DF_Data) 516 | #save_indicator(locals().items()) 517 | return RSI_IND 518 | 519 | def Rsi_df(DF_Data, LENGHT): 520 | RSI_IND = ta.rsi(DF_Data['Close'], LENGHT) 521 | #time_1MIN = ret_time(DF_Data) 522 | #save_indicator(locals().items()) 523 | return RSI_IND 524 | 525 | def Wma(DF_Data, LENGHT): 526 | WMA_IND = round(ta.wma(DF_Data['Close'], LENGHT).iloc[-1], 8) 527 | #time_1MIN = ret_time(DF_Data) 528 | #save_indicator(locals().items()) 529 | return WMA_IND 530 | 531 | def Hma(DF_Data, LENGHT): 532 | HMA_IND = round(ta.hma(DF_Data['Close'], LENGHT).iloc[-1], 8) 533 | #time_1MIN = ret_time(DF_Data) 534 | #save_indicator(locals().items()) 535 | return HMA_IND 536 | 537 | def Macd(DF_Data, FAST, SLOW, SIGNAL): 538 | MACD_IND, MACDHIST_IND, MACDSIG_IND = round(ta.macd(DF_Data['Close'],FAST, SLOW, SIGNAL).iloc[-1], 8) 539 | #time_1MIN = ret_time(DF_Data) 540 | #save_indicator(locals().items()) 541 | return MACD_IND, MACDHIST_IND, MACDSIG_IND 542 | 543 | def Macd_df(DF_Data, FAST, SLOW, SIGNAL): 544 | macd_df = ta.macd(DF_Data['Close'],FAST, SLOW, SIGNAL) 545 | MACD_IND = macd_df[f"MACD_{FAST}_{SLOW}_{SIGNAL}"] 546 | MACDHIST_IND = macd_df[f"MACDh_{FAST}_{SLOW}_{SIGNAL}"] 547 | MACDSIG_IND = macd_df[f"MACDs_{FAST}_{SLOW}_{SIGNAL}"] 548 | return MACD_IND, MACDHIST_IND, MACDSIG_IND 549 | 550 | def Macd_Ind(DF_Data, FAST, SLOW, SIGNAL): 551 | MACD_IND, MACDHIST_IND, MACDSIG_IND = round(ta.macd(DF_Data['Close'],FAST, SLOW, SIGNAL).iloc[-1], 8) 552 | #time_1MIN = ret_time(DF_Data) 553 | #save_indicator(locals().items()) 554 | return MACD_IND 555 | 556 | def Macd_Hist(DF_Data, FAST, SLOW, SIGNAL): 557 | MACD_IND, MACDHIST_IND, MACDSIG_IND = round(ta.macd(DF_Data['Close'],FAST, SLOW, SIGNAL).iloc[-1], 8) 558 | #time_1MIN = ret_time(DF_Data) 559 | #save_indicator(locals().items()) 560 | return MACDHIST_IND 561 | 562 | def Macd_Sig(DF_Data, FAST, SLOW, SIGNAL): 563 | MACD_IND, MACDHIST_IND, MACDSIG_IND = round(ta.macd(DF_Data['Close'],FAST, SLOW, SIGNAL).iloc[-1], 8) 564 | #time_1MIN = ret_time(DF_Data) 565 | #save_indicator(locals().items()) 566 | return MACDSIG_IND 567 | 568 | def Macd_Sig_df(DF_Data, FAST, SLOW, SIGNAL): 569 | macd_df = ta.macd(DF_Data['Close'],FAST, SLOW, SIGNAL) 570 | # MACD_IND = macd_df[f"MACD_{FAST}_{SLOW}_{SIGNAL}"] 571 | # MACDHIST_IND = macd_df[f"MACDh_{FAST}_{SLOW}_{SIGNAL}"] 572 | MACDSIG_IND = macd_df[f"MACDs_{FAST}_{SLOW}_{SIGNAL}"] 573 | return MACDSIG_IND 574 | 575 | def Macd_Ind_df(DF_Data, FAST, SLOW, SIGNAL): 576 | macd_df = ta.macd(DF_Data['Close'],FAST, SLOW, SIGNAL) 577 | MACD_IND = macd_df[f"MACD_{FAST}_{SLOW}_{SIGNAL}"] 578 | # MACDHIST_IND = macd_df[f"MACDh_{FAST}_{SLOW}_{SIGNAL}"] 579 | # MACDSIG_IND = macd_df[f"MACDs_{FAST}_{SLOW}_{SIGNAL}"] 580 | return MACD_IND 581 | 582 | def Cci(DF_Data, LENGHT): 583 | CCI_IND = round(DF_Data.ta.cci(length=LENGHT).iloc[-1], 8) 584 | #time_1MIN = ret_time(DF_Data) 585 | #save_indicator(locals().items()) 586 | return CCI_IND 587 | 588 | def Sl(PAIR, CLOSE, STOPLOSS): 589 | try: 590 | r = False 591 | bought_at = Bought_at(PAIR) #, timeHold, coins_bought = load_json(PAIR) 592 | SL = float(bought_at) - ((float(bought_at) * float(STOPLOSS)) / 100) 593 | r = float(CLOSE) < float(SL) and float(SL) != 0.0 594 | except Exception as e: 595 | exc_type, exc_obj, exc_tb = sys.exc_info() 596 | print('Sl Error on line ' + str(exc_tb.tb_lineno)) 597 | pass 598 | return r 599 | 
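# Worked example (illustrative, made-up numbers): with bought_at = 100.0 and
# STOPLOSS = 2, Sl() above computes SL = 100.0 - (100.0 * 2 / 100) = 98.0 and
# returns True once CLOSE falls below 98.0. Tp() below mirrors it on the upside:
# with TAKEPROFIT = 3 the threshold is TP = 100.0 + (100.0 * 3 / 100) = 103.0,
# and it returns True once CLOSE rises above 103.0. Both return False while
# Bought_at() is 0, i.e. while no position for the pair is recorded in the
# coins-bought JSON file.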
600 | def Tp(PAIR, CLOSE, TAKEPROFIT): 601 | try: 602 | sellSignalTP = False 603 | bought_at = Bought_at(PAIR) #, timeHold, coins_bought = load_json(PAIR) 604 | TP = float(bought_at) + ((float(bought_at) * float(TAKEPROFIT)) / 100) 605 | sellSignalTP = (float(CLOSE) > float(TP) and float(TP) != 0.0) 606 | except Exception as e: 607 | exc_type, exc_obj, exc_tb = sys.exc_info() 608 | print('Tp Error on line ' + str(exc_tb.tb_lineno)) 609 | pass 610 | return sellSignalTP 611 | 612 | def Bought_at(PAIR): 613 | bought_at, timeHold, coins_bought = load_json(PAIR) 614 | return bought_at 615 | 616 | def Bought_timeHold(PAIR): 617 | bought_at, timeHold, coins_bought = load_json(PAIR) 618 | return timeHold 619 | 620 | def TimeHold(pair, current_timestamp): 621 | current_timestamp_seconds = current_timestamp/1000 622 | TimeHold_seconds = Bought_timeHold(pair)/1000 623 | TimeHold_sec = current_timestamp_seconds - TimeHold_seconds 624 | TimeHold_min = TimeHold_sec / 60 625 | return TimeHold_min 626 | 627 | def Zigzag(DF_data, length): 628 | try: 629 | zigzag = pd.Series(np.nan, index=DF_data.index) 630 | last_extreme = {'price': DF_data['Close'].iloc[0], 'index': 0, 'type': None} 631 | 632 | for i in range(1, len(DF_data)): 633 | high = DF_data['High'].iloc[i] 634 | low = DF_data['Low'].iloc[i] 635 | 636 | # Cálculo de cambios porcentuales 637 | change_from_high = (high - last_extreme['price']) / last_extreme['price'] * 100 638 | change_from_low = (last_extreme['price'] - low) / last_extreme['price'] * 100 639 | 640 | if last_extreme['type'] in [None, 'baja']: 641 | if change_from_high >= length: 642 | # Nuevo pico confirmado 643 | zigzag[last_extreme['index']] = np.nan # Elimina extremo anterior 644 | last_extreme.update({'price': high, 'index': i, 'type': 'alta'}) 645 | zigzag.iloc[i] = high 646 | elif low < last_extreme['price']: 647 | # Actualiza valle temporal 648 | last_extreme.update({'price': low, 'index': i, 'type': 'baja'}) 649 | zigzag.iloc[i] = low 650 | 651 | elif last_extreme['type'] == 'alta': 652 | if change_from_low >= length: 653 | # Nuevo valle confirmado 654 | zigzag[last_extreme['index']] = np.nan # Elimina extremo anterior 655 | last_extreme.update({'price': low, 'index': i, 'type': 'baja'}) 656 | zigzag.iloc[i] = low 657 | elif high > last_extreme['price']: 658 | # Actualiza pico temporal 659 | last_extreme.update({'price': high, 'index': i, 'type': 'alta'}) 660 | zigzag.iloc[i] = high 661 | 662 | return last_extreme 663 | 664 | except Exception as e: 665 | exc_type, exc_obj, exc_tb = sys.exc_info() 666 | print(e) 667 | print('zigzag Error on line ' + str(exc_tb.tb_lineno)) 668 | pass 669 | #return DF_Data 670 | 671 | def contar_decimales(numero): 672 | numero_str = str(numero) 673 | if '.' 
in numero_str: 674 | parte_decimal = numero_str.split('.')[1] 675 | return len(parte_decimal) 676 | else: 677 | return 0 678 | 679 | def should_sell_due_to_risk(coin, CLOSE, time, sl, th): 680 | r = False 681 | if Sl(coin, CLOSE, sl): 682 | r = True 683 | time_held = TimeHold(coin, time) 684 | if time_held > th: 685 | r = True 686 | return r 687 | 688 | def Stop_Time(coin, time, s_time): 689 | time_held = TimeHold(coin, time) 690 | if time_held > s_time: 691 | return True 692 | return False 693 | # def Dynamic_StopLoss(coin, DF_Data, CLOSE, LENGHT, time_wait, value): 694 | # try: 695 | # coinfile = coin + ".st" 696 | # if Bought_at(coin): 697 | # #BA, BM, BB = Bollinger_Bands(DF_Data, LENGHT, 2) 698 | # if os.path.exists(coinfile): 699 | # if CLOSE > value: 700 | # os.remove(coinfile) 701 | # return False 702 | 703 | # if os.path.exists(coin + '.position'): 704 | # with open(coin + '.position', 'r') as f: 705 | # last_time = float(f.read().replace(".0", "")) 706 | # else: 707 | # return False 708 | 709 | # with open(coinfile, "r") as f: 710 | # first_time = float(f.read().strip()) 711 | # time_elapsed = ((last_time - first_time) / 60) / 1000 712 | 713 | # if time_elapsed >= time_wait: 714 | # os.remove(coinfile) 715 | # return True 716 | # else: 717 | # if CLOSE < value: 718 | # if os.path.exists(coin + '.position'): 719 | # with open(coin + '.position', 'r') as f: 720 | # last_time = float(f.read().replace(".0", "")) 721 | # else: 722 | # return False 723 | 724 | # with open(coinfile, 'w') as f: 725 | # f.write(str(last_time)) 726 | # return False 727 | # except Exception as e: 728 | # exc_type, exc_obj, exc_tb = sys.exc_info() 729 | # print(e) 730 | # print('check_bollingerLow_and_holding Error on line ' + str(exc_tb.tb_lineno)) 731 | # pass 732 | # return False 733 | 734 | def calculate_spread(min_spread, max_spread, DF_Data): 735 | open_price = DF_Data['Open'].iloc[-1] # precio de compra 736 | close_price = DF_Data['Close'].iloc[-1] # precio de venta 737 | spread = (close_price - open_price) / open_price * 100 # % de spread 738 | if spread <= min_spread: 739 | return 1 740 | elif spread >= max_spread: 741 | return 2 742 | 743 | def calculate_fibonacci(data, length=100): 744 | if len(data) < length: 745 | raise ValueError("No hay suficientes datos para calcular Fibonacci.") 746 | sub_data = data[-length:] 747 | 748 | max_price = sub_data['High'].max() 749 | min_price = sub_data['Low'].min() 750 | 751 | diff = max_price - min_price 752 | 753 | niveles = { 754 | '0.0': max_price, 755 | '0.236': max_price - 0.236 * diff, 756 | '0.382': max_price - 0.382 * diff, 757 | '0.5': max_price - 0.5 * diff, 758 | '0.618': max_price - 0.618 * diff, 759 | '0.786': max_price - 0.786 * diff, 760 | '1.0': min_price 761 | } 762 | return niveles 763 | 764 | def Fibonacci(data, length=100): 765 | fibo = calculate_fibonacci(data, length) 766 | nivel_618 = fibo['0.618'] 767 | 768 | precio_anterior = data['Close'].iloc[-2] 769 | precio_actual = data['Close'].iloc[-1] 770 | 771 | if precio_anterior < nivel_618 and precio_actual > nivel_618: 772 | return 1 #"COMPRA" 773 | elif precio_anterior > nivel_618 and precio_actual < nivel_618: 774 | return -1 #"VENTA" 775 | else: 776 | return 0 #"ESPERAR" 777 | 778 | def B(data): 779 | return (data['High'].iloc[-1] + data['Low'].iloc[-1] + data['Close'].iloc[-1]) / 3 780 | 781 | def Adx(data, adx_period=14): 782 | adx = ADXIndicator( 783 | high=data['High'], 784 | low=data['Low'], 785 | close=data['Close'], 786 | window=adx_period 787 | ) 788 | 789 | ADX = adx.adx() 790 
| DI_positive = adx.adx_pos() 791 | DI_negative = adx.adx_neg() 792 | 793 | return ADX.iloc[-1], DI_positive.iloc[-1], DI_negative.iloc[-1] 794 | 795 | def filter_with_adx(data, adx_umbral=25): 796 | adx_indicador = ADXIndicator(data['High'], data['Low'], data['Close'], window=14) 797 | adx = adx_indicador.adx().iloc[-1] 798 | return adx < adx_umbral 799 | 800 | def Atr(data, atr_period=14): 801 | atr = AverageTrueRange( 802 | high=data['High'], 803 | low=data['Low'], 804 | close=data['Close'], 805 | window=atr_period 806 | ).average_true_range() 807 | 808 | return atr.iloc[-1] # ← Valor ABSOLUTO, no normalizado 809 | 810 | def Atr_df(data, atr_period=14): 811 | atr = AverageTrueRange( 812 | high=data['High'], 813 | low=data['Low'], 814 | close=data['Close'], 815 | window=atr_period 816 | ).average_true_range() 817 | 818 | return atr 819 | 820 | def Dynamic_Sl_Atr(coin, current_price, data, atr_multiplier=2.5): 821 | bought_at = Bought_at(coin) 822 | if bought_at <= 0: 823 | return False 824 | atr_absolute = Atr(data, 14) 825 | sl_price = bought_at - (atr_absolute * atr_multiplier) 826 | if current_price <= sl_price: 827 | return True 828 | return False 829 | 830 | def Check_Volume_df(data, window=5, umbral=1.2): 831 | volumen_actual = data['Volume'] 832 | volumen_promedio = data['Volume'].rolling(window).mean() 833 | return volumen_actual > umbral * volumen_promedio 834 | 835 | def Check_Volume_Growth(data, velas=3): 836 | # Detecta si el volumen ha subido en las últimas N velas 837 | vol = data['Volume'].tail(velas).values 838 | return all(vol[i] > vol[i-1] for i in range(1, len(vol))) 839 | 840 | def Low_Volatility(Data, CLOSE): 841 | return Atr(Data, 14) < (CLOSE * 0.005) 842 | 843 | def Low_Volatility_df(Data, CLOSE): 844 | return Atr_df(Data, 14) < (CLOSE * 0.005) 845 | 846 | def Calculate_Market_Direction(data, adx_period=14, adx_threshold=20): 847 | ADX, DI_positive, DI_negative = Adx(data, adx_period) 848 | 849 | if ADX > adx_threshold: 850 | if DI_positive > DI_negative: 851 | return 'alcista' 852 | elif DI_negative > DI_positive: 853 | return 'bajista' 854 | return 'sin_tendencia' 855 | 856 | def Atr_Normalized(data, atr_period=14): 857 | atr_value = AverageTrueRange( 858 | high=data['High'], 859 | low=data['Low'], 860 | close=data['Close'], 861 | window=atr_period 862 | ).average_true_range().iloc[-1] 863 | 864 | normalized_atr = atr_value / data['Close'].iloc[-1] 865 | return normalized_atr # ← Ej: 0.015 para 1.5% de volatilidad 866 | 867 | def Check_Volume(data, window=5, umbral=1.2): 868 | volumen_actual = data['Volume'].iloc[-1] 869 | volumen_promedio = data['Volume'].rolling(window).mean().iloc[-1] 870 | return bool(volumen_actual > umbral * volumen_promedio) 871 | 872 | def Check_Consolidation(data, adx_threshold=20, atr_threshold=0.003): 873 | adx, _, _ = Adx(data) 874 | atr = Atr_Normalized(data) 875 | return adx < adx_threshold and atr < atr_threshold 876 | 877 | def Detect_Market_Type(data, umbral_alto_volatilidad=0.01): 878 | tendencia = Calculate_Market_Direction(data, 20, 20) 879 | volatilidad = Atr_Normalized(data, 20) 880 | consolidacion = Check_Consolidation(data, 20, 0.003) 881 | volumen_alto = Check_Volume(data, 20, 1.2) 882 | if tendencia == 'alcista' and volumen_alto: 883 | return 'tendencia_alcista_confirmada' 884 | elif tendencia == 'bajista' and volumen_alto: 885 | return 'tendencia_bajista_confirmada' 886 | elif consolidacion and not volumen_alto: 887 | return 'consolidacion_confirmado' 888 | elif volatilidad > umbral_alto_volatilidad: 889 | return 
'volatil_confirmado' 890 | else: 891 | return 'rango_lateral_confirmado' 892 | 893 | def Pivots_Hl_df(df, left=10, right=10): 894 | highs = df['High'].to_numpy(dtype=float) 895 | lows = df['Low'].to_numpy(dtype=float) 896 | 897 | pivot_highs_idx = [] 898 | pivot_lows_idx = [] 899 | 900 | for i in range(left, len(df) - right): 901 | window_high = highs[i-left:i+right+1] 902 | window_low = lows[i-left:i+right+1] 903 | 904 | if highs[i] == np.max(window_high): 905 | pivot_highs_idx.append(i) 906 | 907 | if lows[i] == np.min(window_low): 908 | pivot_lows_idx.append(i) 909 | 910 | # Valores en los índices encontrados 911 | pivot_high_prices = highs[pivot_highs_idx] if pivot_highs_idx else [] 912 | pivot_low_prices = lows[pivot_lows_idx] if pivot_lows_idx else [] 913 | 914 | return pivot_high_prices, pivot_low_prices 915 | 916 | def Pivots_Hl(df, left=10, right=10): 917 | high_prices, low_prices = Pivots_Hl_df(df, left, right) 918 | last_high = high_prices[-1] if len(high_prices) > 0 else None 919 | last_low = low_prices[-1] if len(low_prices) > 0 else None 920 | return last_high, last_low 921 | 922 | def calcular_rangos_dinamicos_macd_rsi(df, rsi_l=14, macd_f=12, macd_l=26, macd_s=9, rsi_pct_buy=90, rsi_pct_sell=10, macd_pct_buy=75, macd_pct_sell=25): 923 | df = df.copy() 924 | df['RSI'] = Rsi_df(df, rsi_l) 925 | df['MACD'] = Macd_Ind_df(df, macd_f, macd_l, macd_s) 926 | 927 | # Convertir a numérico y eliminar NaN 928 | macd = pd.to_numeric(df['MACD'], errors='coerce').dropna() 929 | rsi = pd.to_numeric(df['RSI'], errors='coerce').dropna() 930 | 931 | # RSI: percentiles dinámicos 932 | valor_rsi_sobrecompra = np.percentile(rsi, rsi_pct_buy) 933 | valor_rsi_sobreventa = np.percentile(rsi, rsi_pct_sell) 934 | 935 | # MACD: percentiles dinámicos (separados en positivos y negativos) 936 | macd_pos = macd[macd > 0] 937 | macd_neg = macd[macd < 0] 938 | 939 | valor_macd_buy = np.percentile(macd_pos, macd_pct_buy) if len(macd_pos) > 0 else 0 940 | valor_macd_venta = np.percentile(macd_neg, macd_pct_sell) if len(macd_neg) > 0 else 0 941 | 942 | return valor_rsi_sobrecompra, valor_rsi_sobreventa, valor_macd_buy, valor_macd_venta 943 | 944 | def guardar_rangos_dinamicos(coin, n_velas, rsi_over, rsi_under, macd_buy, macd_sell): 945 | try: 946 | FILE = "rangos_dinamicos.json" 947 | FILE1 = "rangos_dinamicos_1.json" 948 | 949 | if not os.path.exists(FILE1): 950 | data_1 = {} 951 | data_1[coin] = [] 952 | data_1[coin] = { 953 | "coin": coin, 954 | "rsi_over": rsi_over, 955 | "rsi_under": rsi_under, 956 | "macd_buy": macd_buy, 957 | "macd_sell": macd_sell 958 | } 959 | with open(FILE1, "w") as f: 960 | json.dump(data_1, f, indent=2) 961 | data_1 = {} 962 | data_1[coin] = [] 963 | 964 | if not os.path.exists(FILE): 965 | data = {} 966 | else: 967 | with open(FILE, "r") as f: 968 | data = json.load(f) 969 | 970 | # si la coin no existe, inicializar lista 971 | if coin not in data: 972 | data[coin] = [] 973 | 974 | # agregar nuevo registro 975 | data[coin].append({ 976 | "rsi_over": rsi_over, 977 | "rsi_under": rsi_under, 978 | "macd_buy": macd_buy, 979 | "macd_sell": macd_sell 980 | }) 981 | 982 | # guardar el archivo actualizado 983 | with open(FILE, "w") as f: 984 | json.dump(data, f, indent=2) 985 | 986 | # si llegamos a n_velas registros → calcular promedio 987 | if len(data[coin]) >= n_velas: 988 | rsi_over_avg = round(sum(d["rsi_over"] for d in data[coin]) / len(data[coin]), 2) 989 | rsi_under_avg = round(sum(d["rsi_under"] for d in data[coin]) / len(data[coin]), 2) 990 | macd_buy_avg = 
round(sum(d["macd_buy"] for d in data[coin]) / len(data[coin]), 2) 991 | macd_sell_avg = round(sum(d["macd_sell"] for d in data[coin]) / len(data[coin]), 2) 992 | os.remove(FILE) 993 | os.remove(FILE1) 994 | data_1 = {} 995 | data_1[coin] = {} 996 | data_1[coin] = { 997 | "rsi_over": rsi_over_avg, 998 | "rsi_under": rsi_under_avg, 999 | "macd_buy": macd_buy_avg, 1000 | "macd_sell": macd_sell_avg 1001 | } 1002 | with open(FILE1, "w") as f: 1003 | json.dump(data_1, f, indent=2) 1004 | #data[coin] = [] 1005 | #with open(FILE, "w") as f: 1006 | #json.dump(data, f, indent=2) 1007 | else: 1008 | data2 = {} 1009 | with open(FILE1, "r") as f: 1010 | data2 = json.load(f) 1011 | return { 1012 | "rsi_over": float(data2[coin]['rsi_over']), 1013 | "rsi_under": float(data2[coin]['rsi_under']), 1014 | "macd_buy": float(data2[coin]['macd_buy']), 1015 | "macd_sell": float(data2[coin]['macd_sell']) 1016 | } 1017 | 1018 | return { 1019 | "rsi_over": rsi_over_avg, 1020 | "rsi_under": rsi_under_avg, 1021 | "macd_buy": macd_buy_avg, 1022 | "macd_sell": macd_sell_avg 1023 | } 1024 | 1025 | except Exception as e: 1026 | write_log(f'{txcolors.DEFAULT}{SIGNAL_NAME}: {txcolors.SELL_LOSS} - Exception: guardar_rangos_dinamicos(): {e}{txcolors.DEFAULT}', SIGNAL_NAME + '.log', True, False) 1027 | exc_type, exc_obj, exc_tb = sys.exc_info() 1028 | write_log('Error on line ' + str(exc_tb.tb_lineno), SIGNAL_NAME + '.log', True, False) 1029 | --------------------------------------------------------------------------------