├── SWParser ├── gui │ ├── __init__.py │ ├── gui.py │ ├── MainWindow.ui │ └── MainWindow.py ├── __init__.py ├── smon_decryptor.pyc ├── setup.py ├── test_parser.py ├── parser.py └── monsters.py ├── AUTHORS ├── requirements.txt ├── icons ├── 16x16.png ├── 24x24.png ├── 32x32.png ├── 48x48.png └── 256x256.png ├── .gitignore ├── plugins ├── GWLogger.yapsy-plugin ├── RaidLogger.yapsy-plugin ├── FullLogger.yapsy-plugin ├── RecruitEvaluator.yapsy-plugin ├── ArenaLogger.yapsy-plugin ├── WorldBossLogger.yapsy-plugin ├── SummonLogger.yapsy-plugin ├── SwagLogger.yapsy-plugin ├── GoogleSheetWriter.yapsy-plugin ├── LiveOptimizer.yapsy-plugin ├── RunLogger.yapsy-plugin ├── GuildBattleLogger.yapsy-plugin ├── ToALogger.yapsy-plugin ├── SwarfarmLogger.yapsy-plugin ├── FullLogger.py ├── SwagLogger.py ├── SwarfarmLogger.py ├── GoogleSheetWriter.py ├── WorldBossLogger.py ├── SummonLogger.py ├── ToALogger.py ├── RecruitEvaluator.py ├── LiveOptimizer.py ├── RaidLogger.py ├── GuildBattleLogger.py ├── ArenaLogger.py ├── RunLogger.py └── GWLogger.py ├── swproxy.config ├── setup.py ├── SWProxy.spec ├── SWPlugin.py ├── README.md ├── LICENSE ├── SWProxy.py └── proxy.py /SWParser/gui/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /SWParser/__init__.py: -------------------------------------------------------------------------------- 1 | from parser import * 2 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Youness Alaoui 2 | Leonardo Stern 3 | Azrethos -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pycrypto 2 | dpkt 3 | yapsy 4 | gspread 5 | oauth2client 6 | -------------------------------------------------------------------------------- /icons/16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fperegrinvs/SWProxy-plugins/HEAD/icons/16x16.png -------------------------------------------------------------------------------- /icons/24x24.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fperegrinvs/SWProxy-plugins/HEAD/icons/24x24.png -------------------------------------------------------------------------------- /icons/32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fperegrinvs/SWProxy-plugins/HEAD/icons/32x32.png -------------------------------------------------------------------------------- /icons/48x48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fperegrinvs/SWProxy-plugins/HEAD/icons/48x48.png -------------------------------------------------------------------------------- /icons/256x256.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fperegrinvs/SWProxy-plugins/HEAD/icons/256x256.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | *.csv 3 | *.json 4 | *.sh 5 | *.pcap 6 | *.pyc 7 | smon_decryptor.py 8 | proxy.log 9 | 
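The tree above shows the layout: SWProxy.py and SWPlugin.py sit at the repository root, the parsing code lives under SWParser/, and every plugin under plugins/ ships as a .py module paired with a .yapsy-plugin descriptor. requirements.txt lists the third-party packages used by the proxy and its plugins; of the files shown here, only GoogleSheetWriter.py needs gspread and oauth2client. Below is a minimal, hypothetical pre-flight check (written in Python 2 like the rest of the repository) that confirms those packages are importable before SWProxy is started; the script name and messages are illustrative only, and it assumes the usual import names (pycrypto is imported as Crypto):

# check_requirements.py -- hypothetical helper, not part of this repository.
# Confirms the packages listed in requirements.txt can be imported.
required = {
    'pycrypto': 'Crypto',          # pip package name -> import name
    'dpkt': 'dpkt',
    'yapsy': 'yapsy',
    'gspread': 'gspread',          # only needed by the Google Sheet Writer plugin
    'oauth2client': 'oauth2client',
}

missing = []
for package, module in required.items():
    try:
        __import__(module)
    except ImportError:
        missing.append(package)

if missing:
    print 'Missing packages: %s (try: pip install -r requirements.txt)' % ', '.join(missing)
else:
    print 'All SWProxy dependencies are importable.'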
-------------------------------------------------------------------------------- /SWParser/smon_decryptor.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fperegrinvs/SWProxy-plugins/HEAD/SWParser/smon_decryptor.pyc -------------------------------------------------------------------------------- /plugins/GWLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Guild War Summary Logger 3 | Module = GWLogger 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Writes logs of Guild Wars 8 | -------------------------------------------------------------------------------- /plugins/RaidLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Raid Logger 3 | Module = RaidLogger 4 | 5 | [Documentation] 6 | Author = Ani 7 | Version = 0.1 8 | Description = Log raid runs -------------------------------------------------------------------------------- /plugins/FullLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Full Logger 3 | Module = FullLogger 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Print SW API request command names 8 | -------------------------------------------------------------------------------- /plugins/RecruitEvaluator.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Recruit Evaluator 3 | Module = RecruitEvaluator 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Evaluate Recruits 8 | -------------------------------------------------------------------------------- /plugins/ArenaLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Arena Logger 3 | Module = ArenaLogger 4 | 5 | [Documentation] 6 | Author = Ani 7 | Version = 0.1 8 | Description = Log arena battles -------------------------------------------------------------------------------- /plugins/WorldBossLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = World Boss Logger 3 | Module = WorldBossLogger 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Log World Boss Results 8 | -------------------------------------------------------------------------------- /plugins/SummonLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Summon Logger 3 | Module = SummonLogger 4 | 5 | [Documentation] 6 | Author = Ani 7 | Version = 0.1 8 | Description = Log summoning results 9 | -------------------------------------------------------------------------------- /plugins/SwagLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Swag Logger 3 | Module = SwagLogger 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Capture Guild War information and upload it to Swag 8 | -------------------------------------------------------------------------------- /plugins/GoogleSheetWriter.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Google Sheets Writer 3 | Module = GoogleSheetWriter 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Writes logs to Google Sheets 8 | 
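The .yapsy-plugin descriptors above and below are small INI files: the [Core] section gives the display Name and the Module (the .py file yapsy should load from plugins/), and the [Documentation] section carries author, version and description metadata. The matching module must contain a class derived from SWPlugin.SWPlugin (defined in SWPlugin.py further down), which the proxy is expected to invoke via SWPlugin.call_plugins for each decrypted request/response pair. A minimal sketch of such a module is shown below; HelloLogger and the 'HubUserLogin' command name are hypothetical and used only for illustration. To try something like it, pair the module with a HelloLogger.yapsy-plugin descriptor written like the ones above, drop both files into plugins/, and restart the proxy.

# plugins/HelloLogger.py -- hypothetical example, not one of the bundled plugins.
# Mirrors the pattern used by FullLogger.py and the other plugins below:
# subclass SWPlugin.SWPlugin and override process_request().
import logging

import SWPlugin

logger = logging.getLogger("SWProxy")


class HelloLogger(SWPlugin.SWPlugin):
    def process_request(self, req_json, resp_json):
        # req_json and resp_json are the decrypted request/response dicts
        # handed to every loaded plugin.
        command = req_json.get('command')
        if command == 'HubUserLogin':  # assumed command name, for illustration only
            logger.info('HelloLogger - login seen for wizard %s' % req_json.get('wizard_id'))

The bundled plugins additionally read swproxy.config in __init__ so users can switch them on and off; that step is omitted here for brevity.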
-------------------------------------------------------------------------------- /plugins/LiveOptimizer.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Live Optimizer 3 | Module = LiveOptimizer 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Keep the rune optimizer synched with SW 8 | -------------------------------------------------------------------------------- /plugins/RunLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Runs Logger 3 | Module = RunLogger 4 | 5 | [Documentation] 6 | Author = Leonardo Stern, Ani 7 | Version = 0.1 8 | Description = Log dungeons and scenario runs 9 | -------------------------------------------------------------------------------- /plugins/GuildBattleLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Guild Battle Logger 3 | Module = GuildBattleLogger 4 | 5 | [Documentation] 6 | Author = Ani 7 | Version = 0.1 8 | Description = Log guild battles 9 | -------------------------------------------------------------------------------- /plugins/ToALogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = Trial of Ascension Logger 3 | Module = ToALogger 4 | 5 | [Documentation] 6 | Author = Ani 7 | Version = 0.1 8 | Description = Log ToA runs and teams used 9 | -------------------------------------------------------------------------------- /plugins/SwarfarmLogger.yapsy-plugin: -------------------------------------------------------------------------------- 1 | [Core] 2 | Name = SWARFARM Logger 3 | Module = SwarfarmLogger 4 | 5 | [Documentation] 6 | Version = 0.1 7 | Description = Capture dungeon/scenario and summon results and upload to SWARFARM 8 | -------------------------------------------------------------------------------- /SWParser/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup, find_packages 4 | try: 5 | import py2exe 6 | except: 7 | pass 8 | 9 | 10 | setup(name='SWParser', 11 | version='1.0', 12 | description='Summoners War Data Parser', 13 | author='Youness Alaoui', 14 | author_email='kakaroto@kakaroto.homelinux.net', 15 | url='https://github.com/kakaroto/SWParser', 16 | packages=find_packages(), 17 | console = ['parser.py'] 18 | ) 19 | -------------------------------------------------------------------------------- /swproxy.config: -------------------------------------------------------------------------------- 1 | { 2 | "full_log": false, 3 | "full_log_filename": "full-log.txt", 4 | "live_sync": false, 5 | "disable_swarfarm_logger": false, 6 | "enable_google_sheet_writer": false, 7 | "log_runs": true, 8 | "log_wipes": false, 9 | "log_summon": true, 10 | "log_raids": true, 11 | "log_arena": true, 12 | "log_world_boss": true, 13 | "log_toa": true, 14 | "log_guild_battle": true, 15 | "google_key": "sw-runs-141453f2a4d3.json", 16 | "sheet_name": "SW Runs" 17 | } 18 | -------------------------------------------------------------------------------- /plugins/FullLogger.py: -------------------------------------------------------------------------------- 1 | import SWPlugin 2 | import json 3 | 4 | class FullLogger(SWPlugin.SWPlugin): 5 | def __init__(self): 6 | with open('swproxy.config') as f: 7 | self.config = json.load(f) 8 | 9 | def process_request(self, req_json, 
resp_json): 10 | config = self.config 11 | if 'full_log' not in config or config['full_log'] == False: 12 | return 13 | 14 | 15 | with open(config['full_log_filename'], "a") as fr: 16 | import time 17 | fr.write('%s\n' % time.ctime()) 18 | fr.write('Request (%s):\n' % resp_json['command']) 19 | fr.write('%s\n' % json.dumps(req_json)) 20 | fr.write('Response (%s):\n' % resp_json['command']) 21 | fr.write('%s\n\n' % json.dumps(resp_json)) 22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from distutils.core import setup 4 | import glob 5 | try: 6 | import py2exe 7 | except: 8 | pass 9 | 10 | plugin_patterns = ('plugins/*.py', 'plugins/*.yapsy-plugin') 11 | plugins = [] 12 | for pattern in plugin_patterns: 13 | plugins.extend(glob.glob(pattern)) 14 | 15 | setup(name='SWParser', 16 | version='1.0', 17 | description='Summoners War Data Parser', 18 | author='Youness Alaoui', 19 | author_email='kakaroto@kakaroto.homelinux.net', 20 | url='https://github.com/kakaroto/SWParser', 21 | packages = ['SWParser', 'SWParser.gui'], 22 | options={"py2exe":{"optimize":2,"includes":["sip"], "dll_excludes": ["MSVCP90.dll"]}}, 23 | console = ['SWParser.py', 'SWProxy.py'], 24 | data_files=[('plugins', plugins)], 25 | ) 26 | -------------------------------------------------------------------------------- /SWProxy.spec: -------------------------------------------------------------------------------- 1 | # -*- mode: python -*- 2 | 3 | block_cipher = None 4 | 5 | a = Analysis(['SWProxy.py'], 6 | pathex=['.'], 7 | binaries=None, 8 | datas=[ ('icons/*.png', 'icons'), 9 | ('AUTHORS', '.') ], 10 | hiddenimports=[], 11 | hookspath=[], 12 | runtime_hooks=[], 13 | excludes=[], 14 | win_no_prefer_redirects=False, 15 | win_private_assemblies=False, 16 | cipher=block_cipher) 17 | pyz = PYZ(a.pure, a.zipped_data, 18 | cipher=block_cipher) 19 | exe = EXE(pyz, 20 | a.scripts, 21 | a.binaries, 22 | a.zipfiles, 23 | a.datas, 24 | name='SWProxy', 25 | debug=False, 26 | strip=False, 27 | upx=True, 28 | console=True ) -------------------------------------------------------------------------------- /SWPlugin.py: -------------------------------------------------------------------------------- 1 | from yapsy import IPlugin 2 | from yapsy.PluginManager import PluginManager 3 | import os 4 | import logging 5 | 6 | logger = logging.getLogger(__name__) 7 | 8 | class classproperty(object): 9 | def __init__(self, getter): 10 | self.getter= getter 11 | def __get__(self, instance, owner): 12 | return self.getter(owner) 13 | 14 | class SWPlugin(IPlugin.IPlugin): 15 | _plugins = None 16 | 17 | @classmethod 18 | def load_plugins(cls): 19 | manager = PluginManager() 20 | manager.setPluginPlaces([os.path.join(os.getcwd(), "plugins/")]) 21 | manager.collectPlugins() 22 | ret = manager.getAllPlugins() 23 | logger.info('Loaded {} plugins'.format(len(ret))) 24 | return ret 25 | 26 | @classproperty 27 | def plugins(cls): 28 | if cls._plugins is None: 29 | cls._plugins = cls.load_plugins() 30 | return cls._plugins 31 | 32 | def process_request(self, req_json, resp_json): 33 | pass 34 | 35 | def process_csv_row(self, csv_type, data_type, data): 36 | pass 37 | 38 | @classmethod 39 | def call_plugins(cls, func_name, args): 40 | for plugin in cls.plugins: 41 | try: 42 | getattr(plugin.plugin_object, func_name)(*args) 43 | except Exception as e: 44 | logging.exception('Exception while executing plugin 
"%s": %s' \ 45 | % (plugin.name, e)) 46 | -------------------------------------------------------------------------------- /plugins/SwagLogger.py: -------------------------------------------------------------------------------- 1 | import SWPlugin 2 | import logging 3 | import json 4 | import os 5 | import threading 6 | import urllib 7 | import urllib2 8 | 9 | logger = logging.getLogger("SWProxy") 10 | 11 | 12 | class SwagLogger(SWPlugin.SWPlugin): 13 | log_url = 'https://gw.swop.one/data/upload/' 14 | 15 | def __init__(self): 16 | super(SwagLogger, self).__init__() 17 | self.plugin_enabled = True 18 | 19 | config_name = 'swproxy.config' 20 | if not os.path.exists(config_name): 21 | self.config = {} 22 | else: 23 | with open(config_name) as f: 24 | self.config = json.load(f) 25 | 26 | self.plugin_enabled = not self.config.get('disable_swag_logger', False) 27 | 28 | def process_request(self, req_json, resp_json): 29 | if self.plugin_enabled: 30 | t = threading.Thread(target=self.process_data, args=(req_json, resp_json)) 31 | t.start() 32 | 33 | def process_data(self, req_json, resp_json): 34 | command = req_json.get('command') 35 | 36 | if command == 'GetGuildWarBattleLogByGuildId': 37 | if resp_json: 38 | try: 39 | request = urllib2.Request(self.log_url) 40 | request.add_header('Content-Type','application/json') 41 | resp = urllib2.urlopen(request, json.dumps(resp_json)) 42 | except urllib2.HTTPError as e: 43 | logger.warn('SwagLogger - Error: {}'.format(e.readline())) 44 | else: 45 | resp.close() 46 | logger.info('SwagLogger - {} logged successfully'.format(command)) 47 | -------------------------------------------------------------------------------- /SWParser/test_parser.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import SWParser.parser 3 | 4 | 5 | class ParserCase(unittest.TestCase): 6 | def test_monster_name_full(self): 7 | code_map = { 8 | 0: "???[0] (???[0])", 9 | 10100: "Fairy (???[0])", 10 | 10101: "Fairy (Water)", 11 | 10102: "Fairy (Fire)", 12 | 10103: "Fairy (Wind)", 13 | 10104: "Fairy (Light)", 14 | 10105: "Fairy (Dark)", 15 | 10106: "Fairy (???[6])", 16 | 10111: "Elucia", 17 | 10112: "Iselia", 18 | 10113: "Aeilene", 19 | 10114: "Neal", 20 | 10115: "Sorin", 21 | 10000: "???[100] (???[0])", 22 | 10001: "???[100] (Water)", 23 | 12211: "AWAKENED Slime (Water)", 24 | 19111: "AWAKENED Fairy Queen (Water)", 25 | 1000111: "Homunculus (Water)", 26 | } 27 | for uid, name in code_map.iteritems(): 28 | self.assertEqual( 29 | SWParser.parser.monster_name(uid), 30 | name, 31 | ) 32 | 33 | def test_monster_name_short(self): 34 | code_map = { 35 | 0: "???[0]", 36 | 10100: "Fairy", 37 | 10101: "Fairy", 38 | 10102: "Fairy", 39 | 10103: "Fairy", 40 | 10104: "Fairy", 41 | 10105: "Fairy", 42 | 10106: "Fairy", 43 | 10111: "Elucia", 44 | 10112: "Iselia", 45 | 10113: "Aeilene", 46 | 10114: "Neal", 47 | 10115: "Sorin", 48 | 10000: "???[100]", 49 | 10001: "???[100]", 50 | 12211: "???[122]", 51 | 19111: "???[191]", 52 | 1000111: "Homunculus (Water)", 53 | } 54 | for uid, name in code_map.iteritems(): 55 | self.assertEqual( 56 | SWParser.parser.monster_name(uid, full=False), 57 | name, 58 | ) 59 | 60 | if __name__ == '__main__': 61 | unittest.main() 62 | -------------------------------------------------------------------------------- /plugins/SwarfarmLogger.py: -------------------------------------------------------------------------------- 1 | import SWPlugin 2 | import logging 3 | import json 4 | import os 5 | import threading 6 | import 
urllib 7 | import urllib2 8 | 9 | logger = logging.getLogger("SWProxy") 10 | 11 | 12 | class SwarfarmLogger(SWPlugin.SWPlugin): 13 | commands_url = 'https://swarfarm.com/data/log/accepted_commands/' 14 | log_url = 'https://swarfarm.com/data/log/upload/' 15 | accepted_commands = None 16 | 17 | def __init__(self): 18 | super(SwarfarmLogger, self).__init__() 19 | self.plugin_enabled = True 20 | 21 | config_name = 'swproxy.config' 22 | if not os.path.exists(config_name): 23 | self.config = {} 24 | else: 25 | with open(config_name) as f: 26 | self.config = json.load(f) 27 | 28 | self.plugin_enabled = not self.config.get('disable_swarfarm_logger', False) 29 | 30 | if self.plugin_enabled: 31 | # Get the list of accepted commands from the server 32 | logger.info('SwarfarmLogger - Retrieving list of accepted log types from SWARFARM...') 33 | try: 34 | resp = urllib2.urlopen(self.commands_url) 35 | self.accepted_commands = json.loads(resp.readline()) 36 | resp.close() 37 | logger.info('SwarfarmLogger - Looking for the following commands to log:\r\n' + ', '.join(self.accepted_commands.keys())) 38 | except urllib2.HTTPError: 39 | logger.fatal('SwarfarmLogger - Unable to retrieve accepted log types. SWARFARM logging is disabled.') 40 | self.plugin_enabled = False 41 | 42 | def process_request(self, req_json, resp_json): 43 | if self.plugin_enabled: 44 | t = threading.Thread(target=self.process_data, args=(req_json, resp_json)) 45 | t.start() 46 | 47 | def process_data(self, req_json, resp_json): 48 | command = req_json.get('command') 49 | 50 | if command in self.accepted_commands: 51 | accepted_data = self.accepted_commands[command] 52 | result_data = {} 53 | 54 | if 'request' in accepted_data: 55 | result_data['request'] = {item: req_json.get(item) for item in accepted_data['request']} 56 | 57 | if 'response' in accepted_data: 58 | result_data['response'] = {item: resp_json.get(item) for item in accepted_data['response']} 59 | 60 | if result_data: 61 | data = json.dumps(result_data) 62 | try: 63 | resp = urllib2.urlopen(self.log_url, data=urllib.urlencode({'data': data})) 64 | except urllib2.HTTPError as e: 65 | logger.warn('SwarfarmLogger - Error: {}'.format(e.readline())) 66 | else: 67 | resp.close() 68 | logger.info('SwarfarmLogger - {} logged successfully'.format(command)) 69 | -------------------------------------------------------------------------------- /plugins/GoogleSheetWriter.py: -------------------------------------------------------------------------------- 1 | import SWPlugin 2 | import gspread 3 | import threading 4 | import json 5 | import os 6 | from oauth2client.service_account import ServiceAccountCredentials 7 | 8 | 9 | class GoogleSheetWriter(SWPlugin.SWPlugin): 10 | def __init__(self): 11 | config_name = 'swproxy.config' 12 | if not os.path.exists(config_name): 13 | self.config = {} 14 | return 15 | 16 | with open('swproxy.config') as f: 17 | self.config = json.load(f) 18 | 19 | def process_csv_row(self, csv_type, data_type, data): 20 | if not 'enable_google_sheet_writer' in self.config or not self.config['enable_google_sheet_writer']: 21 | return 22 | 23 | if csv_type not in ['run_logger', 'arena_logger', 'summon_logger', 'raid_logger', 'worldboss_logger', 'toa_logger', 'guild_battle_logger']: 24 | return 25 | 26 | t = threading.Thread(target=self.save_row, args = (csv_type, data_type, data)) 27 | t.start() 28 | return\ 29 | 30 | def save_row(self, csv_type, data_type, data): 31 | if data_type == 'entry': 32 | if csv_type == 'run_logger': 33 | tab = 'Runs' 34 | last_column = 
'Y' 35 | total = 'AA1' 36 | elif csv_type == 'arena_logger': 37 | tab = 'Arena' 38 | last_column = 'P' 39 | total = 'R1' 40 | elif csv_type == 'summon_logger': 41 | tab = 'Summon' 42 | last_column = 'F' 43 | total = 'H1' 44 | elif csv_type == 'raid_logger': 45 | tab = 'Raid' 46 | last_column = 'K' 47 | total = 'M1' 48 | elif csv_type == 'worldboss_logger': 49 | tab = 'World Boss' 50 | last_column = 'AA' 51 | total = 'AC1' 52 | elif csv_type == 'toa_logger': 53 | tab = 'ToA' 54 | last_column = 'O' 55 | total = 'Q1' 56 | elif csv_type == 'guild_battle_logger': 57 | tab = 'Guild' 58 | last_column = 'S' 59 | total = 'U1' 60 | 61 | names, row = data 62 | key_file = self.config['google_key'] 63 | sheet_name = self.config['sheet_name'] 64 | scope = ['https://spreadsheets.google.com/feeds'] 65 | credentials = ServiceAccountCredentials.from_json_keyfile_name(key_file, scope) 66 | gc = gspread.authorize(credentials) 67 | wks = gc.open(sheet_name).worksheet(tab) 68 | line = int(wks.acell(total).value) + 2 69 | cl = wks.range('A%s:%s%s' % (line, last_column, line)) 70 | for (i, name) in enumerate(names): 71 | if name in row: 72 | cl[i].value = row[name] 73 | 74 | wks.update_cells(cl) 75 | -------------------------------------------------------------------------------- /plugins/WorldBossLogger.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | from SWParser import * 5 | import SWPlugin 6 | 7 | class WorldBossLogger(SWPlugin.SWPlugin): 8 | def __init__(self): 9 | with open('swproxy.config') as f: 10 | self.config = json.load(f) 11 | 12 | def process_request(self, req_json, resp_json): 13 | config = self.config 14 | if 'log_world_boss' not in config or not config['log_world_boss']: 15 | return 16 | 17 | command = req_json['command'] 18 | if command == 'BattleWorldBossStart': 19 | return self.log_world_boss(req_json, resp_json, config) 20 | 21 | def build_unit_dictionary(self, wizard_id): 22 | with open('%s-optimizer.json' % wizard_id) as f: 23 | user_data = json.load(f) 24 | mon_dict = {} 25 | for mon in user_data["mons"]: 26 | mon_dict[mon['unit_id']] = mon['name'] 27 | return mon_dict 28 | 29 | def log_world_boss(self, req_json, resp_json, config): 30 | if not config["log_world_boss"]: 31 | return 32 | 33 | wizard_id = str(req_json['wizard_id']) 34 | user_mons = self.build_unit_dictionary(wizard_id) 35 | result = resp_json['worldboss_battle_result'] 36 | 37 | filename = "%s-worldboss.csv" % wizard_id 38 | is_new_file = not os.path.exists(filename) 39 | 40 | with open(filename, "ab") as log_file: 41 | field_names = ['date', 'boss_n', 'atk_power', 'elem_bonus', 'damage', 'grade'] 42 | header = {'date': 'Date', 'boss_n': 'Boss #', 'atk_power': 'Attack Power', 43 | 'elem_bonus': 'Elemental Bonus', 'damage': 'Damage', 'grade': 'Grade'} 44 | 45 | for i in range(1, 21): 46 | field = 'mob%s' % i 47 | field_names.append(field) 48 | header[field] = 'Mob %s' % i 49 | 50 | SWPlugin.SWPlugin.call_plugins('process_csv_row', ('worldboss_logger', 'header', (field_names, header))) 51 | 52 | log_writer = DictUnicodeWriter(log_file, fieldnames=field_names) 53 | if is_new_file: 54 | log_writer.writerow(header) 55 | 56 | log_entry = {'date': time.strftime("%Y-%m-%d %H:%M"), 'boss_n': req_json['worldboss_id'] - 10000, 57 | 'atk_power': result['total_battle_point'], 'elem_bonus': result['bonus_battle_point'], 58 | 'damage': result['total_damage'], 'grade': resp_json['reward_info']['name']} 59 | 60 | for i in range(1, 
len(req_json['unit_id_list']) + 1): 61 | id = req_json['unit_id_list'][i-1]['unit_id'] 62 | log_entry['mob%s' % i] = user_mons[id] 63 | 64 | SWPlugin.SWPlugin.call_plugins('process_csv_row', ('worldboss_logger', 'entry', (field_names, log_entry))) 65 | log_writer.writerow(log_entry) 66 | return 67 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## SWProxy Plugins 2 | Plugins for SWProxy 3 | 4 | To get the latest versions of the plugins that are guaranteed to work with the latest SWProxy release, please visit the [Releases](https://github.com/lstern/SWProxy-plugins/releases) section. If you download the plugins directly from the repository, they may not be fully compatible with the latest release of SWProxy and may have reduced or no functionality. 5 | 6 | Each plugin has two files: a .py file with the actual plugin code and a .yapsy-plugin file with the plugin description. 7 | To install the plugins, just drop the desired plugins into the /plugins folder, copy the .config file to the parent folder, edit it as needed, and restart SWProxy. 8 | 9 | You can have multiple users connected and actively using the same proxy. It will separate users based on their user_id and create separate files for each account. 10 | 11 | * [Video Tutorial](https://www.youtube.com/watch?v=T4zI6HViV9g) 12 | 13 | ### Arena Logger 14 | Logs all attacks you make, including rivals. In order to correctly record the opponent's name, the proxy must be connected when your phone receives the arena log (on login, list refresh or when a new attack is received); otherwise it will just record the enemy team. The output filename is [user_id]_arena.csv 15 | 16 | ### Full Logger 17 | Dumps the contents of the requests and responses from/to the com2us servers to a text file ("full_log_filename" from swproxy.config) 18 | 19 | ### Generate Friend Swarfarm 20 | Generates data for visited friends for use with Swarfarm. The generated data will not contain any inventory runes. 21 | 22 | ### Google Sheet Writer 23 | Allows all reports to be written directly to Google Sheets. Requires an API key and extra dependencies. Once these dependencies are built into SWProxy, a video tutorial will be made showing how to set it up. 24 | 25 | ### Raid Logger 26 | Will log raid results including time, reward and raid members. The output filename is [user_id]_raids.csv 27 | 28 | ### Recruit Evaluator 29 | This plugin will generate extra data when visiting friends. The extra data is intended to help with guild recruit evaluation. 30 | 31 | ### Run Logger 32 | Will log runs and drops from Necro, Dragons, Giants, elemental halls and HoH. The output filename is [user_id]_runs.csv 33 | 34 | ### Summon Logger 35 | Will log summons from any type of scroll, including social and crystal summons. Does not work with individual monster pieces (from SD). The output filename is [user_id]_summons.csv 36 | 37 | ### ToA Logger 38 | Will log results from ToA attempts, including floor, difficulty, the team used and the monsters faced in the last wave. The output filename is [user_id]_toa.csv 39 | 40 | ### World Boss Logger 41 | Will log each attack against the world boss, including the attack power, elemental bonus, total damage, grade and all the mobs selected for the fight. 
The output filename is [user_id]_worldboss.csv 42 | 43 | ### SWarfarm Logger 44 | Will send data about your runs and summons to [swarfarm](https://swarfarm.com/), where you will have access to full data and statistics about your runs. The site will also offer aggregate statistics using data from all users. -------------------------------------------------------------------------------- /SWParser/gui/gui.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from PyQt4 import QtCore, QtGui 3 | from MainWindow import Ui_MainWindow 4 | import SWProxy 5 | import threading 6 | import logging 7 | 8 | logger = logging.getLogger("SWProxy") 9 | 10 | class ProxyThread(QtCore.QThread): 11 | def __init__(self, ip, port, parent = None): 12 | QtCore.QThread.__init__(self, parent) 13 | self.ip = ip 14 | self.port = port 15 | 16 | def run(self): 17 | try: 18 | SWProxy.is_ip_port_valid(self.ip, self.port) 19 | logger.info("Running Proxy server at %s on port %s" % (self.ip, self.port)) 20 | p = SWProxy.HTTP(self.ip, self.port) 21 | p.run() 22 | except Exception as e: 23 | logger.info("Error running proxy server : %s" % e) 24 | print "Error running proxy server : %s" % e 25 | 26 | class MainWindow(QtGui.QMainWindow): 27 | def __init__(self, ip, port=8080, parent=None): 28 | QtGui.QMainWindow.__init__(self, parent) 29 | self.ui = Ui_MainWindow() 30 | self.ui.setupUi(self) 31 | self.ui.ipAddress.setText(ip) 32 | self.ui.proxyPort.setValue(port) 33 | self.ui.startProxy.clicked.connect(self.startStopProxy) 34 | self.ui.actionQuit.triggered.connect(self.quit) 35 | self.ui.actionAbout.triggered.connect(self.about) 36 | self.ui.actionOpen_PCAP.triggered.connect(self.openPCAP) 37 | self.proxy = None 38 | 39 | def quit(self): 40 | if self.proxy: 41 | self.proxy.terminate() 42 | self.proxy = None 43 | self.close() 44 | 45 | def about(self): 46 | QtGui.QMessageBox.about(self, "About", "SWProxy: Summoners War Proxy Tool\nWritten by KaKaRoTo\n\nLicensed under LGPLv3 and available at : \n\thttps://github.com/kakaroto/SWParser\n") 47 | 48 | def openPCAP(self): 49 | pcap_file = QtGui.QFileDialog.getOpenFileName() 50 | SWProxy.parse_pcap(pcap_file) 51 | 52 | def log(self, str): 53 | self.ui.logWindow.addItem(str) 54 | 55 | def startStopProxy(self): 56 | self.ui.proxyPort.setReadOnly(True) 57 | self.ui.proxyPort.setEnabled(False) 58 | self.ui.ipAddress.setReadOnly(True) 59 | self.ui.ipAddress.setEnabled(False) 60 | 61 | if self.proxy: 62 | self.ui.startProxy.setText("Start Proxy Server") 63 | self.ui.startProxy.setEnabled(False) 64 | self.proxy.terminate() 65 | else: 66 | self.ui.startProxy.setText("Stop Proxy Server") 67 | self.proxy = ProxyThread(self.ui.ipAddress.text(), self.ui.proxyPort.value(), parent=self) 68 | self.proxy.finished.connect(self.proxyStopped) 69 | self.proxy.start() 70 | 71 | def proxyStopped(self): 72 | self.proxy = None 73 | self.ui.ipAddress.setReadOnly(False) 74 | self.ui.ipAddress.setEnabled(True) 75 | self.ui.proxyPort.setReadOnly(False) 76 | self.ui.proxyPort.setEnabled(True) 77 | self.ui.startProxy.setEnabled(True) 78 | 79 | 80 | class GuiLogHandler(logging.Handler): 81 | def __init__(self, gui=None): 82 | logging.Handler.__init__(self) 83 | self.gui = gui 84 | 85 | def emit(self, record): 86 | msg = self.format(record) 87 | self.gui.log(msg) 88 | -------------------------------------------------------------------------------- /plugins/SummonLogger.py: -------------------------------------------------------------------------------- 1 | import json 2 | 
import os 3 | import time 4 | from SWParser import * 5 | from SWPlugin import SWPlugin 6 | import threading 7 | 8 | sources = { 9 | 1: 'Unknown Scroll', 10 | 2: 'Mystical Scroll', 11 | 3: 'Light & Dark Scroll', 12 | 4: 'Water Scroll', 13 | 5: 'Fire Scroll', 14 | 6: 'Wind Scroll', 15 | 7: 'Legendary Scroll', 16 | 8: 'Exclusive Summons', 17 | 9: "Legendary Pieces", 18 | 10: "Light & Dark Pieces" 19 | } 20 | 21 | def identify_scroll(id): 22 | return sources[id] 23 | 24 | class MonsterLogger(SWPlugin): 25 | def __init__(self): 26 | with open('swproxy.config') as f: 27 | self.config = json.load(f) 28 | 29 | def process_request(self, req_json, resp_json): 30 | config = self.config 31 | if 'log_summon' not in config or not config['log_summon']: 32 | return 33 | 34 | command = req_json['command'] 35 | if command == 'SummonUnit': 36 | return self.log_summon(req_json, resp_json, config) 37 | 38 | def log_summon(self, req_json, resp_json, config): 39 | if not config["log_summon"]: 40 | return 41 | 42 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 43 | if 'unit_list' in resp_json: 44 | if 'item_info' in resp_json: 45 | scroll = identify_scroll(resp_json['item_info']['item_master_id']) 46 | else: 47 | mode = req_json['mode'] 48 | if mode == 3: 49 | scroll = 'Crystal' 50 | elif mode == 5: 51 | scroll = 'Social' 52 | else: 53 | scroll = 'Unidentified' 54 | 55 | filename = "%s-summons.csv" % wizard_id 56 | is_new_file = not os.path.exists(filename) 57 | 58 | with open(filename, "ab") as log_file: 59 | field_names = ['date', 'scroll', 'unit_name', 'attribute', 'grade', 'awake'] 60 | 61 | header = {'date': 'Date', 'scroll': 'Scroll', 'unit_name': 'Unit', 'attribute': 'Attribute', 'grade': 'Grade', 62 | 'awake': 'Awakened'} 63 | 64 | SWPlugin.call_plugins('process_csv_row', ('summon_logger', 'header', (field_names, header))) 65 | 66 | log_writer = DictUnicodeWriter(log_file, fieldnames=field_names) 67 | if is_new_file: 68 | log_writer.writerow(header) 69 | 70 | if 'unit_list' in resp_json and len(resp_json['unit_list']) > 0: 71 | for i in range(0, len(resp_json['unit_list'])): 72 | unit_name = monster_name(resp_json['unit_list'][i]['unit_master_id'],'',False) 73 | attribute = monster_attribute(resp_json['unit_list'][i]['attribute']) 74 | grade = resp_json['unit_list'][i]['class'] 75 | awakened = str(resp_json['unit_list'][i]['unit_master_id']) 76 | if int(awakened[-2]) == 0: 77 | awake = 'No' 78 | else: 79 | awake = 'Yes' 80 | log_entry = {'date': time.strftime("%Y-%m-%d %H:%M"), 'scroll': scroll, 'unit_name': unit_name, 81 | 'attribute': attribute, 'grade': grade, 'awake': awake} 82 | 83 | SWPlugin.call_plugins('process_csv_row', ('summon_logger', 'entry', (field_names, log_entry))) 84 | log_writer.writerow(log_entry) 85 | return 86 | -------------------------------------------------------------------------------- /plugins/ToALogger.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | from SWParser import * 5 | import SWPlugin 6 | 7 | class ToALogger(SWPlugin.SWPlugin): 8 | def __init__(self): 9 | with open('swproxy.config') as f: 10 | self.config = json.load(f) 11 | 12 | def process_request(self, req_json, resp_json): 13 | config = self.config 14 | if 'log_toa' not in config or not config['log_toa']: 15 | return 16 | 17 | command = req_json['command'] 18 | if command == 'BattleTrialTowerResult_v2': 19 | return self.log_end_battle(req_json, resp_json, config) 20 | 21 | if command == 'BattleTrialTowerStart_v2': 22 | 
if 'toa-logger-data' not in config: 23 | config['toa-logger-data'] = {} 24 | 25 | plugin_data = config['toa-logger-data'] 26 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 27 | start = int(time.time()) 28 | monsters = resp_json['trial_tower_unit_list'][2] 29 | plugin_data[wizard_id] = {'start': start, 'monsters': monsters} 30 | 31 | def build_unit_dictionary(self, wizard_id): 32 | with open('%s-optimizer.json' % wizard_id) as f: 33 | user_data = json.load(f) 34 | mon_dict = {} 35 | for mon in user_data["mons"]: 36 | mon_dict[mon['unit_id']] = mon['name'] 37 | return mon_dict 38 | 39 | def log_end_battle(self, req_json, resp_json, config): 40 | if not config["log_toa"]: 41 | return 42 | 43 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 44 | if 'toa-logger-data' in config and wizard_id in config['toa-logger-data'] \ 45 | and 'start' in config['toa-logger-data'][wizard_id]: 46 | 47 | start_data = config['toa-logger-data'][wizard_id] 48 | 49 | delta = int(time.time()) - start_data['start'] 50 | m = divmod(delta, 60) 51 | s = m[1] # seconds 52 | elapsed_time = '%s:%02d' % (m[0], s) 53 | else: 54 | elapsed_time = 'N/A' 55 | 56 | win_lost = 'Win' if resp_json["win_lose"] == 1 else 'Lost' 57 | stage = req_json['floor_id'] 58 | if req_json['difficulty'] == 1: 59 | difficulty = 'Normal' 60 | elif req_json['difficulty'] == 2: 61 | difficulty = 'Hard' 62 | else: 63 | difficulty = 'N/A' 64 | user_mons = self.build_unit_dictionary(wizard_id) 65 | 66 | filename = "%s-toa.csv" % wizard_id 67 | is_new_file = not os.path.exists(filename) 68 | 69 | with open(filename, "ab") as log_file: 70 | field_names = ['date', 'stage', 'difficulty', 'result', 'time', 'team1', 'team2', 'team3', 'team4', 'team5', 71 | 'opteam1', 'opteam2', 'opteam3', 'opteam4', 'opteam5'] 72 | 73 | header = {'date': 'Date', 'stage': 'Stage', 'difficulty': 'Difficulty', 'result': 'Result', 'time': 'Clear time', 74 | 'team1': 'Team 1', 'team2': 'Team 2', 'team3': 'Team 3', 'team4': 'Team 4', 'team5': 'Team 5', 75 | 'opteam1': 'Op Team 1', 'opteam2': 'Op Team 2', 'opteam3': 'Op Team 3', 'opteam4': 'Op Team 4', 'opteam5': 'Op Team 5'} 76 | 77 | SWPlugin.SWPlugin.call_plugins('process_csv_row', ('toa_logger', 'header', (field_names, header))) 78 | 79 | log_writer = DictUnicodeWriter(log_file, fieldnames=field_names) 80 | if is_new_file: 81 | log_writer.writerow(header) 82 | 83 | log_entry = {'date': time.strftime("%Y-%m-%d %H:%M"), 'stage': stage, 'difficulty': difficulty, 84 | 'result': win_lost, 'time': elapsed_time} 85 | 86 | for i in range(1, len(req_json['unit_id_list']) + 1): 87 | id = req_json['unit_id_list'][i-1]['unit_id'] 88 | log_entry['team%s' % i] = user_mons[id] 89 | 90 | for i in range(1, len(start_data['monsters']) + 1): 91 | log_entry['opteam%s' % i] = monster_name(start_data['monsters'][i-1]['unit_master_id']) 92 | 93 | SWPlugin.SWPlugin.call_plugins('process_csv_row', ('toa_logger', 'entry', (field_names, log_entry))) 94 | log_writer.writerow(log_entry) 95 | return 96 | -------------------------------------------------------------------------------- /plugins/RecruitEvaluator.py: -------------------------------------------------------------------------------- 1 | from SWParser import rune_effect_type 2 | import SWPlugin 3 | 4 | 5 | def get_key(item): 6 | return item[0] 7 | 8 | 9 | def averages(lst): 10 | sorted_list = sorted(lst, key=get_key, reverse=True) 11 | top_200 = sorted_list[0:200] 12 | sum_current = sum(n for n,_ in top_200) 13 | sum_max = sum(n for _,n in top_200) 14 | list_len = len(top_200) + 
0.0 15 | 16 | return sum_current / list_len, sum_max / list_len 17 | 18 | 19 | class RecruitEvaluator(SWPlugin.SWPlugin): 20 | six_star_mobs = 0 21 | six_star_runes = 0 22 | level_15_runes = 0 23 | rune_scores = [] 24 | headers = None 25 | 26 | @staticmethod 27 | def get_sub_score(sub): 28 | if sub[0] == 0: 29 | return 0 30 | 31 | rune_type = rune_effect_type(sub[0]) 32 | max = RecruitEvaluator.sub_max_value_map[rune_type] if rune_type in RecruitEvaluator.sub_max_value_map else 0 33 | return sub[1] / max 34 | 35 | grade_multiplier_map = { 36 | 1: 0.286, 37 | 2: 0.31, 38 | 3: 0.47, 39 | 4: 0.68, 40 | 5: 0.8, 41 | 6: 1 42 | } 43 | 44 | sub_max_value_map = { 45 | 'HP%': 40.0, 46 | 'ATK%': 40.0, 47 | 'DEF%': 40.0, 48 | 'ACC': 40.0, 49 | 'RES': 40.0, 50 | 'CDmg': 35.0, 51 | 'CRate': 30.0, 52 | 'SPD': 30.0, 53 | 'ATK flat': 14*8.0, 54 | 'HP flat': 344*8.0, 55 | 'DEF flat': 14*8.0, 56 | } 57 | 58 | @staticmethod 59 | def rune_efficiency(rune): 60 | slot = rune['slot_no'] 61 | 62 | grade = rune['class'] 63 | 64 | main_bonus = 1.5 if slot % 2 == 0 else 0.8 65 | 66 | base_score = main_bonus * RecruitEvaluator.grade_multiplier_map[grade] 67 | 68 | for se in [rune['prefix_eff']] + rune['sec_eff']: 69 | base_score += RecruitEvaluator.get_sub_score(se) 70 | 71 | level = rune['upgrade_curr'] 72 | subs = 4 - min(level / 3, 4) 73 | 74 | score = (base_score, base_score + (0.2*subs)) 75 | max_score = main_bonus + 1.8 76 | 77 | final_score = (score[0]/max_score, score[1]/max_score) 78 | 79 | return final_score 80 | 81 | def process_csv_row(self, csv_type, data_type, data): 82 | if csv_type not in ['visit', 'runes']: 83 | return 84 | 85 | if data_type == 'header': 86 | RecruitEvaluator.six_star_mobs = 0 87 | RecruitEvaluator.six_star_runes = 0 88 | RecruitEvaluator.level_15_runes = 0 89 | RecruitEvaluator.rune_score = [] 90 | RecruitEvaluator.rune_potential = [] 91 | 92 | ids, headers = data 93 | 94 | ids.append('curr_potential') 95 | ids.append('max_potential') 96 | headers['curr_potential'] = "Current Potential" 97 | headers['max_potential'] = "Max Potential" 98 | RecruitEvaluator.headers = ids 99 | return 100 | if data_type == 'rune': 101 | rune, row = data 102 | if rune['class'] == 6: 103 | RecruitEvaluator.six_star_runes += 1 104 | if rune['upgrade_curr'] == 15: 105 | RecruitEvaluator.level_15_runes += 1 106 | 107 | eff = self.rune_efficiency(rune) 108 | RecruitEvaluator.rune_scores.append(eff) 109 | 110 | row['curr_potential'] = "%.2f %%" % (eff[0] * 100) 111 | row['max_potential'] = "%.2f %%" % (eff[1] * 100) 112 | elif data_type == 'monster': 113 | mob, row = data 114 | if mob['class'] == 6: 115 | RecruitEvaluator.six_star_mobs += 1 116 | elif data_type == 'footer': 117 | headers = RecruitEvaluator.headers 118 | footer = data 119 | footer.append({}) 120 | footer.append({ 121 | headers[0]: 'Total 6* mobs', 122 | headers[1]: 'Total 6* runes', 123 | headers[2]: 'Total level 15 runes', 124 | headers[3]: 'Avg. current rune potential', 125 | headers[4]: 'Avg. 
max rune potential'}) 126 | 127 | avg_curr, avg_max = averages(RecruitEvaluator.rune_scores) 128 | 129 | footer.append({ 130 | headers[0]: RecruitEvaluator.six_star_mobs, 131 | headers[1]: RecruitEvaluator.six_star_runes, 132 | headers[2]: RecruitEvaluator.level_15_runes, 133 | headers[3]: "%.2f %%" % (avg_curr * 100), 134 | headers[4]: "%.2f %%" % (avg_max * 100) 135 | }) 136 | -------------------------------------------------------------------------------- /SWParser/gui/MainWindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 632 10 | 422 11 | 12 | 13 | 14 | Summoners War Proxy 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | Your IP Address : 26 | 27 | 28 | 29 | 30 | 31 | 32 | true 33 | 34 | 35 | true 36 | 37 | 38 | 39 | 40 | 41 | 42 | Qt::Horizontal 43 | 44 | 45 | 46 | 40 47 | 20 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | Proxy Port 56 | 57 | 58 | 59 | 60 | 61 | 62 | 1 63 | 64 | 65 | 65535 66 | 67 | 68 | 69 | 70 | 71 | 72 | Start Proxy Server 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | true 84 | 85 | 86 | 87 | 88 | 0 89 | 0 90 | 610 91 | 325 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 0 108 | 0 109 | 632 110 | 20 111 | 112 | 113 | 114 | 115 | &File 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | &Help 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | &Open PCAP file 134 | 135 | 136 | 137 | 138 | &Quit 139 | 140 | 141 | 142 | 143 | About 144 | 145 | 146 | 147 | 148 | 149 | 150 | -------------------------------------------------------------------------------- /plugins/LiveOptimizer.py: -------------------------------------------------------------------------------- 1 | import time 2 | import json 3 | import os 4 | from SWPlugin import SWPlugin 5 | from SWParser import * 6 | 7 | class LiveOptimizer(SWPlugin): 8 | def __init__(self): 9 | with open('swproxy.config') as f: 10 | self.config = json.load(f) 11 | 12 | def process_request(self, req_json, resp_json): 13 | config = self.config 14 | if 'live_sync' not in config or not config['live_sync']: 15 | return 16 | command = req_json['command'] 17 | if command == 'BattleRiftOfWorldsRaidResult': 18 | return self.log_end_raid(req_json, resp_json, config) 19 | if command == 'SellRuneCraftItem': 20 | return self.logSellCraft(req_json, resp_json, config) 21 | if command == 'BattleDungeonResult' or command == 'BattleScenarioResult': 22 | return self.log_dungeon_result(req_json, resp_json, config) 23 | if command == 'SellRune': 24 | return self.log_sell_rune(req_json, resp_json, config) 25 | if command == 'UpgradeRune': 26 | return self.log_upgrade_rune(req_json, resp_json, config) 27 | if command == 'EquipRune': 28 | return self.log_equip_rune(req_json, resp_json, config) 29 | if command == 'UnequipRune': 30 | return self.log_unequip_rune(req_json, resp_json, config) 31 | if command == 'AmplifyRune': 32 | return self.log_amplify_rune(req_json, resp_json, config) 33 | if command == 'BuyBlackMarketItem': 34 | return self.log_buy_rune(req_json, resp_json, config) 35 | 36 | def log_buy_rune(self, req_json, resp_json, config): 37 | if 'runes' in resp_json and len(resp_json['runes']) == 1: 38 | rune_json, _ = map_rune(resp_json['runes'][0], '0') 39 | self.save_action(req_json['wizard_id'], req_json["ts_val"], 'new_rune', 40 | {'rune': rune_json }) 41 | 42 | def log_amplify_rune(self, req_json, resp_json, config): 43 | rune_json, _ = map_rune(resp_json['rune'], '0') 44 | 
self.save_action(req_json['wizard_id'], req_json["ts_val"], 'amplify_rune', 45 | {'rune_id': req_json['rune_id'], 'craft_id': req_json['craft_item_id'], 'rune': rune_json }) 46 | 47 | def log_unequip_rune(self, req_json, resp_json, config): 48 | self.save_action(req_json['wizard_id'], req_json["ts_val"], 'unequip_rune', {'rune_id': req_json['rune_id']}) 49 | 50 | def log_equip_rune(self, req_json, resp_json, config): 51 | self.save_action(req_json['wizard_id'], req_json["ts_val"], 'equip_rune', {'rune_id': req_json['rune_id'], 'mob_id': req_json['unit_id']}) 52 | 53 | def log_upgrade_rune(self, req_json, resp_json, config): 54 | rune_json, _ = map_rune(resp_json['rune'], '0') 55 | self.save_action(req_json['wizard_id'], req_json["ts_val"], 'upgrade_rune', {'rune': rune_json}) 56 | 57 | def log_sell_rune(self, req_json, resp_json, config): 58 | self.save_action(req_json['wizard_id'], req_json["ts_val"], 'sell_rune', {'rune_id_list': req_json['rune_id_list']}) 59 | 60 | def log_dungeon_result(self, req_json, resp_json, config): 61 | win_lost = 'Win' if resp_json["win_lose"] == 1 else 'Lost' 62 | 63 | # do not log loses 64 | if win_lost == 'Lost': 65 | return 66 | reward = resp_json['reward'] if 'reward' in resp_json else {} 67 | if 'crate' in reward and 'rune' in reward['crate']: 68 | rune = reward['crate']['rune'] 69 | optimizer_rune, _ = map_rune(rune, 1) 70 | self.save_action(req_json['wizard_id'], req_json["ts_val"], 'new_rune', {'rune': optimizer_rune}) 71 | 72 | def logSellCraft(self, req_json, resp_json, config): 73 | self.save_action(req_json['wizard_id'], req_json["ts_val"], 'sell_craft', {'craft_id_list': req_json['craft_item_id_list']}) 74 | pass 75 | 76 | def save_action(self, wizard_id, timestamp, action, content): 77 | result = {'wizard_id': wizard_id, 'timestamp': timestamp, 'action': action} 78 | result.update(content) 79 | filename = 'live/%s-live-%s.json' % (wizard_id, int(time.time() * 1000)) 80 | if not os.path.exists('live'): 81 | os.makedirs('live') 82 | with open(filename, 'w') as f: 83 | json.dump(result, f) 84 | 85 | def log_end_raid(self, req_json, resp_json, config): 86 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 87 | win_lost = 'Win' if resp_json["win_lose"] == 1 else 'Lost' 88 | 89 | if win_lost == 'Win': 90 | order = 0 91 | for i in resp_json['battle_reward_list']: 92 | if wizard_id != str(i['wizard_id']): 93 | order += 1 94 | else: 95 | break 96 | 97 | reward = resp_json['battle_reward_list'][order]['reward_list'][0] 98 | if reward['item_master_type'] == 27: 99 | craft_info = resp_json['reward']['crate']['runecraft_info'] 100 | type_str = str(craft_info['craft_type_id']) 101 | craft = { 102 | 'item_id': craft_info['craft_item_id'], 103 | 'type': 'E' if craft_info['craft_type'] == 1 else 'G', 104 | 'set': rune_set_id(int(type_str[:-4])), 105 | 'stat': rune_effect_type(int(type_str[-4:-2])), 106 | 'grade': int(type_str[-1:]) 107 | } 108 | self.save_action(wizard_id, resp_json["ts_val"], 'new_craft', {'craft': craft}) 109 | -------------------------------------------------------------------------------- /plugins/RaidLogger.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | from SWParser import * 5 | from SWPlugin import SWPlugin 6 | import threading 7 | 8 | runecraft_grade_map = { 9 | 1: 'Common', 10 | 2: 'Magic', 11 | 3: 'Rare', 12 | 4: 'Hero', 13 | 5: 'Legendary' 14 | } 15 | 16 | raid_map = { 17 | 1001: 'Khi\'zar', 18 | } 19 | 20 | def identify_raid(id): 21 | return 
raid_map[id] 22 | 23 | def identify_rune_grade(id): 24 | return runecraft_grade_map[id] 25 | 26 | class RaidLogger(SWPlugin): 27 | def __init__(self): 28 | with open('swproxy.config') as f: 29 | self.config = json.load(f) 30 | 31 | def process_request(self, req_json, resp_json): 32 | config = self.config 33 | if 'log_raids' not in config or not config['log_raids']: 34 | return 35 | 36 | command = req_json['command'] 37 | if command == 'BattleRiftOfWorldsRaidResult': 38 | return self.log_end_battle(req_json, resp_json, config) 39 | 40 | if command == 'BattleRiftOfWorldsRaidStart': 41 | if 'raid-logger-data' not in config: 42 | config['raid-logger-data'] = {} 43 | 44 | plugin_data = config['raid-logger-data'] 45 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 46 | start = int(time.time()) 47 | stage = '%s R%s' % (identify_raid(resp_json['battle_info']['room_info']['raid_id']), resp_json['battle_info']['stage_id']) 48 | team = [] 49 | for i in resp_json['battle_info']['user_list']: 50 | if i['wizard_id'] != resp_json['wizard_info']['wizard_id']: 51 | team.append(i['wizard_name']) 52 | 53 | plugin_data[wizard_id] = {'stage' : stage, 'start': start, 'team': team} 54 | 55 | def log_end_battle(self, req_json, resp_json, config): 56 | if not config["log_raids"]: 57 | return 58 | 59 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 60 | if 'raid-logger-data' in config and wizard_id in config['raid-logger-data'] \ 61 | and 'start' in config['raid-logger-data'][wizard_id]: 62 | 63 | start_data = config['raid-logger-data'][wizard_id] 64 | 65 | delta = int(time.time()) - start_data['start'] 66 | m = divmod(delta, 60) 67 | s = m[1] # seconds 68 | elapsed_time = '%s:%02d' % (m[0], s) 69 | else: 70 | elapsed_time = 'N/A' 71 | 72 | win_lost = 'Win' if resp_json["win_lose"] == 1 else 'Lost' 73 | 74 | if win_lost == 'Win': 75 | order = 0 76 | for i in resp_json['battle_reward_list']: 77 | if wizard_id != str(i['wizard_id']): 78 | order += 1 79 | else: 80 | break 81 | 82 | reward = resp_json['battle_reward_list'][order]['reward_list'][0] 83 | if reward['item_master_type'] == 1: 84 | log_reward = {'drop': 'Rainbowmon 3*'} 85 | elif reward['item_master_type'] == 6: 86 | log_reward = {'drop': 'Mana Stones', 'value': reward['item_quantity']} 87 | elif reward['item_master_type'] == 27: 88 | if reward['runecraft_type'] == 2: 89 | item = 'Grindstone' 90 | elif reward['runecraft_type'] == 1: 91 | item = 'Enchanted Gem' 92 | value = resp_json['reward']['crate']['runecraft_info']['sell_value'] 93 | rune_set = rune_set_id(reward['runecraft_set_id']) 94 | rarity = identify_rune_grade(reward['runecraft_rank']) 95 | stat = rune_effect_type(reward['runecraft_effect_id']) 96 | log_reward = {'drop': item, 'value': value, 'set': rune_set, 'rarity': rarity, 'stat': stat} 97 | elif reward['item_master_type'] == 9: 98 | if reward['item_master_id'] == 2: 99 | log_reward = {'drop': "Mystical Scroll"} 100 | elif reward['item_master_id'] == 8: 101 | log_reward = {'drop': "Summoning Stones x%s" % reward['item_quantity']} 102 | elif reward['item_master_type'] == 10: #placeholder waiting for info 103 | log_reward = {'drop': "Shapeshifting Stone x%s" % reward['item_quantity']} 104 | else: 105 | log_reward = {'drop': "Unknown drop %s" % json.dumps(reward)} 106 | 107 | filename = "%s-raids.csv" % wizard_id 108 | is_new_file = not os.path.exists(filename) 109 | 110 | with open(filename, "ab") as log_file: 111 | field_names = ['date', 'raid', 'result', 'time', 'team1', 'team2', 'drop', 'value', 'set', 'rarity', 'stat'] 112 | 
113 | header = {'date': 'Date', 'raid': 'Raid', 'result': 'Result', 'time': 'Clear time', 114 | 'team1': 'Teammate #1', 'team2': 'Teammate #2', 'drop': 'Drop', 'value': 'Sell value', 115 | 'set': 'Rune Set', 'rarity': 'Rarity', 'stat': 'Stat'} 116 | 117 | SWPlugin.call_plugins('process_csv_row', ('raid_logger', 'header', (field_names, header))) 118 | 119 | log_writer = DictUnicodeWriter(log_file, fieldnames=field_names) 120 | if is_new_file: 121 | log_writer.writerow(header) 122 | 123 | log_entry = {'date': time.strftime("%Y-%m-%d %H:%M"), 'raid': start_data['stage'] if 'stage' in start_data else 'unknown', 124 | 'result': win_lost, 'time': elapsed_time, 'team1': start_data['team'][0], 'team2': start_data['team'][1]} 125 | 126 | log_entry.update(log_reward) 127 | 128 | SWPlugin.call_plugins('process_csv_row', ('raid_logger', 'entry', (field_names, log_entry))) 129 | log_writer.writerow(log_entry) 130 | return 131 | -------------------------------------------------------------------------------- /SWParser/gui/MainWindow.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Form implementation generated from reading ui file 'SWParser/gui/MainWindow.ui' 4 | # 5 | # Created by: PyQt4 UI code generator 4.11.4 6 | # 7 | # WARNING! All changes made in this file will be lost! 8 | 9 | from PyQt4 import QtCore, QtGui 10 | 11 | try: 12 | _fromUtf8 = QtCore.QString.fromUtf8 13 | except AttributeError: 14 | def _fromUtf8(s): 15 | return s 16 | 17 | try: 18 | _encoding = QtGui.QApplication.UnicodeUTF8 19 | def _translate(context, text, disambig): 20 | return QtGui.QApplication.translate(context, text, disambig, _encoding) 21 | except AttributeError: 22 | def _translate(context, text, disambig): 23 | return QtGui.QApplication.translate(context, text, disambig) 24 | 25 | class Ui_MainWindow(object): 26 | def setupUi(self, MainWindow): 27 | MainWindow.setObjectName(_fromUtf8("MainWindow")) 28 | MainWindow.resize(632, 422) 29 | self.centralwidget = QtGui.QWidget(MainWindow) 30 | self.centralwidget.setObjectName(_fromUtf8("centralwidget")) 31 | self.gridLayout = QtGui.QGridLayout(self.centralwidget) 32 | self.gridLayout.setObjectName(_fromUtf8("gridLayout")) 33 | self.verticalLayout = QtGui.QVBoxLayout() 34 | self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) 35 | self.horizontalLayout = QtGui.QHBoxLayout() 36 | self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) 37 | self.label = QtGui.QLabel(self.centralwidget) 38 | self.label.setObjectName(_fromUtf8("label")) 39 | self.horizontalLayout.addWidget(self.label) 40 | self.ipAddress = QtGui.QLineEdit(self.centralwidget) 41 | self.ipAddress.setEnabled(True) 42 | self.ipAddress.setReadOnly(False) 43 | self.ipAddress.setObjectName(_fromUtf8("ipAddress")) 44 | self.horizontalLayout.addWidget(self.ipAddress) 45 | spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) 46 | self.horizontalLayout.addItem(spacerItem) 47 | self.label_2 = QtGui.QLabel(self.centralwidget) 48 | self.label_2.setObjectName(_fromUtf8("label_2")) 49 | self.horizontalLayout.addWidget(self.label_2) 50 | self.proxyPort = QtGui.QSpinBox(self.centralwidget) 51 | self.proxyPort.setMinimum(1) 52 | self.proxyPort.setMaximum(65535) 53 | self.proxyPort.setObjectName(_fromUtf8("proxyPort")) 54 | self.horizontalLayout.addWidget(self.proxyPort) 55 | self.startProxy = QtGui.QPushButton(self.centralwidget) 56 | self.startProxy.setObjectName(_fromUtf8("startProxy")) 57 | 
self.horizontalLayout.addWidget(self.startProxy) 58 | self.verticalLayout.addLayout(self.horizontalLayout) 59 | self.gridLayout.addLayout(self.verticalLayout, 0, 0, 1, 1) 60 | self.scrollArea = QtGui.QScrollArea(self.centralwidget) 61 | self.scrollArea.setWidgetResizable(True) 62 | self.scrollArea.setObjectName(_fromUtf8("scrollArea")) 63 | self.scrollAreaWidgetContents = QtGui.QWidget() 64 | self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 610, 325)) 65 | self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents")) 66 | self.gridLayout_2 = QtGui.QGridLayout(self.scrollAreaWidgetContents) 67 | self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2")) 68 | self.logWindow = QtGui.QListWidget(self.scrollAreaWidgetContents) 69 | self.logWindow.setObjectName(_fromUtf8("logWindow")) 70 | self.gridLayout_2.addWidget(self.logWindow, 0, 0, 1, 1) 71 | self.scrollArea.setWidget(self.scrollAreaWidgetContents) 72 | self.gridLayout.addWidget(self.scrollArea, 1, 0, 1, 1) 73 | MainWindow.setCentralWidget(self.centralwidget) 74 | self.menubar = QtGui.QMenuBar(MainWindow) 75 | self.menubar.setGeometry(QtCore.QRect(0, 0, 632, 20)) 76 | self.menubar.setObjectName(_fromUtf8("menubar")) 77 | self.menuFile = QtGui.QMenu(self.menubar) 78 | self.menuFile.setObjectName(_fromUtf8("menuFile")) 79 | self.menuHelp = QtGui.QMenu(self.menubar) 80 | self.menuHelp.setObjectName(_fromUtf8("menuHelp")) 81 | MainWindow.setMenuBar(self.menubar) 82 | self.statusbar = QtGui.QStatusBar(MainWindow) 83 | self.statusbar.setObjectName(_fromUtf8("statusbar")) 84 | MainWindow.setStatusBar(self.statusbar) 85 | self.actionOpen_PCAP = QtGui.QAction(MainWindow) 86 | self.actionOpen_PCAP.setObjectName(_fromUtf8("actionOpen_PCAP")) 87 | self.actionQuit = QtGui.QAction(MainWindow) 88 | self.actionQuit.setObjectName(_fromUtf8("actionQuit")) 89 | self.actionAbout = QtGui.QAction(MainWindow) 90 | self.actionAbout.setObjectName(_fromUtf8("actionAbout")) 91 | self.menuFile.addAction(self.actionOpen_PCAP) 92 | self.menuFile.addSeparator() 93 | self.menuFile.addAction(self.actionQuit) 94 | self.menuHelp.addAction(self.actionAbout) 95 | self.menubar.addAction(self.menuFile.menuAction()) 96 | self.menubar.addAction(self.menuHelp.menuAction()) 97 | 98 | self.retranslateUi(MainWindow) 99 | QtCore.QMetaObject.connectSlotsByName(MainWindow) 100 | 101 | def retranslateUi(self, MainWindow): 102 | MainWindow.setWindowTitle(_translate("MainWindow", "Summoners War Proxy", None)) 103 | self.label.setText(_translate("MainWindow", "Your IP Address : ", None)) 104 | self.label_2.setText(_translate("MainWindow", "Proxy Port", None)) 105 | self.startProxy.setText(_translate("MainWindow", "Start Proxy Server", None)) 106 | self.menuFile.setTitle(_translate("MainWindow", "&File", None)) 107 | self.menuHelp.setTitle(_translate("MainWindow", "&Help", None)) 108 | self.actionOpen_PCAP.setText(_translate("MainWindow", "&Open PCAP file", None)) 109 | self.actionQuit.setText(_translate("MainWindow", "&Quit", None)) 110 | self.actionAbout.setText(_translate("MainWindow", "About", None)) 111 | 112 | -------------------------------------------------------------------------------- /plugins/GuildBattleLogger.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | from SWParser import * 5 | from SWPlugin import SWPlugin 6 | import threading 7 | 8 | win_lose_round = { 9 | 1 : 'Win', 10 | 2 : 'Lose', 11 | 3 : 'Draw', 12 | } 13 | 14 | win_lose_battle = { 15 | 1 : 'Win', 
16 | 2 : 'Draw', 17 | 3 : 'Win', 18 | 4 : 'Lose', 19 | 6 : 'Lose', 20 | 9 : 'Draw', 21 | } 22 | 23 | class GuildBattleLogger(SWPlugin): 24 | def __init__(self): 25 | with open('swproxy.config') as f: 26 | self.config = json.load(f) 27 | 28 | def process_request(self, req_json, resp_json): 29 | config = self.config 30 | if 'log_guild_battle' not in config or not config['log_guild_battle']: 31 | return 32 | 33 | command = req_json['command'] 34 | if command == 'GetGuildWarMatchupInfo' or command == 'BattleGuildWarStart': 35 | if 'guild-battle-logger-data' not in config: 36 | config['guild-battle-logger-data'] = {} 37 | plugin_data = config['guild-battle-logger-data'] 38 | wizard_id = str(req_json['wizard_id']) 39 | if wizard_id not in plugin_data: 40 | plugin_data[wizard_id] = {} 41 | 42 | if command == 'GetGuildWarMatchupInfo': 43 | guild_opponent_list = {} 44 | for opp in resp_json['opp_guild_member_list']: 45 | guild_opponent_list[str(opp['wizard_id'])] = opp['wizard_name'] 46 | guildname = resp_json['opp_guild_info']['name'] 47 | plugin_data[wizard_id].update({'guild_opponent_list' : guild_opponent_list, 'guildname' : guildname}) 48 | 49 | if command == 'BattleGuildWarStart': 50 | start = int(time.time()) 51 | opp_monster_list = {} 52 | for battle in resp_json['guildwar_opp_unit_list']: 53 | for opp_mon in battle: 54 | opp_monster_list[opp_mon['pos_id']] = opp_mon['unit_info']['unit_master_id'] 55 | i = 1 56 | monster_list = {} 57 | for battle in resp_json['guildwar_my_unit_list']: 58 | for mon in battle: 59 | monster_list[i] = mon['unit_master_id'] 60 | i += 1 61 | opponent_id = req_json['opp_wizard_id'] 62 | plugin_data[wizard_id].update({'start' : start, 'opp_monster_list' : opp_monster_list, 'opponent_id' : opponent_id, 'monster_list' : monster_list,}) 63 | 64 | if command == 'BattleGuildWarResult': 65 | return self.log_end_battle(req_json, resp_json, config) 66 | 67 | def build_unit_dictionary(self, wizard_id): 68 | with open('%s-optimizer.json' % wizard_id) as f: 69 | user_data = json.load(f) 70 | mon_dict = {} 71 | for mon in user_data["mons"]: 72 | mon_dict[mon['unit_id']] = mon['name'] 73 | return mon_dict 74 | 75 | def log_end_battle(self, req_json, resp_json, config): 76 | if not config["log_guild_battle"]: 77 | return 78 | 79 | command = req_json['command'] 80 | 81 | if command == 'BattleGuildWarResult': 82 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 83 | if 'guild-battle-logger-data' in config and wizard_id in config['guild-battle-logger-data'] \ 84 | and 'start' in config['guild-battle-logger-data'][wizard_id]: 85 | start = config['guild-battle-logger-data'][wizard_id]['start'] 86 | delta = int(time.time()) - start 87 | m = divmod(delta, 60) 88 | s = m[1] # seconds 89 | elapsed_time = '%s:%02d' % (m[0], s) 90 | opp_monster_list = config['guild-battle-logger-data'][wizard_id]['opp_monster_list'] 91 | monster_list = config['guild-battle-logger-data'][wizard_id]['monster_list'] 92 | else: 93 | elapsed_time = 'N/A' 94 | 95 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 96 | user_mons = self.build_unit_dictionary(wizard_id) 97 | 98 | guildpoints = 0 99 | for reward in resp_json['reward_list']: 100 | guildpoints += reward['guild_point_var'] 101 | guildpoints += reward['guild_point_bonus'] 102 | 103 | round1 = win_lose_round[resp_json['win_lose_list'][0]] 104 | round2 = win_lose_round[resp_json['win_lose_list'][1]] 105 | result = win_lose_battle[resp_json['win_lose_list'][0]*resp_json['win_lose_list'][1]] 106 | 107 | oppguild = 
config['guild-battle-logger-data'][wizard_id]['guildname'] 108 | 109 | opponent_list = {} 110 | if 'guild-battle-logger-data' in config and wizard_id in config['guild-battle-logger-data'] \ 111 | and 'guild_opponent_list' in config['guild-battle-logger-data'][wizard_id]: 112 | opponent_list.update(config['guild-battle-logger-data'][wizard_id]['guild_opponent_list']) 113 | opponent_id = str(config['guild-battle-logger-data'][wizard_id]['opponent_id']) 114 | 115 | if opponent_list.has_key(opponent_id): 116 | opponent = opponent_list[opponent_id] 117 | else: 118 | opponent = opponent_id 119 | 120 | del config['guild-battle-logger-data'][wizard_id]['start'] # make sure start time doesn't persist 121 | del config['guild-battle-logger-data'][wizard_id]['opp_monster_list'] # make sure opp_mons doen't persist 122 | del config['guild-battle-logger-data'][wizard_id]['opponent_id'] # make sure opponent_id doesn't persist 123 | filename = "%s-guildbattle.csv" % wizard_id 124 | is_new_file = not os.path.exists(filename) 125 | 126 | with open(filename, "ab") as log_file: 127 | field_names = ['date', 'oppguild', 'opponent', 'round1', 'team1', 'team2', 'team3', 'opteam1', 'opteam2', 'opteam3', 128 | 'round2', 'team4', 'team5', 'team6', 'opteam4', 'opteam5', 'opteam6', 'result', 'guildpoints'] 129 | 130 | header = {'date': 'Date', 'oppguild': 'Enemy Guild', 'opponent': 'Opponent', 131 | 'round1': 'Round 1', 'team1': 'Team1', 'team2': 'Team2', 'team3': 'Team3', 'opteam1': 'OpTeam1', 'opteam2': 'OpTeam2', 'opteam3': 'OpTeam3', 132 | 'round2': 'Round 2', 'team4': 'Team4', 'team5': 'Team5', 'team6': 'Team6', 'opteam4': 'OpTeam4', 'opteam5': 'OpTeam5', 'opteam6': 'OpTeam6', 133 | 'result': 'Result', 'guildpoints': 'Guild Points'} 134 | 135 | SWPlugin.call_plugins('process_csv_row', ('guild_battle_logger', 'header', (field_names, header))) 136 | 137 | log_writer = DictUnicodeWriter(log_file, fieldnames=field_names) 138 | if is_new_file: 139 | log_writer.writerow(header) 140 | 141 | log_entry = {'date': time.strftime("%Y-%m-%d %H:%M"), 'oppguild': oppguild, 'opponent': opponent, 142 | 'round1': round1, 'round2': round2, 'result': result, 'guildpoints' : guildpoints} 143 | 144 | for i in range(1, len(monster_list) + 1): 145 | log_entry['team%s' % i] = monster_name(monster_list[i]) 146 | for i in range(1, len(opp_monster_list) + 1): 147 | log_entry['opteam%s' % i] = monster_name(opp_monster_list[i]) 148 | 149 | SWPlugin.call_plugins('process_csv_row', ('guild_battle_logger', 'entry', (field_names, log_entry))) 150 | log_writer.writerow(log_entry) 151 | return 152 | -------------------------------------------------------------------------------- /plugins/ArenaLogger.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | from SWParser import * 5 | from SWPlugin import SWPlugin 6 | import logging 7 | logger = logging.getLogger() 8 | import threading 9 | 10 | rival_name = { #there may be more, given alt versions of rivals 11 | '5001': "(R) Gready", 12 | '5002': "(R) Morgana", 13 | '5003': "(R) Edmund", 14 | '5004': "(R) Volta", 15 | '5006': "(R) Taihan", 16 | '5007': "(R) Shai", 17 | '5009': "(R) Razak", 18 | '5010': "(R) Kellan", 19 | '5011': "(R) Kian" 20 | } 21 | 22 | class ArenaLogger(SWPlugin): 23 | def __init__(self): 24 | with open('swproxy.config') as f: 25 | self.config = json.load(f) 26 | 27 | def process_request(self, req_json, resp_json): 28 | config = self.config 29 | if 'log_arena' not in config or not config['log_arena']: 30 
| return 31 | 32 | command = req_json['command'] 33 | if command == 'GetArenaLog' or command == 'GetArenaWizardList' or command == 'BattleArenaStart': 34 | if 'arena-logger-data' not in config: 35 | config['arena-logger-data'] = {} 36 | plugin_data = config['arena-logger-data'] 37 | wizard_id = str(req_json['wizard_id']) 38 | if wizard_id not in plugin_data: 39 | plugin_data[wizard_id] = {} 40 | 41 | if command == 'GetArenaLog': 42 | revenge_list = {} 43 | for opp in resp_json['arena_log']: 44 | revenge_list[str(opp['wizard_id'])] = opp['wizard_name'] 45 | plugin_data[wizard_id].update({'revenge_list' : revenge_list}) 46 | 47 | if command == 'GetArenaWizardList': 48 | arena_list = {} 49 | for opp in resp_json['arena_list']: 50 | arena_list[str(opp['wizard_id'])] = opp['wizard_name'] 51 | plugin_data[wizard_id].update({'arena_list' : arena_list}) 52 | 53 | if command == 'BattleArenaStart': 54 | start = int(time.time()) 55 | opp_monster_list = {} 56 | for opp_mon in resp_json['opp_unit_list']: 57 | opp_monster_list[opp_mon['pos_id']] = opp_mon['unit_info']['unit_master_id'] 58 | opponent_id = req_json['opp_wizard_id'] 59 | plugin_data[wizard_id].update({'start' : start, 'opp_monster_list' : opp_monster_list, 'opponent_id' : opponent_id}) 60 | 61 | if command == 'BattleArenaResult': 62 | return self.log_end_battle(req_json, resp_json, config) 63 | 64 | def build_unit_dictionary(self, wizard_id): 65 | with open('%s-optimizer.json' % wizard_id) as f: 66 | user_data = json.load(f) 67 | mon_dict = {} 68 | for mon in user_data["mons"]: 69 | mon_dict[mon['unit_id']] = mon['name'] 70 | return mon_dict 71 | 72 | def log_end_battle(self, req_json, resp_json, config): 73 | if not config["log_arena"]: 74 | return 75 | 76 | command = req_json['command'] 77 | 78 | if command == 'BattleArenaResult': 79 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 80 | if 'arena-logger-data' in config and wizard_id in config['arena-logger-data'] \ 81 | and 'start' in config['arena-logger-data'][wizard_id]: 82 | start = config['arena-logger-data'][wizard_id]['start'] 83 | delta = int(time.time()) - start 84 | m = divmod(delta, 60) 85 | s = m[1] # seconds 86 | elapsed_time = '%s:%02d' % (m[0], s) 87 | opp_monster_list = config['arena-logger-data'][wizard_id]['opp_monster_list'] 88 | else: 89 | elapsed_time = 'N/A' 90 | 91 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 92 | if not os.path.exists('%s-optimizer.json' % wizard_id): 93 | logger.warn("optimizer file is needed for arena plugin") 94 | return 95 | 96 | user_mons = self.build_unit_dictionary(wizard_id) 97 | win_lost = 'Win' if resp_json["win_lose"] == 1 else 'Lost' 98 | 99 | reward = resp_json['reward'] if 'reward' in resp_json else {} 100 | mana = reward['mana'] if 'mana' in reward else 0 101 | crystal = reward['crystal'] if 'crystal' in reward else 0 102 | energy = reward['energy'] if 'energy' in reward else 0 103 | honor = reward['honor_point'] if 'honor_point' in reward else 0 104 | 105 | opponent_list = {} 106 | if 'arena-logger-data' in config and wizard_id in config['arena-logger-data'] \ 107 | and 'arena_list' in config['arena-logger-data'][wizard_id]: 108 | opponent_list.update(config['arena-logger-data'][wizard_id]['arena_list']) 109 | if 'arena-logger-data' in config and wizard_id in config['arena-logger-data'] \ 110 | and 'revenge_list' in config['arena-logger-data'][wizard_id]: 111 | opponent_list.update(config['arena-logger-data'][wizard_id]['revenge_list']) 112 | opponent_list.update(rival_name) 113 | opponent_id = 
str(config['arena-logger-data'][wizard_id]['opponent_id']) 114 | 115 | if opponent_list.has_key(opponent_id): 116 | opponent = opponent_list[opponent_id] 117 | else: 118 | opponent = opponent_id 119 | 120 | del config['arena-logger-data'][wizard_id]['start'] # make sure start time doesn't persist 121 | del config['arena-logger-data'][wizard_id]['opp_monster_list'] # make sure opp_mons doen't persist 122 | del config['arena-logger-data'][wizard_id]['opponent_id'] # make sure opponent_id doesn't persist 123 | # don't delete arena_list or revenge_list as they are needed for future battles 124 | filename = "%s-arena.csv" % wizard_id 125 | is_new_file = not os.path.exists(filename) 126 | 127 | with open(filename, "ab") as log_file: 128 | field_names = ['date', 'result', 'time', 'mana', 'crystal', 'energy', 'honor', 'opponent', 129 | 'team1', 'team2', 'team3', 'team4', 'opteam1', 'opteam2', 'opteam3', 'opteam4'] 130 | 131 | header = {'date': 'Date', 'result': 'Result', 'time':'Clear time', 'mana':'Mana', 132 | 'crystal': 'Crystal', 'energy': 'Energy', 'honor': 'Honor', 'opponent': 'Opponent', 133 | 'team1': 'Team1', 'team2': 'Team2', 'team3': 'Team3', 'team4': 'Team4', 134 | 'opteam1': 'OpTeam1', 'opteam2': 'OpTeam2', 'opteam3': 'OpTeam3', 'opteam4': 'OpTeam4'} 135 | 136 | SWPlugin.call_plugins('process_csv_row', ('arena_logger', 'header', (field_names, header))) 137 | 138 | log_writer = DictUnicodeWriter(log_file, fieldnames=field_names) 139 | if is_new_file: 140 | log_writer.writerow(header) 141 | 142 | log_entry = {'date': time.strftime("%Y-%m-%d %H:%M"), 'result': win_lost, 'time': elapsed_time, 143 | 'mana': mana, 'crystal': crystal, 'energy': energy, 'honor' : honor} 144 | 145 | log_entry['opponent'] = opponent 146 | if 'unit_list' in resp_json and len(resp_json['unit_list']) > 0: 147 | for i in range(1, len(resp_json['unit_list']) + 1): 148 | log_entry['team%s' % i] = monster_name(resp_json['unit_list'][i-1]['unit_master_id']) 149 | else: 150 | for i in range(1, len(req_json['unit_id_list']) + 1): 151 | id = req_json['unit_id_list'][i-1]['unit_id'] 152 | log_entry['team%s' % i] = user_mons[id] 153 | for i in range(1, len(opp_monster_list) + 1): 154 | log_entry['opteam%s' % i] = monster_name(opp_monster_list[i]) 155 | 156 | SWPlugin.call_plugins('process_csv_row', ('arena_logger', 'entry', (field_names, log_entry))) 157 | log_writer.writerow(log_entry) 158 | return 159 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 
21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 
93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 150 | 151 | Each version is given a distinguishing version number. If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. 
If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 166 | -------------------------------------------------------------------------------- /SWParser/parser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import csv 4 | import json 5 | import cStringIO 6 | import sys 7 | import struct 8 | import numbers 9 | import os 10 | import codecs 11 | from SWPlugin import SWPlugin 12 | from collections import OrderedDict 13 | from smon_decryptor import decrypt_request, decrypt_response 14 | from monsters import monsters_name_map as name_map 15 | 16 | # ref: http://stackoverflow.com/a/5838817/1020222 17 | class DictUnicodeWriter(object): 18 | def __init__(self, f, fieldnames, dialect=csv.excel, encoding="utf-8", newfile=True, **kwds): 19 | # Redirect output to a queue 20 | self.queue = cStringIO.StringIO() 21 | self.writer = csv.DictWriter(self.queue, fieldnames, dialect=dialect, **kwds) 22 | self.stream = f 23 | self.encoder = codecs.getincrementalencoder(encoding)() 24 | if newfile: 25 | self.writebom() 26 | 27 | def writerow(self, D): 28 | self.writer.writerow({k:unicode(v).encode("utf-8") for k, v in D.items()}) 29 | # Fetch UTF-8 output from the queue ... 30 | data = self.queue.getvalue() 31 | data = data.decode("utf-8") 32 | # ... 
and reencode it into the target encoding 33 | data = self.encoder.encode(data) 34 | # write to the target stream 35 | self.stream.write(data) 36 | # empty queue 37 | self.queue.truncate(0) 38 | 39 | def writerows(self, rows): 40 | for D in rows: 41 | self.writerow(D) 42 | 43 | def writeheader(self): 44 | self.writer.writeheader() 45 | 46 | def writebom(self): 47 | """Write BOM, so excel can identify this as UTF8""" 48 | self.stream.write(u'\ufeff'.encode('utf8')) 49 | 50 | 51 | def monster_name(uid, default_unknown="???", full=True): 52 | uid = str(uid).ljust(5, "0") 53 | 54 | if default_unknown == "???": 55 | default_unknown += "[{uid}]".format(uid=int(uid[:-2])) 56 | 57 | if uid in name_map and len(name_map[uid]) > 0: 58 | return name_map[uid] 59 | 60 | awakened = True if int(uid[-2]) else False 61 | if uid[:-2] in name_map and len(name_map[uid[:-2]]) > 0: 62 | name = name_map[uid[:-2]] 63 | else: 64 | name = default_unknown 65 | if full: 66 | attribute = int(uid[-1]) 67 | return "%s%s (%s)" % ("AWAKENED " if awakened else "", name, monster_attribute(attribute)) 68 | elif not awakened: 69 | return name 70 | return default_unknown 71 | 72 | 73 | def monster_attribute(attribute): 74 | name_map = { 75 | 1: "Water", 76 | 2: "Fire", 77 | 3: "Wind", 78 | 4: "Light", 79 | 5: "Dark" 80 | } 81 | 82 | if attribute in name_map: 83 | return name_map[attribute] 84 | else: 85 | return "???[{attr}]".format(attr=attribute) 86 | 87 | 88 | def rune_effect_type(id, mode=0): 89 | """mode 0 = rune optimizer, mode 1 = csv export""" 90 | 91 | if mode != 0 and mode != 1: 92 | raise ValueError('Should be 0 (optimizer) or 1 (csv)') 93 | 94 | effect_type_map = { 95 | 0: ("",""), 96 | 1: ("HP flat", "HP +%s"), 97 | 2: ("HP%", "HP %s%%"), 98 | 3: ("ATK flat", "ATK +%s"), 99 | 4: ("ATK%", "ATK %s%%"), 100 | 5: ("DEF flat", "DEF +%s"), 101 | 6: ("DEF%", "DEF %s%%"), 102 | # 7: "UNKNOWN", # ? 103 | 8: ("SPD", "SPD +%s"), 104 | 9: ("CRate", "CRI Rate %s%%"), 105 | 10: ("CDmg", "CRI Dmg %s%%"), 106 | 11: ("RES", "Resistance %s%%"), 107 | 12: ("ACC", "Accuracy %s%%") 108 | } 109 | 110 | return effect_type_map[id][mode] if id in effect_type_map else "UNKNOWN" 111 | 112 | 113 | def rune_effect(eff): 114 | typ = eff[0] 115 | value = eff[1] 116 | flats = [1,3,5,8] 117 | if len(eff) > 3: 118 | if eff[3] != 0: 119 | if typ in flats: 120 | value = "%s -> +%s" % (value, str(int(value) + int(eff[3]))) 121 | else: 122 | value = "%s%% -> %s" % (value, str(int(value) + int(eff[3]))) 123 | 124 | if typ == 0: 125 | ret = "" 126 | elif typ == 7 or typ > 12: 127 | ret = "UNK %s %s" % (typ, value) 128 | else: 129 | ret = rune_effect_type(typ,1) % value 130 | 131 | if len(eff) > 2: 132 | if eff[2] != 0: 133 | ret = "%s (Converted)" % ret 134 | return ret 135 | 136 | def rune_set_id(id): 137 | name_map = { 138 | 1: "Energy", 139 | 2: "Guard", 140 | 3: "Swift", 141 | 4: "Blade", 142 | 5: "Rage", 143 | 6: "Focus", 144 | 7: "Endure", 145 | 8: "Fatal", 146 | 10: "Despair", 147 | 11: "Vampire", 148 | 13: "Violent", 149 | 14: "Nemesis", 150 | 15: "Will", 151 | 16: "Shield", 152 | 17: "Revenge", 153 | 18: "Destroy", 154 | 19: "Fight", 155 | 20: "Determination", 156 | 21: "Enhance", 157 | 22: "Accuracy", 158 | 23: "Tolerance", 159 | } 160 | 161 | if id in name_map: 162 | return name_map[id] 163 | else: 164 | return "???" 
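# --- Editor's illustrative sketch (not part of the original parser.py) ---
# A minimal usage sketch of the helper functions above, assuming this module is
# imported as `from SWParser.parser import *`; the sample rune tuples below are
# hypothetical inputs, chosen only to show the mapping logic defined above.
#
#   monster_attribute(2)        # -> "Fire"
#   rune_set_id(13)             # -> "Violent"
#   rune_effect_type(4)         # -> "ATK%"           (mode 0, optimizer label)
#   rune_effect([4, 12])        # -> "ATK 12%"        (type 4 = ATK%, value 12)
#   rune_effect([8, 7, 0, 3])   # -> "SPD +7 -> +10"  (flat sub; the eff[3] bonus is added)
#
# monster_name() resolves a unit_master_id through monsters.py's name_map, so its
# output depends on that table; unknown IDs fall back to the "???[id]" default.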
165 | 166 | def map_craft(craft, craft_id): 167 | type_str = str(craft['craft_type_id']) 168 | return { 169 | 'id': craft_id, 170 | 'item_id': craft['craft_item_id'], 171 | 'type': 'E' if craft['craft_type'] == 1 else 'G', 172 | 'set': rune_set_id(int(type_str[:-4])), 173 | 'stat': rune_effect_type(int(type_str[-4:-2])), 174 | 'grade': int(type_str[-1:]) 175 | } 176 | 177 | def map_rune(rune, rune_id, monster_id=0, monster_uid=0): 178 | cvs_map ={ 179 | 'slot': rune['slot_no'], 180 | 'rune_set': rune_set_id(rune['set_id']), 181 | 'rune_grade': rune['class'], 182 | 'rune_level': rune['upgrade_curr'], 183 | 'pri_eff': rune_effect(rune['pri_eff']), 184 | 'pre_eff': rune_effect(rune['prefix_eff']) 185 | } 186 | 187 | if rune_id != None: 188 | cvs_map.update({ 189 | 'sell_price': rune['sell_value'], 190 | 'rune_id': rune_id, 191 | 'monster_id': '%s (%s)' % (monster_id, monster_name(monster_uid)) if monster_id != 0 else '0', 192 | }) 193 | 194 | for i in range(0, len(rune['sec_eff'])): 195 | cvs_map['sub' + str(i + 1)] = rune_effect(rune['sec_eff'][i]) 196 | 197 | subs = { 198 | 'ATK flat': '-', 199 | 'ATK%': '-', 200 | 'HP flat': '-', 201 | 'HP%': '-', 202 | 'DEF flat': '-', 203 | 'DEF%': '-', 204 | 'RES': '-', 205 | 'ACC': '-', 206 | 'SPD': '-', 207 | 'CDmg': '-', 208 | 'CRate': '-', 209 | } 210 | 211 | for sec_eff in rune['sec_eff']: 212 | subs[rune_effect_type(sec_eff[0])] = sec_eff[1] + (sec_eff[3] if len(sec_eff) > 2 else 0) 213 | 214 | optimizer_map = {"id": rune_id, 215 | "unique_id": rune['rune_id'], 216 | "monster": monster_id, 217 | "monster_n":monster_name(monster_uid, "Unknown name"), 218 | "set": rune_set_id(rune['set_id']), 219 | "slot": rune['slot_no'], 220 | "grade": rune['class'], 221 | "level": rune['upgrade_curr'], 222 | "m_t": rune_effect_type(rune['pri_eff'][0]), 223 | "m_v": rune['pri_eff'][1], 224 | "i_t": rune_effect_type(rune['prefix_eff'][0]), 225 | "i_v": rune['prefix_eff'][1], 226 | "locked":0, 227 | "sub_res": subs['RES'], 228 | "sub_cdmg": subs['CDmg'], 229 | "sub_atkf": subs['ATK flat'], 230 | "sub_acc": subs['ACC'], 231 | "sub_atkp": subs['ATK%'], 232 | "sub_defp": subs['DEF%'], 233 | "sub_deff": subs['DEF flat'], 234 | "sub_hpp": subs['HP%'], 235 | "sub_hpf": subs['HP flat'], 236 | "sub_spd": subs['SPD'], 237 | "sub_crate": subs['CRate']} 238 | 239 | for sub in range(0,4): 240 | optimizer_map['s%s_t' % (sub + 1)] = rune_effect_type(rune['sec_eff'][sub][0]) if len(rune['sec_eff']) >= sub + 1 else "" 241 | optimizer_map['s%s_v' % (sub + 1)] = rune['sec_eff'][sub][1] +\ 242 | (rune['sec_eff'][sub][3] if len(rune['sec_eff'][sub]) > 2 else 0) \ 243 | if len(rune['sec_eff']) >= sub + 1 else 0 244 | optimizer_map['s%s_data' % (sub + 1)] = {"enchanted": rune['sec_eff'][sub][2] == 1, 245 | "gvalue": rune['sec_eff'][sub][3]} \ 246 | if len(rune['sec_eff']) >= sub + 1 and len(rune['sec_eff'][sub]) > 2 else {} 247 | return optimizer_map, cvs_map 248 | 249 | def map_monster(monster, monster_id_mapping, storage_id, wizard_name=None): 250 | csv_map = { 251 | 'name': monster_name(monster['unit_master_id']), 252 | 'level': monster['unit_level'], 253 | 'grade': monster['class'], 254 | 'attribute': monster_attribute(monster['attribute']), 255 | 'in_storage': "Yes" if monster['building_id'] == storage_id else "No", 256 | 'hp': int(monster['con']) * 15, 257 | 'atk': monster['atk'], 258 | 'def': monster['def'], 259 | 'spd': monster['spd'], 260 | 'crate': monster['critical_rate'], 261 | 'cdmg': monster['critical_damage'], 262 | 'res': monster['resist'], 263 | 'acc': 
monster['accuracy'] 264 | } 265 | 266 | if wizard_name is None: 267 | csv_map['id'] = monster_id_mapping[monster['unit_id']] 268 | else: 269 | csv_map.update({'wizard_name' : wizard_name}), 270 | 271 | if monster_id_mapping: 272 | optimizer_monster = {"id": monster_id_mapping[monster['unit_id']], 273 | "name":"%s%s" % (monster_name(monster['unit_master_id'], "Unknown name"), 274 | " (In Storage)" if monster['building_id'] == storage_id else ""), 275 | "level": monster['unit_level'], 276 | "unit_id": monster['unit_id'], 277 | "master_id": monster['unit_master_id'], 278 | "stars": monster['class'], 279 | "attribute": monster_attribute(monster['attribute']), 280 | "b_hp": int(monster['con']) * 15, 281 | "b_atk": monster['atk'], 282 | "b_def": monster['def'], 283 | "b_spd": monster['spd'], 284 | "b_crate": monster['critical_rate'], 285 | "b_cdmg": monster['critical_damage'], 286 | "b_res": monster['resist'], 287 | "b_acc": monster['accuracy']} 288 | else: 289 | optimizer_monster = None 290 | 291 | return optimizer_monster, csv_map 292 | -------------------------------------------------------------------------------- /SWProxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from SWParser.smon_decryptor import decrypt_request, decrypt_response 4 | import json 5 | import logging 6 | import os 7 | import proxy 8 | from SWPlugin import * 9 | import socket 10 | import sys 11 | import argparse 12 | import struct 13 | import dpkt 14 | import gspread 15 | import threading 16 | from oauth2client.service_account import ServiceAccountCredentials 17 | 18 | VERSION = "0.101-Plugins" 19 | GITHUB = 'https://github.com/kakaroto/SWProxy' 20 | logging.basicConfig() 21 | logger = logging.getLogger("SWProxy") 22 | 23 | class HTTP(proxy.TCP): 24 | """ 25 | HTTP proxy server implementation. 26 | Spawns new process to proxy accepted client connection. 
27 | """ 28 | 29 | def handle(self, client): 30 | callback = SWProxyCallback() 31 | proc = proxy.Proxy(client, callback) 32 | proc.daemon = True 33 | proc.start() 34 | logger.debug('Started process {} to handle connection {}'.format(proc, client.conn)) 35 | 36 | 37 | class SWProxyCallback(object): 38 | def __init__(self): 39 | self.request = None 40 | 41 | def onRequest(self, proxy, host, port, request): 42 | try: 43 | if request.url.path.startswith('/api/gateway'): 44 | self.request = request # if we care about this api call, store request for decryption later 45 | except AttributeError: 46 | pass 47 | 48 | def onResponse(self, proxy, response): 49 | 50 | if self.request is None: 51 | # we have not obtained a valid request yet 52 | return 53 | 54 | try: 55 | req_plain, req_json = self._parse_request(self.request) 56 | resp_plain, resp_json = self._parse_response(response) 57 | 58 | if 'command' not in resp_json: 59 | # we only want apis that are commands 60 | self.request = None 61 | return 62 | 63 | try: 64 | SWPlugin.call_plugins('process_request', (req_json, resp_json)) 65 | except Exception as e: 66 | logger.exception('Exception while executing plugin : {}'.format(e)) 67 | 68 | except Exception as e: 69 | logger.debug('unknown exception: {}'.format(e)) 70 | 71 | def onDone(self, proxy): 72 | pass 73 | 74 | def _parse_request(self, request): 75 | """ takes a request, returns the decrypted plain and json """ 76 | plain = decrypt_request(request.body, 2 if '_c2.php' in self.request.url.path else 1) 77 | return plain, json.loads(plain) 78 | 79 | def _parse_response(self, response): 80 | """ takes a response body, returns the decrypted plain and json """ 81 | plain = decrypt_response(response.body, 2 if '_c2.php' in self.request.url.path else 1) 82 | return plain, json.loads(plain) 83 | 84 | 85 | def is_ip_port_valid(ip, port): 86 | try: 87 | socket.getaddrinfo(str(ip), port) 88 | _socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 89 | _socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 90 | _socket.bind((str(ip), port)) 91 | except: 92 | raise 93 | finally: 94 | _socket.close() 95 | 96 | def get_external_ip(): 97 | my_ip = [[(s.connect(('8.8.8.8', 80)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]][0] 98 | return my_ip 99 | 100 | 101 | def read_file_lines(fpath): 102 | try: 103 | fpath = resource_path(fpath) 104 | with open(fpath, 'r') as fh: 105 | return map(lambda x:x.strip(), fh.readlines()) 106 | except Exception: 107 | logger.debug('Failed to read file at {}'.format(fpath)) 108 | return '' 109 | 110 | 111 | def get_usage_text(): 112 | authors_text = read_file_lines('AUTHORS') 113 | 114 | lines = [] 115 | lines.append("#"*40) 116 | lines.append("# SWParser v{} - Summoners War Proxy # ".format(VERSION)) 117 | lines.append("#"*40) 118 | lines.append("\tWritten by:\n\t\tKaKaRoTo\n") 119 | lines.append("\tAuthors:") 120 | for author in authors_text: 121 | lines.append("\t\t{}".format(author)) 122 | 123 | lines.append("\n\tPlugins:") 124 | for plugin in SWPlugin.plugins: 125 | lines.append("\t\t{}".format(plugin.name)) 126 | 127 | lines.append("\nLicensed under LGPLv3 and available at: \n\t{}\n".format(GITHUB)) 128 | return "\n".join(lines) 129 | 130 | 131 | def resource_path(relative_path): 132 | # function to locate data files for pyinstaller single file executable 133 | # ref: http://stackoverflow.com/a/32048136 134 | if hasattr(sys, '_MEIPASS'): 135 | return os.path.join(sys._MEIPASS, relative_path) 136 | 137 | 
return os.path.join(os.path.abspath("."), relative_path) 138 | 139 | 140 | def start_proxy_server(options): 141 | if options.interface: 142 | my_ip = options.interface 143 | else: 144 | my_ip = get_external_ip() 145 | 146 | try: 147 | is_ip_port_valid(my_ip, options.port) 148 | print "Running Proxy server at {} on port {}".format(my_ip, options.port) 149 | p = HTTP(my_ip, options.port) 150 | p.run() 151 | except KeyboardInterrupt: 152 | pass 153 | except socket.error: 154 | logger.error("IP Address and/or Port invalid - can't start Proxy") 155 | 156 | 157 | def parse_pcap(filename): 158 | streams = dict() # Connections with current buffer 159 | with open(filename, "rb") as f: 160 | pcap = dpkt.pcap.Reader(f) 161 | for ts, buf in pcap: 162 | eth = dpkt.ethernet.Ethernet(buf) 163 | if eth.type != dpkt.ethernet.ETH_TYPE_IP: 164 | continue 165 | ip = eth.data 166 | if not isinstance(ip, dpkt.ip.IP): 167 | try: 168 | ip = dpkt.ip.IP(ip) 169 | except: 170 | continue 171 | if ip.p != dpkt.ip.IP_PROTO_TCP: 172 | continue 173 | tcp = ip.data 174 | 175 | if not isinstance(tcp, dpkt.tcp.TCP): 176 | try: 177 | tcp = dpkt.tcp.TCP(tcp) 178 | except: 179 | continue 180 | 181 | tupl = (ip.src, ip.dst, tcp.sport, tcp.dport) 182 | if tupl in streams: 183 | streams[tupl] = streams[tupl] + tcp.data 184 | else: 185 | streams[tupl] = tcp.data 186 | 187 | if (tcp.flags & dpkt.tcp.TH_FIN) != 0 and \ 188 | (tcp.dport == 80 or tcp.sport == 80) and \ 189 | len(streams[tupl]) > 0: 190 | other_tupl = (ip.dst, ip.src, tcp.dport, tcp.sport) 191 | stream1 = streams[tupl] 192 | del streams[tupl] 193 | try: 194 | stream2 = streams[other_tupl] 195 | del streams[other_tupl] 196 | except: 197 | stream2 = "" 198 | if tcp.dport == 80: 199 | requests = stream1 200 | responses = stream2 201 | else: 202 | requests = stream2 203 | responses = stream1 204 | 205 | while len(requests): 206 | try: 207 | request = dpkt.http.Request(requests) 208 | #print request.method, request.uri 209 | except: 210 | request = '' 211 | requests = '' 212 | try: 213 | response = dpkt.http.Response(responses) 214 | #print response.status 215 | except: 216 | response = '' 217 | responses = '' 218 | requests = requests[len(request):] 219 | responses = requests[len(responses):] 220 | 221 | if len(request) > 0 and len(response) > 0 and \ 222 | request.method == 'POST' and \ 223 | request.uri == '/api/gateway.php' and \ 224 | response.status == '200': 225 | try: 226 | req_plain = decrypt_request(request.body) 227 | resp_plain = decrypt_response(response.body) 228 | req_json = json.loads(req_plain) 229 | resp_json = json.loads(resp_plain) 230 | 231 | if 'command' not in resp_json: 232 | return 233 | 234 | try: 235 | SWPlugin.call_plugins('process_request', (req_json, resp_json)) 236 | except Exception as e: 237 | logger.exception('Exception while executing plugin : {}'.format(e)) 238 | except: 239 | import traceback 240 | e = sys.exc_info()[0] 241 | traceback.print_exc() 242 | 243 | elif (tcp.flags & dpkt.tcp.TH_FIN) != 0: 244 | del streams[tupl] 245 | 246 | 247 | if __name__ == "__main__": 248 | parser = argparse.ArgumentParser(description='SWParser') 249 | parser.add_argument('-d', '--debug', action="store_true", default=False) 250 | parser.add_argument('-g', '--no-gui', action="store_true", default=False) 251 | parser.add_argument('-p', '--port', type=int, default=8080) 252 | parser.add_argument('-i', '--interface', type=str) 253 | options = parser.parse_args() 254 | 255 | # Set up logger 256 | level = "DEBUG" if options.debug else "INFO" 257 | 
logging.basicConfig(level=level, filename="proxy.log", format='%(asctime)s: %(name)s - %(levelname)s - %(message)s') 258 | logger.setLevel(logging.INFO) 259 | 260 | print get_usage_text() 261 | 262 | # attempt to load gui; fallback if import error 263 | if not options.no_gui: 264 | try: 265 | # Import here to avoid importing QT in CLI mode 266 | from SWParser.gui import gui 267 | from PyQt4.QtGui import QApplication, QIcon 268 | from PyQt4.QtCore import QSize 269 | except ImportError: 270 | print "Failed to load GUI dependencies. Switching to CLI mode" 271 | options.no_gui = True 272 | 273 | if options.no_gui: 274 | logger.addHandler(logging.StreamHandler()) 275 | start_proxy_server(options) 276 | else: 277 | app = QApplication(sys.argv) 278 | # set the icon 279 | icons_path = os.path.join(os.getcwd(), resource_path("icons/")) 280 | app_icon = QIcon() 281 | app_icon.addFile(icons_path +'16x16.png', QSize(16,16)) 282 | app_icon.addFile(icons_path + '24x24.png', QSize(24,24)) 283 | app_icon.addFile(icons_path + '32x32.png', QSize(32,32)) 284 | app_icon.addFile(icons_path + '48x48.png', QSize(48,48)) 285 | app_icon.addFile(icons_path + '256x256.png', QSize(256,256)) 286 | app.setWindowIcon(app_icon) 287 | if options.interface: 288 | win = gui.MainWindow(options.interface, options.port) 289 | else: 290 | win = gui.MainWindow(get_external_ip(), options.port) 291 | logger.addHandler(gui.GuiLogHandler(win)) 292 | win.show() 293 | sys.exit(app.exec_()) 294 | -------------------------------------------------------------------------------- /plugins/RunLogger.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | from SWParser import * 5 | from SWPlugin import SWPlugin 6 | import threading 7 | import logging 8 | logger = logging.getLogger() 9 | 10 | scenario_map = { 11 | 1: 'Garen Forest', 12 | 2: 'Mt. Siz', 13 | 3: 'Kabir Ruins', 14 | 4: 'Mt. 
White Ragon', 15 | 5: 'Telain Forest', 16 | 6: 'Hydeni Ruins', 17 | 7: 'Tamor Desert', 18 | 8: 'Vrofagus Ruins', 19 | 9: 'Faimon Volcano', 20 | 10: 'Aiden Forest', 21 | 11: 'Ferun Castle', 22 | 12: 'Mt Runar', 23 | 13: 'Chiruka Remains' 24 | } 25 | 26 | rune_class_map = { 27 | 0: 'Common', 28 | 1: 'Magic', 29 | 2: 'Rare', 30 | 3: 'Hero', 31 | 4: 'Legendary' 32 | } 33 | 34 | dungeon_map = { 35 | 1001: "Hall of Dark", 36 | 2001: "Hall of Fire", 37 | 3001: "Hall of Water", 38 | 4001: "Hall of Wind", 39 | 5001: "Hall of Magic", 40 | 6001: "Necropolis", 41 | 7001: "Hall of Light", 42 | 8001: "Giant's Keep", 43 | 9001: "Dragon's Lair" 44 | } 45 | 46 | difficulty_map = { 47 | 1: 'Normal', 48 | 2: 'Hard', 49 | 3: 'Hell' 50 | } 51 | 52 | grade_multiplier_map = { 53 | 1: 0.286, 54 | 2: 0.31, 55 | 3: 0.47, 56 | 4: 0.68, 57 | 5: 0.8, 58 | 6: 1 59 | } 60 | 61 | sub_max_value_map = { 62 | 'HP%': 40.0, 63 | 'ATK%': 40.0, 64 | 'DEF%': 40.0, 65 | 'ACC': 40.0, 66 | 'RES': 40.0, 67 | 'CDmg': 35.0, 68 | 'CRate': 30.0, 69 | 'SPD': 30.0, 70 | 'ATK flat': 14 * 8.0, 71 | 'HP flat': 344 * 8.0, 72 | 'DEF flat': 14 * 8.0, 73 | } 74 | 75 | essence_attribute = { 76 | 1: 'Water', 77 | 2: 'Fire', 78 | 3: 'Wind', 79 | 4: 'Light', 80 | 5: 'Dark', 81 | 6: 'Magic' 82 | } 83 | 84 | essence_grade = { 85 | 1: 'Low', 86 | 2: 'Mid', 87 | 3: 'High' 88 | } 89 | 90 | 91 | def get_sub_score(sub): 92 | if sub[0] == 0: 93 | return 0 94 | 95 | rune_type = rune_effect_type(sub[0]) 96 | max = sub_max_value_map[rune_type] if rune_type in sub_max_value_map else 0 97 | return sub[1] / max 98 | 99 | 100 | def rune_efficiency(rune): 101 | slot = rune['slot_no'] 102 | 103 | grade = rune['class'] 104 | 105 | main_bonus = 1.5 if slot % 2 == 0 else 0.8 106 | 107 | base_score = main_bonus * grade_multiplier_map[grade] 108 | 109 | for se in [rune['prefix_eff']] + rune['sec_eff']: 110 | base_score += get_sub_score(se) 111 | 112 | score = base_score + 0.8 113 | max_score = main_bonus + 1.8 114 | 115 | final_score = score / max_score 116 | 117 | return final_score 118 | 119 | 120 | def get_map_value(key, value_map, default='unknown'): 121 | if key not in value_map: 122 | if key > 10000: 123 | return "Hall of Heroes" 124 | return default 125 | return value_map[key] 126 | 127 | 128 | class RunLogger(SWPlugin): 129 | def __init__(self): 130 | with open('swproxy.config') as f: 131 | self.config = json.load(f) 132 | 133 | @staticmethod 134 | def get_item_name(crate): 135 | if 'random_scroll' in crate and crate['random_scroll']['item_master_id'] == 1: 136 | return "Unknown Scroll x%s" % crate['random_scroll']['item_quantity'] 137 | if 'random_scroll' in crate and crate['random_scroll']['item_master_id'] == 8: 138 | return "Summoning Stones x%s" % crate['random_scroll']['item_quantity'] 139 | if 'random_scroll' in crate and crate['random_scroll']['item_master_id'] == 2: 140 | return "Mystical Scroll" 141 | if 'costume_point' in crate: 142 | return "Shapeshift Stone x%s" % crate['costume_point'] 143 | if 'rune_upgrade_stone' in crate: 144 | return "Power Stone x%s" % crate['rune_upgrade_stone']['item_quantity'] 145 | if 'unit_info' in crate: 146 | return '%s %s*' % (monster_name(crate['unit_info']['unit_master_id']), crate['unit_info']['class']) 147 | if 'material' in crate: 148 | id = str(crate['material']['item_master_id']) 149 | attribute = essence_attribute[int(id[-1])] 150 | grade = essence_grade[int(id[-4])] 151 | return "Essence of %s(%s) x%s" % (attribute,grade,crate['material']['item_quantity']) 152 | if 'summon_pieces' in crate: 153 | return 
"Summoning Pieces %s x%s" % (monster_name(crate['summon_pieces']['item_master_id']),crate['summon_pieces']['item_quantity']) 154 | return 'Unknown drop %s' % json.dumps(crate) 155 | 156 | def process_request(self, req_json, resp_json): 157 | config = self.config 158 | if 'log_runs' not in config or not config['log_runs']: 159 | return 160 | 161 | command = req_json['command'] 162 | if command == 'BattleScenarioStart': 163 | stage = '%s %s - %s' % (get_map_value(req_json['region_id'], scenario_map), 164 | get_map_value(req_json['difficulty'], difficulty_map), 165 | req_json['stage_no']) 166 | if 'run-logger-data' not in config: 167 | config['run-logger-data'] = {} 168 | 169 | plugin_data = config['run-logger-data'] 170 | wizard_id = str(req_json['wizard_id']) 171 | plugin_data[wizard_id] = {'stage' : stage} 172 | 173 | if command == 'BattleScenarioResult' or command == 'BattleDungeonResult': 174 | return self.log_end_battle(req_json,resp_json, config) 175 | 176 | def build_unit_dictionary(self, wizard_id): 177 | if os.path.exists('%s-optimizer.json' % wizard_id): 178 | with open('%s-optimizer.json' % wizard_id) as f: 179 | user_data = json.load(f) 180 | mon_dict = {} 181 | for mon in user_data["mons"]: 182 | mon_dict[mon['unit_id']] = mon['name'] 183 | return mon_dict 184 | 185 | def log_end_battle(self, req_json, resp_json, config): 186 | if not config["log_runs"]: 187 | return 188 | 189 | command = req_json['command'] 190 | 191 | if command == 'BattleDungeonResult': 192 | stage = '%s B%s' % (get_map_value(req_json['dungeon_id'], dungeon_map, req_json['dungeon_id']), 193 | req_json['stage_id']) 194 | 195 | if command == 'BattleScenarioResult': 196 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 197 | if 'run-logger-data' in config and wizard_id in config['run-logger-data'] \ 198 | and 'stage' in config['run-logger-data'][wizard_id]: 199 | stage = config['run-logger-data'][wizard_id]['stage'] 200 | else: 201 | stage = 'unknown' 202 | 203 | wizard_id = str(resp_json['wizard_info']['wizard_id']) 204 | if not os.path.exists('%s-optimizer.json' % wizard_id): 205 | logger.warn("Optimizer file is needed for RunLogger plugin") 206 | return 207 | user_mons = self.build_unit_dictionary(wizard_id) 208 | win_lost = 'Win' if resp_json["win_lose"] == 1 else 'Lost' 209 | 210 | # Are we recording losses? 
211 | if 'log_wipes' in config and not config["log_wipes"] and win_lost == 'Lost': 212 | return 213 | 214 | reward = resp_json['reward'] if 'reward' in resp_json else {} 215 | mana = reward['mana'] if 'mana' in reward else 0 216 | crystal = reward['crystal'] if 'crystal' in reward else 0 217 | energy = reward['energy'] if 'energy' in reward else 0 218 | timer = req_json['clear_time'] 219 | 220 | m = divmod(timer / 1000, 60) 221 | elapsed_time = '%s:%02d' % (m[0], m[1]) 222 | 223 | filename = "%s-runs.csv" % wizard_id 224 | is_new_file = not os.path.exists(filename) 225 | 226 | with open(filename, "ab") as log_file: 227 | field_names = ['date', 'dungeon', 'result', 'time', 'mana', 'crystal', 'energy', 'drop', 'grade', 'value', 228 | 'set', 'eff', 'slot', 'rarity', 'main_stat', 'prefix_stat','sub1','sub2','sub3','sub4','team1','team2','team3','team4','team5'] 229 | 230 | header = {'date': 'Date','dungeon': 'Dungeon', 'result': 'Result', 'time':'Clear time', 'mana':'Mana', 231 | 'crystal': 'Crystal', 'energy': 'Energy', 'drop': 'Drop', 'grade': 'Rune Grade','value': 'Sell value', 232 | 'set': 'Rune Set', 'eff': 'Max Efficiency', 'slot': 'Slot', 'rarity': 'Rune Rarity', 233 | 'main_stat': 'Main stat', 'prefix_stat': 'Prefix stat', 'sub1': 'Secondary stat 1', 234 | 'sub2': 'Secondary stat 2,', 'sub3': 'Secondary stat 3', 'sub4': 'Secondary stat 4', 235 | 'team1': 'Team1', 'team2': 'Team2', 'team3': 'Team3', 'team4': 'Team4', 'team5': 'Team5'} 236 | 237 | SWPlugin.call_plugins('process_csv_row', ('run_logger', 'header', (field_names, header))) 238 | 239 | log_writer = DictUnicodeWriter(log_file, fieldnames=field_names) 240 | if is_new_file: 241 | log_writer.writerow(header) 242 | 243 | if 'crate' in reward: 244 | if 'mana' in reward['crate']: 245 | reward['mana'] += reward['crate']['mana'] 246 | if 'energy' in reward['crate']: 247 | reward['energy'] += reward['crate']['energy'] 248 | if 'crystal' in reward['crate']: 249 | reward['crystal'] += reward['crate']['crystal'] 250 | 251 | log_entry = {'date': time.strftime("%Y-%m-%d %H:%M"), 'dungeon': stage, 'result': win_lost, 252 | 'time': elapsed_time, 'mana': mana, 'crystal': crystal, 'energy': energy} 253 | 254 | if 'crate' in reward: 255 | if 'rune' in reward['crate']: 256 | rune = reward['crate']['rune'] 257 | eff = rune_efficiency(rune) * 100 258 | rune_set = rune_set_id(rune['set_id']) 259 | slot = rune['slot_no'] 260 | grade = rune['class'] 261 | rank = get_map_value(len(rune['sec_eff']), rune_class_map) 262 | 263 | log_entry['drop'] = 'Rune' 264 | log_entry['grade'] = '%s*' % grade 265 | log_entry['value'] = rune['sell_value'] 266 | log_entry['set'] = rune_set 267 | log_entry['eff'] = '%0.2f%%' % eff 268 | log_entry['slot'] = slot 269 | log_entry['rarity'] = rank 270 | log_entry['main_stat'] = rune_effect(rune['pri_eff']) 271 | log_entry['prefix_stat'] = rune_effect(rune['prefix_eff']) 272 | 273 | i = 1 274 | for se in rune['sec_eff']: 275 | log_entry['sub%s' %i] = rune_effect(se) 276 | i += 1 277 | else: 278 | other_item = self.get_item_name(reward['crate']) 279 | log_entry['drop'] = other_item 280 | 281 | if 'unit_list' in resp_json and len(resp_json['unit_list']) > 0: 282 | for i in range(1, len(resp_json['unit_list']) + 1): 283 | log_entry['team%s' % i] = monster_name(resp_json['unit_list'][i-1]['unit_master_id']) 284 | else: 285 | for i in range(1, len(req_json['unit_id_list']) + 1): 286 | id = req_json['unit_id_list'][i-1]['unit_id'] 287 | log_entry['team%s' % i] = user_mons[id] 288 | 289 | if 'instance_info' in resp_json: 290 | 
log_entry['drop'] = 'Secret Dungeon' 291 | 292 | SWPlugin.call_plugins('process_csv_row', ('run_logger', 'entry', (field_names, log_entry))) 293 | log_writer.writerow(log_entry) 294 | return 295 | -------------------------------------------------------------------------------- /plugins/GWLogger.py: -------------------------------------------------------------------------------- 1 | # template : https://docs.google.com/spreadsheets/d/1KPt_KE_Z_RcJh6Wz-VCuU14HEQusKxBeCUzO99SpE2c/edit?usp=sharing 2 | import datetime 3 | import gspread 4 | from oauth2client.service_account import ServiceAccountCredentials 5 | from threading import Thread 6 | import json 7 | import os 8 | import time 9 | from SWParser import * 10 | import SWPlugin 11 | from itertools import groupby 12 | from string import ascii_uppercase 13 | 14 | result_map = { 15 | 1: 'win', 16 | 2: 'lost', 17 | 3: 'draw' 18 | } 19 | 20 | summary_battle_columns = { 21 | 1 : ('B', 'G'), 22 | 2 : ('H', 'M'), 23 | 3 : ('N', 'S'), 24 | 4 : ('T', 'Y'), 25 | 5 : ('Z', 'AE'), 26 | 6 : ('AF', 'AK'), 27 | 7 : ('AL', 'AQ'), 28 | 8 : ('AR', 'AW'), 29 | 9 : ('AX', 'BC'), 30 | 10 : ('BD', 'BI'), 31 | 11 : ('BJ', 'BO'), 32 | 12 : ('BP', 'BU'), 33 | } 34 | 35 | column_names = [] 36 | for p in range(5): 37 | for c in ascii_uppercase: 38 | prefix = ascii_uppercase[p - 1] if p > 0 else '' 39 | column_names.append(prefix + c) 40 | 41 | attack_tab = 'Attack' 42 | attack_summary = 'Attack Summary' 43 | log_tab = 'Log' 44 | defense_tab = 'Defense Summary' 45 | sheet_name = 'Guildwar %s' 46 | 47 | def get_match_id(data): 48 | return data['match_id'] 49 | 50 | class GWLogger(SWPlugin.SWPlugin): 51 | def __init__(self): 52 | if not os.path.exists('swproxy.config'): 53 | self.config = {} 54 | return 55 | 56 | with open('swproxy.config') as f: 57 | self.config = json.load(f) 58 | 59 | def group_battles(self, cache): 60 | list = sorted(cache.values(), key=get_match_id) 61 | grouped = groupby(list, lambda x: x['match_id']) 62 | groups = [] 63 | for key, group in grouped: 64 | matches = [] 65 | battle = {} 66 | first = True 67 | for item in group: 68 | if first: 69 | first = False 70 | battle['guild'] = item['op_guild'] 71 | battle['type'] = item['type'] 72 | battle['match_id'] = item['match_id'] 73 | matches.append(item) 74 | battle['matches'] = matches 75 | groups.append(battle) 76 | return groups 77 | 78 | def get_worksheet(self, key, sheet): 79 | date = datetime.date.today() 80 | days_until_saturday = 5 - date.weekday() 81 | next_saturday = date + datetime.timedelta(days_until_saturday) 82 | 83 | scope = ['https://spreadsheets.google.com/feeds'] 84 | credentials = ServiceAccountCredentials.from_json_keyfile_name(key, scope) 85 | gc = gspread.authorize(credentials) 86 | sheet_name = 'Guildwar %s' % next_saturday.strftime('%m-%d-%Y') 87 | return gc.open(sheet_name) 88 | 89 | def gp_values(self, atk1, atk2, gp): 90 | # 2 wins 91 | if atk1 == 'win' and atk2 == 'win': 92 | return gp / 2, gp/2 93 | 94 | # 2 loses 95 | if gp == 0: 96 | return 0, 0 97 | 98 | # draw 99 | if atk1 == 'draw': 100 | return 1, gp - 1 101 | if atk2 == 'draw': 102 | return gp-1, 1 103 | 104 | if atk1 == 'win': 105 | return gp, 0 106 | else: 107 | return 0, gp 108 | 109 | def write_attack_tab(self, sheet, data, members_list, opponent_list, battle_index): 110 | wks = sheet.worksheet(attack_tab) 111 | line = (battle_index * 33) + 1 112 | cells = wks.range('A%s:AY%s' % (line, line + 32)) 113 | 114 | for i, cell in enumerate(cells): 115 | if i > 101: 116 | cell.value = '' 117 | 118 | cells[1].value = 
data['guild'] 119 | for i, name in enumerate(members_list): 120 | cells[(i * 51) + 102].value = name 121 | 122 | for match in data['matches']: 123 | index_member = members_list.index(match['member_name']) 124 | index_opponent = opponent_list.index(match['op_name']) 125 | cell = (index_member *51) + 103 + index_opponent * 2 126 | round1, round2 = self.gp_values(match['result_1'], match['result_2'], match['gp']) 127 | cells[cell].value = round1 128 | cells[cell + 1].value = round2 129 | 130 | wks.update_cells(cells) 131 | 132 | def write_attack_summary_members(self, sheet, members_list): 133 | wks = sheet.worksheet(attack_summary) 134 | 135 | cells = wks.range('A4:A33') 136 | for i, name in enumerate(members_list): 137 | cells[i].value = name 138 | wks.update_cells(cells) 139 | 140 | def write_attack_summary(self, sheet, data, members_list, opponent_list, battle_index): 141 | wks = sheet.worksheet(attack_summary) 142 | start_col, end_col = summary_battle_columns[battle_index +1] 143 | 144 | cells = wks.range('%s2:%s33' % (start_col, end_col)) 145 | 146 | # clean everything 147 | for i, cell in enumerate(cells): 148 | if i > 12 and i % 32 > 1: 149 | cell.value = '' 150 | 151 | # guild name 152 | cells[0].value = data['guild'] 153 | 154 | sword_counter = {} 155 | for match in data['matches']: 156 | member = match['member_name'] 157 | 158 | if member not in sword_counter: 159 | sword_counter[member] = 0 160 | 161 | swords = sword_counter[member] 162 | sword_counter[member] += 1 163 | 164 | index_member = members_list.index(member) 165 | cell = (6*index_member) + (swords * 2) + 12 166 | round1, round2 = self.gp_values(match['result_1'], match['result_2'], match['gp']) 167 | cells[cell].value = round1 168 | cells[cell + 1].value = round2 169 | 170 | wks.update_cells(cells) 171 | 172 | def get_opponent_list(self, battle_data): 173 | members = {} 174 | for entry in battle_data: 175 | member = entry['op_name'] 176 | if member not in members: 177 | members[member] = 0 178 | gp = entry['gp'] 179 | if gp > members[member]: 180 | members[member] = gp 181 | member_list = [] 182 | for member in members: 183 | member_list.append({'id': member, 'gp': members[member]}) 184 | s = sorted(member_list, key=lambda item: item['gp'], reverse=True) 185 | members = [] 186 | for item in s: 187 | members.append(item['id']) 188 | return members 189 | 190 | def create_members_list(self, cache): 191 | members = {} 192 | for entry in cache: 193 | member = cache[entry]['member_name'] 194 | if member not in members: 195 | members[member] = 0 196 | members[member] += cache[entry]['gp'] 197 | member_list = [] 198 | for member in members: 199 | member_list.append({'id': member, 'gp': members[member]}) 200 | s = sorted(member_list, key=lambda item: item['gp'], reverse=True) 201 | members = [] 202 | for item in s: 203 | members.append(item['id']) 204 | return members 205 | 206 | def process_request(self, req_json, resp_json): 207 | config = self.config 208 | if 'log_guildwar' not in config or not config['log_guildwar'] or 'enable_google_sheet_writer' not in config\ 209 | or not config['enable_google_sheet_writer']: 210 | return 211 | 212 | command = req_json['command'] 213 | if command == 'GetGuildWarBattleLogByGuildId': 214 | thread = Thread(target = self.log_guildwar, args = (req_json, resp_json, config)) 215 | thread.start() 216 | 217 | def read_log(self, filename): 218 | with open(filename, 'rb') as f: 219 | return json.load(f) 220 | 221 | def process_input_json(self, resp_json): 222 | # read previous data 223 | week = 
datetime.date.today().strftime("%U") 224 | cache_name = "guildwar-%s.json" % week 225 | is_new_file = not os.path.exists(cache_name) 226 | cache = {} if is_new_file else self.read_log(cache_name) 227 | 228 | log_type = 'attack' if resp_json['log_type'] == 1 else 'defense' 229 | for guild in resp_json['battle_log_list_group']: 230 | for battle in guild['battle_log_list']: 231 | id = str(battle['rid']) 232 | 233 | if id in cache: 234 | continue 235 | 236 | log_entry = {'id': id, 'type': log_type, 'member_name': battle['wizard_name'], 237 | 'op_name': battle['opp_wizard_name'], 'op_guild': battle['opp_guild_name'], 238 | 'match_id' : battle['match_id'], 239 | 'result_1': result_map[battle['result'][0]], 240 | 'result_2': result_map[battle['result'][1]], 241 | 'gp': battle['guild_point_var'], 242 | 'end': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(battle['battle_end']))} 243 | 244 | cache[id] = log_entry 245 | return cache 246 | 247 | def save_cache(self, cache): 248 | date = datetime.date.today() 249 | days_until_saturday = 5 - date.weekday() 250 | next_saturday = date + datetime.timedelta(days_until_saturday) 251 | 252 | 253 | week = next_saturday.strftime("%U") 254 | cache_name = "guildwar-%s.json" % week 255 | with open(cache_name, 'wb') as f: 256 | json.dump(cache, f) 257 | 258 | def get_sheet_name(self, battle_type): 259 | week = datetime.date.today().strftime("%U") 260 | type = 'Attack' if battle_type == 'attack' else 'Defense' 261 | return 'GW - %s (%s)' % (type, week) 262 | 263 | def write_log(self, battle_list, sheet): 264 | count = 2 265 | wks = sheet.worksheet(log_tab) 266 | for battle in battle_list: 267 | c = len(battle['matches']) 268 | cells = wks.range('A%s:I%s' % (count, count+c)) 269 | pos = 0 270 | for match in battle['matches']: 271 | cells[pos].value = match['id'] 272 | cells[pos+1].value = match['type'] 273 | cells[pos+2].value = match['member_name'] 274 | cells[pos+3].value = match['op_name'] 275 | cells[pos+4].value = match['op_guild'] 276 | cells[pos+5].value = match['result_1'] 277 | cells[pos+6].value = match['result_2'] 278 | cells[pos+7].value = match['gp'] 279 | cells[pos+8].value = match['end'] 280 | pos += 9 281 | wks.update_cells(cells) 282 | count += c 283 | 284 | def write_defense_summary(self, battle_list, member_list, sheet): 285 | wks = sheet.worksheet(defense_tab) 286 | 287 | counters = {} 288 | guilds = [] 289 | guild_info = [] 290 | 291 | # write members names 292 | cells = wks.range('B1:CK1') 293 | for i, member in enumerate(member_list): 294 | cells[i*3].value = member 295 | counters[member] = 0 296 | wks.update_cells(cells) 297 | 298 | # write defense data 299 | cells = wks.range('B5:CM59') 300 | 301 | for battle in battle_list: 302 | if battle['type'] == 'attack': 303 | continue 304 | 305 | guilds.append(battle['guild']) 306 | guild_info.append({"name": battle['guild'], "date": battle['matches'][0]['end']}) 307 | 308 | for match in battle['matches']: 309 | member = match['member_name'] 310 | count = counters[member] 311 | cell = (90 * count) + (member_list.index(member) * 3) 312 | if cell >= len(cells): 313 | print member 314 | cells[cell].value = '#%s' % str(len(guilds)) 315 | cells[cell + 1].value = match['result_1'].upper()[0] 316 | cells[cell + 2].value = match['result_2'].upper()[0] 317 | counters[member] += 1 318 | wks.update_cells(cells) 319 | 320 | def log_guildwar(self, req_json, resp_json, config): 321 | cache = self.process_input_json(resp_json) 322 | self.save_cache(cache) 323 | 324 | member_list = self.create_members_list(cache) 
325 | battle_list = self.group_battles(cache) 326 | 327 | sheet = self.get_worksheet(self.config['google_key'], self.config['sheet_name']) 328 | self.write_attack_summary_members(sheet, member_list) 329 | self.write_log(battle_list, sheet) 330 | self.write_defense_summary(battle_list, member_list, sheet) 331 | 332 | index = {'attack': 0, 'defense': 0} 333 | for battle in battle_list: 334 | op_members = self.get_opponent_list(battle['matches']) 335 | type = battle['type'] 336 | battle_index = index[type] 337 | index[type] += 1 338 | if type == 'attack': 339 | self.write_attack_tab(sheet, battle, member_list, op_members, battle_index) 340 | print 'Log Attack tab' 341 | self.write_attack_summary( sheet, battle, member_list, op_members, battle_index) 342 | print 'Log Attack Summary' 343 | -------------------------------------------------------------------------------- /SWParser/monsters.py: -------------------------------------------------------------------------------- 1 | monsters_name_map = { 2 | "101": "Fairy", 3 | "10111": "Elucia", 4 | "10112": "Iselia", 5 | "10113": "Aeilene", 6 | "10114": "Neal", 7 | "10115": "Sorin", 8 | 9 | "102": "Imp", 10 | "10211": "Fynn", 11 | "10212": "Cogma", 12 | "10213": "Ralph", 13 | "10214": "Taru", 14 | "10215": "Garok", 15 | 16 | "103": "Pixie", 17 | "10311": "Kacey", 18 | "10312": "Tatu", 19 | "10313": "Shannon", 20 | "10314": "Cheryl", 21 | "10315": "Camaryn", 22 | 23 | "104": "Yeti", 24 | "10411": "Kunda", 25 | "10412": "Tantra", 26 | "10413": "Rakaja", 27 | "10414": "Arkajan", 28 | "10415": "Kumae", 29 | 30 | "105": "Harpy", 31 | "10511": "Ramira", 32 | "10512": "Lucasha", 33 | "10513": "Prilea", 34 | "10514": "Kabilla", 35 | "10515": "Hellea", 36 | 37 | "106": "Hellhound", 38 | "10611": "Tarq", 39 | "10612": "Sieq", 40 | "10613": "Gamir", 41 | "10614": "Shamar", 42 | "10615": "Shumar", 43 | 44 | "107": "Warbear", 45 | "10711": "Dagora", 46 | "10712": "Ursha", 47 | "10713": "Ramagos", 48 | "10714": "Lusha", 49 | "10715": "Gorgo", 50 | 51 | "108": "Elemental", 52 | "10811": "Daharenos", 53 | "10812": "Bremis", 54 | "10813": "Taharus", 55 | "10814": "Priz", 56 | "10815": "Camules", 57 | 58 | "109": "Garuda", 59 | "10911": "Konamiya", 60 | "10912": "Cahule", 61 | "10913": "Lindermen", 62 | "10914": "Teon", 63 | "10915": "Rizak", 64 | 65 | "110": "Inugami", 66 | "11011": "Icaru", 67 | "11012": "Raoq", 68 | "11013": "Ramahan", 69 | "11014": "Belladeon", 70 | "11015": "Kro", 71 | 72 | "111": "Salamander", 73 | "11111": "Kaimann", 74 | "11112": "Krakdon", 75 | "11113": "Lukan", 76 | "11114": "Sharman", 77 | "11115": "Decamaron", 78 | 79 | "112": "Nine-tailed Fox", 80 | "11211": "Soha", 81 | "11212": "Shihwa", 82 | "11213": "Arang", 83 | "11214": "Chamie", 84 | "11215": "Kamiya", 85 | 86 | "113": "Serpent", 87 | "11311": "Shailoq", 88 | "11312": "Fao", 89 | "11313": "Ermeda", 90 | "11314": "Elpuria", 91 | "11315": "Mantura", 92 | 93 | "114": "Golem", 94 | "11411": "Kuhn", 95 | "11412": "Kugo", 96 | "11413": "Ragion", 97 | "11414": "Groggo", 98 | "11415": "Maggi", 99 | 100 | "115": "Griffon", 101 | "11511": "Kahn", 102 | "11512": "Spectra", 103 | "11513": "Bernard", 104 | "11514": "Shamann", 105 | "11515": "Varus", 106 | 107 | "116": "Undine", 108 | "11611": "Mikene", 109 | "11612": "Atenai", 110 | "11613": "Delphoi", 111 | "11614": "Icasha", 112 | "11615": "Tilasha", 113 | 114 | "117": "Inferno", 115 | "11711": "Purian", 116 | "11712": "Tagaros", 117 | "11713": "Anduril", 118 | "11714": "Eludain", 119 | "11715": "Drogan", 120 | 121 | "118": "Sylph", 122 | 
"11811": "Tyron", 123 | "11812": "Baretta", 124 | "11813": "Shimitae", 125 | "11814": "Eredas", 126 | "11815": "Aschubel", 127 | 128 | "119": "Sylphid", 129 | "11911": "Lumirecia", 130 | "11912": "Fria", 131 | "11913": "Acasis", 132 | "11914": "Mihael", 133 | "11915": "Icares", 134 | 135 | "120": "High Elemental", 136 | "12011": "Ellena", 137 | "12012": "Kahli", 138 | "12013": "Moria", 139 | "12014": "Shren", 140 | "12015": "Jumaline", 141 | 142 | "121": "Harpu", 143 | "12111": "Sisroo", 144 | "12112": "Colleen", 145 | "12113": "Seal", 146 | "12114": "Sia", 147 | "12115": "Seren", 148 | 149 | "122": "Slime", 150 | "12211": "", 151 | "12212": "", 152 | "12213": "", 153 | "12214": "", 154 | "12215": "", 155 | 156 | "123": "Forest Keeper", 157 | "12311": "", 158 | "12312": "", 159 | "12313": "", 160 | "12314": "", 161 | "12315": "", 162 | 163 | "124": "Mushroom", 164 | "12411": "", 165 | "12412": "", 166 | "12413": "", 167 | "12414": "", 168 | "12415": "", 169 | 170 | "125": "Maned Boar", 171 | "12511": "", 172 | "12512": "", 173 | "12513": "", 174 | "12514": "", 175 | "12515": "", 176 | 177 | "126": "Monster Flower", 178 | "12611": "", 179 | "12612": "", 180 | "12613": "", 181 | "12614": "", 182 | "12615": "", 183 | 184 | "127": "Ghost", 185 | "12711": "", 186 | "12712": "", 187 | "12713": "", 188 | "12714": "", 189 | "12715": "", 190 | 191 | "128": "Low Elemental", 192 | "12811": "Tigresse", 193 | "12812": "Lamor", 194 | "12813": "Samour", 195 | "12814": "Varis", 196 | "12815": "Havana", 197 | 198 | "129": "Mimick", 199 | "12911": "", 200 | "12912": "", 201 | "12913": "", 202 | "12914": "", 203 | "12915": "", 204 | 205 | "130": "Horned Frog", 206 | "13011": "", 207 | "13012": "", 208 | "13013": "", 209 | "13014": "", 210 | "13015": "", 211 | 212 | "131": "Sandman", 213 | "13111": "", 214 | "13112": "", 215 | "13113": "", 216 | "13114": "", 217 | "13115": "", 218 | 219 | "132": "Howl", 220 | "13211": "Lulu", 221 | "13212": "Lala", 222 | "13213": "Chichi", 223 | "13214": "Shushu", 224 | "13215": "Chacha", 225 | 226 | "133": "Succubus", 227 | "13311": "Izaria", 228 | "13312": "Akia", 229 | "13313": "Selena", 230 | "13314": "Aria", 231 | "13315": "Isael", 232 | 233 | "134": "Joker", 234 | "13411": "Sian", 235 | "13412": "Jojo", 236 | "13413": "Lushen", 237 | "13414": "Figaro", 238 | "13415": "Liebli", 239 | 240 | "135": "Ninja", 241 | "13511": "Susano", 242 | "13512": "Garo", 243 | "13513": "Orochi", 244 | "13514": "Gin", 245 | "13515": "Han", 246 | 247 | "136": "Surprise Box", 248 | "13611": "", 249 | "13612": "", 250 | "13613": "", 251 | "13614": "", 252 | "13615": "", 253 | 254 | "137": "Bearman", 255 | "13711": "Gruda", 256 | "13712": "Kungen", 257 | "13713": "Dagorr", 258 | "13714": "Ahman", 259 | "13715": "Haken", 260 | 261 | "138": "Valkyrja", 262 | "13811": "Camilla", 263 | "13812": "Vanessa", 264 | "13813": "Katarina", 265 | "13814": "Akroma", 266 | "13815": "Trinity", 267 | 268 | "139": "Pierret", 269 | "13911": "Julie", 270 | "13912": "Clara", 271 | "13913": "Sophia", 272 | "13914": "Eva", 273 | "13915": "Luna", 274 | 275 | "140": "Werewolf", 276 | "14011": "Vigor", 277 | "14012": "Garoche", 278 | "14013": "Shakan", 279 | "14014": "Eshir", 280 | "14015": "Jultan", 281 | 282 | "141": "Phantom Thief", 283 | "14111": "Luer", 284 | "14112": "Jean", 285 | "14113": "Julien", 286 | "14114": "Louis", 287 | "14115": "Guillaume", 288 | 289 | "142": "Angelmon", 290 | "14211": "Blue Angelmon", 291 | "14212": "Red Angelmon", 292 | "14213": "Gold Angelmon", 293 | "14214": "White Angelmon", 294 | 
"14215": "Dark Angelmon", 295 | 296 | "144": "Dragon", 297 | "14411": "Verad", 298 | "14412": "Zaiross", 299 | "14413": "Jamire", 300 | "14414": "Zerath", 301 | "14415": "Grogen", 302 | 303 | "145": "Phoenix", 304 | "14511": "Sigmarus", 305 | "14512": "Perna", 306 | "14513": "Teshar", 307 | "14514": "Eludia", 308 | "14515": "Jaara", 309 | 310 | "146": "Chimera", 311 | "14611": "Taor", 312 | "14612": "Rakan", 313 | "14613": "Lagmaron", 314 | "14614": "Shan", 315 | "14615": "Zeratu", 316 | 317 | "147": "Vampire", 318 | "14711": "Liesel", 319 | "14712": "Verdehile", 320 | "14713": "Argen", 321 | "14714": "Julianne", 322 | "14715": "Cadiz", 323 | 324 | "148": "Viking", 325 | "14811": "Huga", 326 | "14812": "Geoffrey", 327 | "14813": "Walter", 328 | "14814": "Jansson", 329 | "14815": "Janssen", 330 | 331 | "149": "Amazon", 332 | "14911": "Ellin", 333 | "14912": "Ceres", 334 | "14913": "Hina", 335 | "14914": "Lyn", 336 | "14915": "Mara", 337 | 338 | "150": "Martial Cat", 339 | "15011": "Mina", 340 | "15012": "Mei", 341 | "15013": "Naomi", 342 | "15014": "Xiao Ling", 343 | "15015": "Miho", 344 | 345 | "152": "Vagabond", 346 | "15211": "Allen", 347 | "15212": "Kai'en", 348 | "15213": "Roid", 349 | "15214": "Darion", 350 | "15215": "Jubelle", 351 | 352 | "153": "Epikion Priest", 353 | "15311": "Rina", 354 | "15312": "Chloe", 355 | "15313": "Michelle", 356 | "15314": "Iona", 357 | "15315": "Rasheed", 358 | 359 | "154": "Magical Archer", 360 | "15411": "Sharron", 361 | "15412": "Cassandra", 362 | "15413": "Ardella", 363 | "15414": "Chris", 364 | "15415": "Bethony", 365 | 366 | "155": "Rakshasa", 367 | "15511": "Su", 368 | "15512": "Hwa", 369 | "15513": "Yen", 370 | "15514": "Pang", 371 | "15515": "Ran", 372 | 373 | "156": "Bounty Hunter", 374 | "15611": "Wayne", 375 | "15612": "Randy", 376 | "15613": "Roger", 377 | "15614": "Walkers", 378 | "15615": "Jamie", 379 | 380 | "157": "Oracle", 381 | "15711": "Praha", 382 | "15712": "Juno", 383 | "15713": "Seara", 384 | "15714": "Laima", 385 | "15715": "Giana", 386 | 387 | "158": "Imp Champion", 388 | "15811": "Yaku", 389 | "15812": "Fairo", 390 | "15813": "Pigma", 391 | "15814": "Shaffron", 392 | "15815": "Loque", 393 | 394 | "159": "Mystic Witch", 395 | "15911": "Megan", 396 | "15912": "Rebecca", 397 | "15913": "Silia", 398 | "15914": "Linda", 399 | "15915": "Gina", 400 | 401 | "160": "Grim Reaper", 402 | "16011": "Hemos", 403 | "16012": "Sath", 404 | "16013": "Hiva", 405 | "16014": "Prom", 406 | "16015": "Thrain", 407 | 408 | "161": "Occult Girl", 409 | "16111": "Anavel", 410 | "16112": "Rica", 411 | "16113": "Charlotte", 412 | "16114": "Lora", 413 | "16115": "Nicki", 414 | 415 | "162": "Death Knight", 416 | "16211": "Fedora", 417 | "16212": "Arnold", 418 | "16213": "Briand", 419 | "16214": "Conrad", 420 | "16215": "Dias", 421 | 422 | "163": "Lich", 423 | "16311": "Rigel", 424 | "16312": "Antares", 425 | "16313": "Fuco", 426 | "16314": "Halphas", 427 | "16315": "Grego", 428 | 429 | "164": "Skull Soldier", 430 | "16411": "", 431 | "16412": "", 432 | "16413": "", 433 | "16414": "", 434 | "16415": "", 435 | 436 | "165": "Living Armor", 437 | "16511": "Nickel", 438 | "16512": "Iron", 439 | "16513": "Copper", 440 | "16514": "Silver", 441 | "16515": "Zinc", 442 | 443 | "166": "Dragon Knight", 444 | "16611": "Chow", 445 | "16612": "Laika", 446 | "16613": "Leo", 447 | "16614": "Jager", 448 | "16615": "Ragdoll", 449 | 450 | "167": "Magical Archer Promo", 451 | "16711": "", 452 | "16712": "", 453 | "16713": "", 454 | "16714": "Fami", 455 | "16715": "", 456 | 457 | 
"168": "Monkey King", 458 | "16811": "Shi Hou", 459 | "16812": "Mei Hou Wang", 460 | "16813": "Xing Zhe", 461 | "16814": "Qitian Dasheng", 462 | "16815": "Son Zhang Lao", 463 | 464 | "169": "Samurai", 465 | "16911": "Kaz", 466 | "16912": "Jun", 467 | "16913": "Kaito", 468 | "16914": "Tosi", 469 | "16915": "Sige", 470 | 471 | "170": "Archangel", 472 | "17011": "Ariel", 473 | "17012": "Velajuel", 474 | "17013": "Eladriel", 475 | "17014": "Artamiel", 476 | "17015": "Fermion", 477 | 478 | "172": "Drunken Master", 479 | "17211": "Mao", 480 | "17212": "Xiao Chun", 481 | "17213": "Huan", 482 | "17214": "Tien Qin", 483 | "17215": "Wei Shin", 484 | 485 | "173": "Kung Fu Girl", 486 | "17311": "Xiao Lin", 487 | "17312": "Hong Hua", 488 | "17313": "Ling Ling", 489 | "17314": "Liu Mei", 490 | "17315": "Fei", 491 | 492 | "174": "Beast Monk", 493 | "17411": "Chandra", 494 | "17412": "Kumar", 495 | "17413": "Ritesh", 496 | "17414": "Shazam", 497 | "17415": "Rahul", 498 | 499 | "175": "Mischievous Bat", 500 | "17511": "", 501 | "17512": "", 502 | "17513": "", 503 | "17514": "", 504 | "17515": "", 505 | 506 | "176": "Battle Scorpion", 507 | "17611": "", 508 | "17612": "", 509 | "17613": "", 510 | "17614": "", 511 | "17615": "", 512 | 513 | "177": "Minotauros", 514 | "17711": "Urtau", 515 | "17712": "Burentau", 516 | "17713": "Eintau", 517 | "17714": "Grotau", 518 | "17715": "Kamatau", 519 | 520 | "178": "Lizardman", 521 | "17811": "Kernodon", 522 | "17812": "Igmanodon", 523 | "17813": "Velfinodon", 524 | "17814": "Glinodon", 525 | "17815": "Devinodon", 526 | 527 | "179": "Hell Lady", 528 | "17911": "Beth", 529 | "17912": "Raki", 530 | "17913": "Ethna", 531 | "17914": "Asima", 532 | "17915": "Craka", 533 | 534 | "180": "Brownie Magician", 535 | "18011": "Orion", 536 | "18012": "Draco", 537 | "18013": "Aquila", 538 | "18014": "Gemini", 539 | "18015": "Korona", 540 | 541 | "181": "Kobold Bomber", 542 | "18111": "Malaka", 543 | "18112": "Zibrolta", 544 | "18113": "Taurus", 545 | "18114": "Dover", 546 | "18115": "Bering", 547 | 548 | "182": "King Angelmon", 549 | "18211": "Blue King Angelmon", 550 | "18212": "Red King Angelmon", 551 | "18213": "Gold King Angelmon", 552 | "18214": "White King Angelmon", 553 | "18215": "Dark King Angelmon", 554 | 555 | "183": "Sky Dancer", 556 | "18311": "Mihyang", 557 | "18312": "Hwahee", 558 | "18313": "Chasun", 559 | "18314": "Yeonhong", 560 | "18315": "Wolyung", 561 | 562 | "184": "Taoist", 563 | "18411": "Gildong", 564 | "18412": "Gunpyeong", 565 | "18413": "Woochi", 566 | "18414": "Hwadam", 567 | "18415": "Woonhak", 568 | 569 | "185": "Beast Hunter", 570 | "18511": "Gangchun", 571 | "18512": "Nangrim", 572 | "18513": "Suri", 573 | "18514": "Baekdu", 574 | "18515": "Hannam", 575 | 576 | "186": "Pioneer", 577 | "18611": "Woosa", 578 | "18612": "Chiwu", 579 | "18613": "Pungbaek", 580 | "18614": "Nigong", 581 | "18615": "Woonsa", 582 | 583 | "187": "Penguin Knight", 584 | "18711": "Toma", 585 | "18712": "Naki", 586 | "18713": "Mav", 587 | "18714": "Dona", 588 | "18715": "Kuna", 589 | 590 | "188": "Barbaric King", 591 | "18811": "Aegir", 592 | "18812": "Surtr", 593 | "18813": "Hraesvelg", 594 | "18814": "Mimirr", 595 | "18815": "Hrungnir", 596 | 597 | "189": "Polar Queen", 598 | "18911": "Alicia", 599 | "18912": "Brandia", 600 | "18913": "Tiana", 601 | "18914": "Elenoa", 602 | "18915": "Lydia", 603 | 604 | "190": "Battle Mammoth", 605 | "19011": "Talc", 606 | "19012": "Granite", 607 | "19013": "Olivine", 608 | "19014": "Marble", 609 | "19015": "Basalt", 610 | 611 | "191": "Fairy 
Queen", 612 | "19111": "", 613 | "19112": "", 614 | "19113": "", 615 | "19114": "Fran", 616 | "19115": "", 617 | 618 | "192": "Ifrit", 619 | "19211": "Theomars", 620 | "19212": "Tesarion", 621 | "19213": "Akhamamir", 622 | "19214": "Elsharion", 623 | "19215": "Veromos", 624 | 625 | "193": "Cow Girl", 626 | "19311": "Sera", 627 | "19312": "Anne", 628 | "19313": "Hannah", 629 | "19314": "", 630 | "19315": "Cassie", 631 | 632 | "194": "Pirate Captain", 633 | "19411": "Galleon", 634 | "19412": "Carrack", 635 | "19413": "Barque", 636 | "19414": "Brig", 637 | "19415": "Frigate", 638 | 639 | "195": "Charger Shark", 640 | "19511": "Aqcus", 641 | "19512": "Ignicus", 642 | "19513": "Zephicus", 643 | "19514": "Rumicus", 644 | "19515": "Calicus", 645 | 646 | "196": "Mermaid", 647 | "19611": "Tetra", 648 | "19612": "Platy", 649 | "19613": "Cichlid", 650 | "19614": "Molly", 651 | "19615": "Betta", 652 | 653 | "197": "Sea Emperor", 654 | "19711": "Poseidon", 655 | "19712": "Okeanos", 656 | "19713": "Triton", 657 | "19714": "Pontos", 658 | "19715": "Manannan", 659 | 660 | "198": "Magic Knight", 661 | "19811": "Lapis", 662 | "19812": "Astar", 663 | "19813": "Lupinus", 664 | "19814": "Iris", 665 | "19815": "Lanett", 666 | 667 | "199": "Assassin", 668 | "19911": "Stella", 669 | "19912": "Lexy", 670 | "19913": "Tanya", 671 | "19914": "Natalie", 672 | "19915": "Isabelle", 673 | 674 | "200": "Neostone Fighter", 675 | "20011": "Ryan", 676 | "20012": "Trevor", 677 | "20013": "Logan", 678 | "20014": "Lucas", 679 | "20015": "Karl", 680 | 681 | "201": "Neostone Agent", 682 | "20111": "Emma", 683 | "20112": "Lisa", 684 | "20113": "Olivia", 685 | "20114": "Illianna", 686 | "20115": "Sylvia", 687 | 688 | "202": "Martial Artist", 689 | "20211": "Luan", 690 | "20212": "Sin", 691 | "20213": "Lo", 692 | "20214": "Hiro", 693 | "20215": "Jackie", 694 | 695 | "203": "Mummy", 696 | "20311": "Nubia", 697 | "20312": "Sonora", 698 | "20313": "Namib", 699 | "20314": "Sahara", 700 | "20315": "Karakum", 701 | 702 | "204": "Anubis", 703 | "20411": "Avaris", 704 | "20412": "Khmun", 705 | "20413": "Iunu", 706 | "20414": "Amarna", 707 | "20415": "Thebae", 708 | 709 | "205": "Desert Queen", 710 | "20511": "Bastet", 711 | "20512": "Sekhmet", 712 | "20513": "Hathor", 713 | "20514": "Isis", 714 | "20515": "Nephthys", 715 | 716 | "206": "Horus", 717 | "20611": "Qebehsenuef", 718 | "20612": "Duamutef", 719 | "20613": "Imesety", 720 | "20614": "Wedjat", 721 | "20615": "Amduat", 722 | 723 | "207": "Jack-o'-lantern", 724 | "20711": "Chilling", 725 | "20712": "Smokey", 726 | "20713": "Windy", 727 | "20714": "Misty", 728 | "20715": "Dusky", 729 | 730 | "208": "Frankenstein", 731 | "20811": "Tractor", 732 | "20812": "Bulldozer", 733 | "20813": "Crane", 734 | "20814": "Driller", 735 | "20815": "Crawler", 736 | 737 | "209": "Elven Ranger", 738 | "20911": "Eluin", 739 | "20912": "Adrian", 740 | "20913": "Erwin", 741 | "20914": "Lucien", 742 | "20915": "Isillen", 743 | 744 | "210": "Harg", 745 | "21011": "Remy", 746 | "21012": "Racuni", 747 | "21013": "Raviti", 748 | "21014": "Dova", 749 | "21015": "Kroa", 750 | 751 | "211": "Fairy King", 752 | "21111": "Psamathe", 753 | "21112": "Daphnis", 754 | "21113": "Ganymede", 755 | "21114": "Oberon", 756 | "21115": "Nyx", 757 | 758 | "212": "Panda Warrior", 759 | "21211": "Mo Long", 760 | "21212": "Xiong Fei", 761 | "21213": "Feng Yan", 762 | "21214": "Tian Lang", 763 | "21215": "Mi Ying", 764 | 765 | "213": "Dice Magician", 766 | "21311": "Reno", 767 | "21312": "Ludo", 768 | "21313": "Morris", 769 | "21314": 
"Tablo", 770 | "21315": "Monte", 771 | 772 | "214": "Harp Magician", 773 | "21411": "Sonnet", 774 | "21412": "Harmonia", 775 | "21413": "Triana", 776 | "21414": "Celia", 777 | "21415": "Vivachel", 778 | 779 | "15105": "Devilmon", 780 | "14314": "Rainbowmon", 781 | 782 | "1000111": "Homunculus (Water)", 783 | "1000112": "Homunculus (Fire)", 784 | "1000113": "Homunculus (Wind)" 785 | } 786 | -------------------------------------------------------------------------------- /proxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """ 4 | proxy.py 5 | ~~~~~~~~ 6 | 7 | HTTP Proxy Server in Python. 8 | 9 | :copyright: (c) 2013 by Abhinav Singh. 10 | :license: BSD, see LICENSE for more details. 11 | """ 12 | VERSION = (0, 2) 13 | __version__ = '.'.join(map(str, VERSION[0:2])) 14 | __description__ = 'HTTP Proxy Server in Python' 15 | __author__ = 'Abhinav Singh' 16 | __author_email__ = 'mailsforabhinav@gmail.com' 17 | __homepage__ = 'https://github.com/abhinavsingh/proxy.py' 18 | __license__ = 'BSD' 19 | 20 | import sys 21 | import threading 22 | import datetime 23 | import argparse 24 | import logging 25 | import socket 26 | import select 27 | 28 | logger = logging.getLogger(__name__) 29 | 30 | # True if we are running on Python 3. 31 | PY3 = sys.version_info[0] == 3 32 | 33 | if PY3: 34 | text_type = str 35 | binary_type = bytes 36 | from urllib import parse as urlparse 37 | else: 38 | text_type = unicode 39 | binary_type = str 40 | import urlparse 41 | 42 | 43 | def text_(s, encoding='utf-8', errors='strict'): 44 | """ If ``s`` is an instance of ``binary_type``, return 45 | ``s.decode(encoding, errors)``, otherwise return ``s``""" 46 | if isinstance(s, binary_type): 47 | return s.decode(encoding, errors) 48 | return s # pragma: no cover 49 | 50 | 51 | def bytes_(s, encoding='utf-8', errors='strict'): 52 | """ If ``s`` is an instance of ``text_type``, return 53 | ``s.encode(encoding, errors)``, otherwise return ``s``""" 54 | if isinstance(s, text_type): # pragma: no cover 55 | return s.encode(encoding, errors) 56 | return s 57 | 58 | version = bytes_(__version__) 59 | 60 | CRLF, COLON, SP = b'\r\n', b':', b' ' 61 | 62 | HTTP_REQUEST_PARSER = 1 63 | HTTP_RESPONSE_PARSER = 2 64 | 65 | HTTP_PARSER_STATE_INITIALIZED = 1 66 | HTTP_PARSER_STATE_LINE_RCVD = 2 67 | HTTP_PARSER_STATE_RCVING_HEADERS = 3 68 | HTTP_PARSER_STATE_HEADERS_COMPLETE = 4 69 | HTTP_PARSER_STATE_RCVING_BODY = 5 70 | HTTP_PARSER_STATE_COMPLETE = 6 71 | 72 | CHUNK_PARSER_STATE_WAITING_FOR_SIZE = 1 73 | CHUNK_PARSER_STATE_WAITING_FOR_DATA = 2 74 | CHUNK_PARSER_STATE_COMPLETE = 3 75 | 76 | 77 | class ChunkParser(object): 78 | """HTTP chunked encoding response parser.""" 79 | 80 | def __init__(self): 81 | self.state = CHUNK_PARSER_STATE_WAITING_FOR_SIZE 82 | self.body = b'' 83 | self.chunk = b'' 84 | self.size = None 85 | 86 | def parse(self, data): 87 | more = True if len(data) > 0 else False 88 | while more: more, data = self.process(data) 89 | 90 | def process(self, data): 91 | if self.state == CHUNK_PARSER_STATE_WAITING_FOR_SIZE: 92 | line, data = HttpParser.split(data) 93 | self.size = int(line, 16) 94 | self.state = CHUNK_PARSER_STATE_WAITING_FOR_DATA 95 | elif self.state == CHUNK_PARSER_STATE_WAITING_FOR_DATA: 96 | remaining = self.size - len(self.chunk) 97 | self.chunk += data[:remaining] 98 | data = data[remaining:] 99 | if len(self.chunk) == self.size: 100 | data = data[len(CRLF):] 101 | self.body += self.chunk 102 | if self.size == 0: 
103 | self.state = CHUNK_PARSER_STATE_COMPLETE 104 | else: 105 | self.state = CHUNK_PARSER_STATE_WAITING_FOR_SIZE 106 | self.chunk = b'' 107 | self.size = None 108 | return len(data) > 0, data 109 | 110 | class HttpParser(object): 111 | """HTTP request/response parser.""" 112 | 113 | def __init__(self, type=None): 114 | self.state = HTTP_PARSER_STATE_INITIALIZED 115 | self.type = type if type else HTTP_REQUEST_PARSER 116 | 117 | self.raw = b'' 118 | self.buffer = b'' 119 | 120 | self.headers = dict() 121 | self.body = None 122 | 123 | self.method = None 124 | self.url = None 125 | self.code = None 126 | self.reason = None 127 | self.version = None 128 | 129 | self.chunker = None 130 | 131 | def parse(self, data): 132 | self.raw += data 133 | data = self.buffer + data 134 | self.buffer = b'' 135 | 136 | more = True if len(data) > 0 else False 137 | while more: 138 | more, data = self.process(data) 139 | self.buffer = data 140 | 141 | def process(self, data): 142 | if self.state >= HTTP_PARSER_STATE_HEADERS_COMPLETE and \ 143 | (self.method == b"POST" or self.type == HTTP_RESPONSE_PARSER): 144 | if not self.body: 145 | self.body = b'' 146 | 147 | if b'content-length' in self.headers: 148 | self.state = HTTP_PARSER_STATE_RCVING_BODY 149 | self.body += data 150 | if len(self.body) >= int(self.headers[b'content-length'][1]): 151 | self.state = HTTP_PARSER_STATE_COMPLETE 152 | elif b'transfer-encoding' in self.headers and self.headers[b'transfer-encoding'][1].lower() == b'chunked': 153 | if not self.chunker: 154 | self.chunker = ChunkParser() 155 | self.chunker.parse(data) 156 | if self.chunker.state == CHUNK_PARSER_STATE_COMPLETE: 157 | self.body = self.chunker.body 158 | self.state = HTTP_PARSER_STATE_COMPLETE 159 | 160 | return False, b'' 161 | 162 | line, data = HttpParser.split(data) 163 | if line == False: return line, data 164 | 165 | if self.state < HTTP_PARSER_STATE_LINE_RCVD: 166 | self.process_line(line) 167 | elif self.state < HTTP_PARSER_STATE_HEADERS_COMPLETE: 168 | self.process_header(line) 169 | 170 | if self.state == HTTP_PARSER_STATE_HEADERS_COMPLETE and \ 171 | ((self.type == HTTP_REQUEST_PARSER and \ 172 | not self.method == b"POST") or \ 173 | # aureus - 2016-06-14 - nasty hack fix to get around iOS issue 174 | # remove this when actual fix is found 175 | (self.type == HTTP_REQUEST_PARSER and \ 176 | self.method == b'POST' and \ 177 | (self.url.netloc.endswith('qpyou.cn') or self.url.netloc.endswith('com2us.net')) and not self.url.path.startswith('/api/')) or \ 178 | # end nasty hack fix 179 | (self.type == HTTP_RESPONSE_PARSER and \ 180 | b'content-length' not in self.headers and \ 181 | b'transfer-encoding' not in self.headers)) and \ 182 | self.raw.endswith(CRLF*2): 183 | self.state = HTTP_PARSER_STATE_COMPLETE 184 | 185 | return len(data) > 0, data 186 | 187 | def process_line(self, data): 188 | line = data.split(SP) 189 | if self.type == HTTP_REQUEST_PARSER: 190 | self.method = line[0].upper() 191 | self.url = urlparse.urlsplit(line[1]) 192 | self.version = line[2] 193 | else: 194 | self.version = line[0] 195 | self.code = line[1] 196 | self.reason = b' '.join(line[2:]) 197 | self.state = HTTP_PARSER_STATE_LINE_RCVD 198 | 199 | def process_header(self, data): 200 | if len(data) == 0: 201 | if self.state == HTTP_PARSER_STATE_RCVING_HEADERS: 202 | self.state = HTTP_PARSER_STATE_HEADERS_COMPLETE 203 | elif self.state == HTTP_PARSER_STATE_LINE_RCVD: 204 | self.state = HTTP_PARSER_STATE_RCVING_HEADERS 205 | else: 206 | self.state = HTTP_PARSER_STATE_RCVING_HEADERS 207 | 
parts = data.split(COLON) 208 | key = parts[0].strip() 209 | value = COLON.join(parts[1:]).strip() 210 | self.headers[key.lower()] = (key, value) 211 | 212 | def build_url(self): 213 | if not self.url: 214 | return b'/None' 215 | 216 | url = self.url.path 217 | if url == b'': url = b'/' 218 | if not self.url.query == b'': url += b'?' + self.url.query 219 | if not self.url.fragment == b'': url += b'#' + self.url.fragment 220 | return url 221 | 222 | def build_header(self, k, v): 223 | return k + b": " + v + CRLF 224 | 225 | def build(self, del_headers=None, add_headers=None): 226 | if self.type == HTTP_REQUEST_PARSER: 227 | req = b" ".join([self.method, self.build_url(), self.version]) 228 | else: 229 | req = b" ".join([self.version, self.code, self.reason]) 230 | req += CRLF 231 | 232 | if not del_headers: del_headers = [] 233 | del_headers.append(b'content-length') 234 | del_headers.append(b'transfer-encoding') 235 | for k in self.headers: 236 | if not k in del_headers: 237 | req += self.build_header(self.headers[k][0], self.headers[k][1]) 238 | 239 | if not add_headers: add_headers = [] 240 | if self.body: 241 | add_headers.append(('Content-Length', str(len(self.body)))) 242 | for k in add_headers: 243 | req += self.build_header(k[0], k[1]) 244 | 245 | req += CRLF 246 | if self.body: 247 | req += self.body 248 | 249 | return req 250 | 251 | @staticmethod 252 | def split(data): 253 | pos = data.find(CRLF) 254 | if pos == -1: return False, data 255 | line = data[:pos] 256 | data = data[pos+len(CRLF):] 257 | return line, data 258 | 259 | class Connection(object): 260 | """TCP server/client connection abstraction.""" 261 | 262 | def __init__(self, what): 263 | self.buffer = b'' 264 | self.closed = False 265 | self.what = what # server or client 266 | 267 | def send(self, data): 268 | return self.conn.send(data) 269 | 270 | def recv(self, bytes=8192): 271 | try: 272 | data = self.conn.recv(bytes) 273 | if len(data) == 0: 274 | logger.debug('recvd 0 bytes from %s' % self.what) 275 | return None 276 | logger.debug('rcvd %d bytes from %s' % (len(data), self.what)) 277 | return data 278 | except Exception as e: 279 | logger.exception('Exception while receiving from connection %s %r with reason %r' % (self.what, self.conn, e)) 280 | return None 281 | 282 | def close(self): 283 | self.conn.close() 284 | self.closed = True 285 | 286 | def buffer_size(self): 287 | return len(self.buffer) 288 | 289 | def has_buffer(self): 290 | return self.buffer_size() > 0 291 | 292 | def queue(self, data): 293 | self.buffer += data 294 | 295 | def flush(self): 296 | sent = self.send(self.buffer) 297 | self.buffer = self.buffer[sent:] 298 | logger.debug('flushed %d bytes to %s' % (sent, self.what)) 299 | 300 | class Server(Connection): 301 | """Establish connection to destination server.""" 302 | 303 | def __init__(self, host, port): 304 | super(Server, self).__init__(b'server') 305 | self.addr = (host, int(port)) 306 | 307 | def connect(self): 308 | self.conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 309 | self.conn.connect((self.addr[0], self.addr[1])) 310 | 311 | class Client(Connection): 312 | """Accepted client connection.""" 313 | 314 | def __init__(self, conn, addr): 315 | super(Client, self).__init__(b'client') 316 | self.conn = conn 317 | self.addr = addr 318 | 319 | class ProxyError(Exception): 320 | pass 321 | 322 | class ProxyConnectionFailed(ProxyError): 323 | 324 | def __init__(self, host, port, reason): 325 | self.host = host 326 | self.port = port 327 | self.reason = reason 328 | 329 | def 
__str__(self): 330 | return '<ProxyConnectionFailed - %s:%s - %s>' % (self.host, self.port, self.reason) 331 | 332 | class Proxy(threading.Thread): 333 | """HTTP proxy implementation. 334 | 335 | Accepts connection object and acts as a proxy between client and server. 336 | """ 337 | 338 | def __init__(self, client, callback=None): 339 | super(Proxy, self).__init__() 340 | 341 | self.start_time = self._now() 342 | self.last_activity = self.start_time 343 | self.callback = callback 344 | 345 | self.client = client 346 | self.server = None 347 | 348 | self.request = HttpParser() 349 | self.response = HttpParser(HTTP_RESPONSE_PARSER) 350 | 351 | self.connection_established_pkt = CRLF.join([ 352 | b'HTTP/1.1 200 Connection established', 353 | b'Proxy-agent: proxy.py v' + version, 354 | CRLF 355 | ]) 356 | 357 | def _now(self): 358 | return datetime.datetime.utcnow() 359 | 360 | def _inactive_for(self): 361 | return (self._now() - self.last_activity).seconds 362 | 363 | def _is_inactive(self): 364 | return self._inactive_for() > 30 365 | 366 | def _process_request(self, data): 367 | # once we have connection to the server 368 | # we don't parse the http request packets 369 | # any further, instead just pipe incoming 370 | # data from client to server 371 | if self.server and not self.server.closed: 372 | self.server.queue(data) 373 | return 374 | 375 | # parse http request 376 | self.request.parse(data) 377 | 378 | # once http request parser has reached the state complete 379 | # we attempt to establish connection to destination server 380 | if self.request.state == HTTP_PARSER_STATE_COMPLETE: 381 | logger.debug('request parser is in state complete') 382 | 383 | if self.request.method == b"CONNECT": 384 | host, port = self.request.url.path.split(COLON) 385 | elif self.request.url: 386 | host, port = self.request.url.hostname, self.request.url.port if self.request.url.port else 80 387 | 388 | self.server = Server(host, port) 389 | try: 390 | logger.debug('connecting to server %s:%s' % (host, port)) 391 | self.server.connect() 392 | logger.debug('connected to server %s:%s' % (host, port)) 393 | except Exception as e: 394 | self.server.closed = True 395 | raise ProxyConnectionFailed(host, port, repr(e)) 396 | 397 | if self.callback: 398 | self.callback.onRequest(self, host, port, self.request) 399 | # for http connect methods (https requests) 400 | # queue appropriate response for client 401 | # notifying about established connection 402 | if self.request.method == b"CONNECT": 403 | self.client.queue(self.connection_established_pkt) 404 | # for usual http requests, re-build request packet 405 | # and queue for the server with appropriate headers 406 | else: 407 | self.server.queue(self.request.build( 408 | del_headers=[b'proxy-connection', b'connection', b'keep-alive'], 409 | add_headers=[(b'Connection', b'Close')] 410 | )) 411 | 412 | def _process_response(self, data): 413 | # parse incoming response packet 414 | # only for non-https requests 415 | if not self.request.method == b"CONNECT": 416 | self.response.parse(data) 417 | if self.response.state == HTTP_PARSER_STATE_COMPLETE: 418 | if self.callback: 419 | self.callback.onResponse(self, self.response) 420 | # queue data for client 421 | self.client.queue(self.response.raw) 422 | else: 423 | # queue data for client 424 | self.client.queue(data) 425 | 426 | def _access_log(self): 427 | host, port = self.server.addr if self.server else (None, None) 428 | if self.request.method == b"CONNECT": 429 | logger.info("%s:%s - %s %s:%s" % (self.client.addr[0], self.client.addr[1],
self.request.method, host, port)) 430 | elif self.request.method: 431 | logger.info("%s:%s - %s %s:%s%s - %s %s - %s bytes" % (self.client.addr[0], self.client.addr[1], self.request.method, host, port, self.request.build_url(), self.response.code, self.response.reason, len(self.response.raw))) 432 | 433 | def _get_waitable_lists(self): 434 | rlist, wlist, xlist = [self.client.conn], [], [] 435 | logger.debug('*** watching client for read ready') 436 | 437 | if self.client.has_buffer(): 438 | logger.debug('pending client buffer found, watching client for write ready') 439 | wlist.append(self.client.conn) 440 | 441 | if self.server and not self.server.closed: 442 | logger.debug('connection to server exists, watching server for read ready') 443 | rlist.append(self.server.conn) 444 | 445 | if self.server and not self.server.closed and self.server.has_buffer(): 446 | logger.debug('connection to server exists and pending server buffer found, watching server for write ready') 447 | wlist.append(self.server.conn) 448 | 449 | return rlist, wlist, xlist 450 | 451 | def _process_wlist(self, w): 452 | if self.client.conn in w: 453 | logger.debug('client is ready for writes, flushing client buffer') 454 | self.client.flush() 455 | 456 | if self.server and not self.server.closed and self.server.conn in w: 457 | logger.debug('server is ready for writes, flushing server buffer') 458 | self.server.flush() 459 | 460 | def _process_rlist(self, r): 461 | if self.client.conn in r: 462 | logger.debug('client is ready for reads, reading') 463 | data = self.client.recv() 464 | self.last_activity = self._now() 465 | 466 | if not data: 467 | logger.debug('client closed connection, breaking') 468 | return True 469 | 470 | try: 471 | self._process_request(data) 472 | except ProxyConnectionFailed as e: 473 | logger.exception(e) 474 | self.client.queue(CRLF.join([ 475 | b'HTTP/1.1 502 Bad Gateway', 476 | b'Proxy-agent: proxy.py v' + version, 477 | b'Content-Length: 11', 478 | b'Connection: close', 479 | CRLF 480 | ]) + b'Bad Gateway') 481 | self.client.flush() 482 | return True 483 | 484 | if self.server and not self.server.closed and self.server.conn in r: 485 | logger.debug('server is ready for reads, reading') 486 | data = self.server.recv() 487 | self.last_activity = self._now() 488 | 489 | if not data: 490 | logger.debug('server closed connection') 491 | self.server.close() 492 | else: 493 | self._process_response(data) 494 | 495 | return False 496 | 497 | def _process(self): 498 | while True: 499 | rlist, wlist, xlist = self._get_waitable_lists() 500 | r, w, x = select.select(rlist, wlist, xlist, 1) 501 | 502 | self._process_wlist(w) 503 | if self._process_rlist(r): 504 | break 505 | 506 | if self.client.buffer_size() == 0: 507 | if self.response.state == HTTP_PARSER_STATE_COMPLETE: 508 | logger.debug('client buffer is empty and response state is complete, breaking') 509 | break 510 | 511 | if self._is_inactive(): 512 | logger.debug('client buffer is empty and maximum inactivity has reached, breaking') 513 | break 514 | 515 | def run(self): 516 | logger.debug('Proxying connection %r at address %r' % (self.client.conn, self.client.addr)) 517 | try: 518 | self._process() 519 | except KeyboardInterrupt: 520 | pass 521 | except Exception as e: 522 | logger.exception('Exception while handling connection %r with reason %r' % (self.client.conn, e)) 523 | finally: 524 | logger.debug("closing client connection with pending client buffer size %d bytes" % self.client.buffer_size()) 525 | self.client.close() 526 | if 
self.server: 527 | logger.debug("closed client connection with pending server buffer size %d bytes" % self.server.buffer_size()) 528 | self._access_log() 529 | logger.debug('Closing proxy for connection %r at address %r' % (self.client.conn, self.client.addr)) 530 | if self.callback: 531 | self.callback.onDone(self) 532 | 533 | class TCP(object): 534 | """TCP server implementation.""" 535 | 536 | def __init__(self, hostname='127.0.0.1', port=8899, backlog=100): 537 | self.port = port 538 | self.backlog = backlog 539 | self.hostname = hostname 540 | 541 | def handle(self, client): 542 | raise NotImplementedError() 543 | 544 | def run(self): 545 | try: 546 | self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 547 | logger.info('Starting server on {}, port {}'.format(self.hostname, self.port)) 548 | self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 549 | self.socket.bind((self.hostname, self.port)) 550 | self.socket.listen(self.backlog) 551 | while True: 552 | conn, addr = self.socket.accept() 553 | logger.info('Accepted connection %r at address %r' % (conn, addr)) 554 | client = Client(conn, addr) 555 | self.handle(client) 556 | except Exception as e: 557 | logger.exception('Exception while running the server %r' % e) 558 | finally: 559 | logger.info('Closing server socket') 560 | self.socket.close() 561 | 562 | class HTTP(TCP): 563 | """HTTP proxy server implementation. 564 | 565 | Spawns new process to proxy accepted client connection. 566 | """ 567 | 568 | def handle(self, client): 569 | proc = Proxy(client) 570 | proc.daemon = True 571 | proc.start() 572 | logger.debug('Started process %r to handle connection %r' % (proc, client.conn)) 573 | 574 | def main(): 575 | parser = argparse.ArgumentParser( 576 | description='proxy.py v%s' % __version__, 577 | epilog='Having difficulty using proxy.py? Report at: %s/issues/new' % __homepage__ 578 | ) 579 | 580 | parser.add_argument('--hostname', default='127.0.0.1', help='Default: 127.0.0.1') 581 | parser.add_argument('--port', default='8899', help='Default: 8899') 582 | parser.add_argument('--log-level', default='INFO', help='DEBUG, INFO, WARNING, ERROR, CRITICAL') 583 | args = parser.parse_args() 584 | 585 | logging.basicConfig(level=getattr(logging, args.log_level), format='%(asctime)s - %(levelname)s - pid:%(process)d - %(message)s') 586 | 587 | hostname = args.hostname 588 | port = int(args.port) 589 | 590 | try: 591 | proxy = HTTP(hostname, port) 592 | proxy.run() 593 | except KeyboardInterrupt: 594 | pass 595 | 596 | if __name__ == '__main__': 597 | main() 598 | --------------------------------------------------------------------------------
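The Proxy class in proxy.py exposes three optional callback hooks -- onRequest(proxy, host, port, request), onResponse(proxy, response) and onDone(proxy) -- while the bundled HTTP.handle() starts each Proxy thread without one; SWProxy.py presumably wires its own callback in this way, though that file is not shown here. A minimal sketch of such wiring follows; it is illustration only, and the names LoggingCallback and CallbackHTTP are invented for the example.

# Illustrative sketch, not a file from this repository: wiring a callback
# object into proxy.py's Proxy/HTTP classes via the hooks shown above.
import logging

import proxy


class LoggingCallback(object):
    """Hypothetical callback that just logs request/response summaries."""

    def onRequest(self, p, host, port, request):
        # request is an HttpParser that has reached HTTP_PARSER_STATE_COMPLETE
        logging.info('request %s %s:%s%s', request.method, host, port, request.build_url())

    def onResponse(self, p, response):
        # called for non-CONNECT requests once the response parser completes
        logging.info('response %s %s (%d bytes)', response.code, response.reason, len(response.raw))

    def onDone(self, p):
        # called from Proxy.run() when the proxy thread finishes
        logging.info('proxy connection closed')


class CallbackHTTP(proxy.HTTP):
    """HTTP server whose handle() passes a callback to each Proxy thread."""

    def handle(self, client):
        thread = proxy.Proxy(client, callback=LoggingCallback())
        thread.daemon = True
        thread.start()


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    CallbackHTTP(hostname='0.0.0.0', port=8899).run()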