├── docs └── __init__.py ├── tff_rl ├── __init__.py ├── core │ ├── __init__.py │ ├── memory.py │ ├── policies.py │ └── q.py ├── agents │ └── __init__.py └── tests │ ├── __init__.py │ ├── test_run.py │ └── test_pylint.py ├── tools ├── __init__.py └── helper.py ├── .pydocstyle ├── pytest.ini ├── requirements.txt ├── setup.py ├── .travis.yml ├── readme.rst └── .pylintrc /docs/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tff_rl/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tools/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tff_rl/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tff_rl/agents/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tff_rl/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.pydocstyle: -------------------------------------------------------------------------------- 1 | [pydocstyle] 2 | match = .*\.py 3 | # google doccstyle 4 | select = D203, D204, D213, D215, D401, D404, D406, D407, D408, D409 -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | # addopts = -ra -s 3 | log_cli = True 4 | log_cli_level = INFO 5 | log_cli_format = %(asctime)s %(levelname)s %(message)s 6 | python_functions = test_* -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas 2 | pytest 3 | pylint 4 | pydocstyle 5 | gym 6 | numpy 7 | matplotlib 8 | pyglet 9 | keras 10 | docopt 11 | tensorflow==2.0 12 | pybind11 13 | cppimport -------------------------------------------------------------------------------- /tff_rl/core/memory.py: -------------------------------------------------------------------------------- 1 | """memory management""" 2 | import random 3 | 4 | 5 | class ReplayMemory: 6 | 7 | def __init__(self): 8 | self.memory = [] 9 | 10 | def push(self, event): 11 | self.memory.append(event) 12 | 13 | def sample(self, batch_size): 14 | samples = zip(random.sample(self.memory, batch_size)) 15 | return samples 16 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """Distribution to pipy""" 2 | 3 | from setuptools import setup, find_packages 4 | 5 | with open("README.md") as readme: 6 | long_description = readme.read() 7 | 8 | setup( 9 | name='tf_rl', 10 | version='1.0.0', 11 | long_description=long_description, 12 | url='https://github.com/dickreuter/tf_rl', 13 | author='Nicolas Dickreuter', 14 | author_email='dickreuter@gmail.com', 15 | license='MIT', 16 | description=('Framework for reinforcement learning.'), 17 | packages=find_packages(exclude=['tests', 'gym_env', 'tools']), 18 | install_requires=['pyglet', 'pytest', 'pandas', 'pylint', 'gym', 'numpy', 'matplotlib', 'tensorflow'], 19 | platforms='any', 20 | ) 21 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | 4 | python: 5 | - "3.6" 6 | 7 | os: 8 | - linux 9 | 10 | install: 11 | - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then MINICONDA_OS=Linux; else MINICONDA_OS=MacOSX; fi 12 | - wget https://repo.anaconda.com/miniconda/Miniconda3-latest-$MINICONDA_OS-x86_64.sh -O miniconda.sh 13 | - bash miniconda.sh -b -p "$HOME"/miniconda 14 | - source "$HOME"/miniconda/etc/profile.d/conda.sh 15 | - conda config --set always_yes yes --set changeps1 no --set auto_update_conda false 16 | - export LANG=en_US.UTF-8 17 | - export COVERAGE_DIR=":$HOME/htmlcov" 18 | - pip install -r requirements.txt 19 | - printenv | sort 20 | - conda install gcc_linux-64 gxx_linux-64 -c anaconda 21 | 22 | # command to run tests 23 | script: pytest --log-cli-level=ERROR 24 | 25 | after_success: 26 | - codecov 27 | 28 | -------------------------------------------------------------------------------- /tff_rl/core/policies.py: -------------------------------------------------------------------------------- 1 | """Policies""" 2 | import logging 3 | 4 | import numpy as np 5 | 6 | log = logging.getLogger(__name__) 7 | 8 | 9 | class PolicyQ1(): 10 | """Custom policy when making decision based on neural network.""" 11 | 12 | def __init__(self, tau=1., clip=(-500., 500.)): 13 | self.tau = tau 14 | self.clip = clip 15 | 16 | def select_action(self, q_values): 17 | """Return the selected action 18 | 19 | # Arguments 20 | q_values (np.ndarray): List of the estimations of Q for each action 21 | 22 | # Returns 23 | Selection action 24 | 25 | """ 26 | assert q_values.ndim == 1 27 | q_values = q_values.astype('float64') 28 | nb_actions = q_values.shape[0] 29 | 30 | exp_values = np.exp(np.clip(q_values / self.tau, self.clip[0], self.clip[1])) 31 | probs = exp_values / np.sum(exp_values) 32 | action = np.random.choice(range(nb_actions), p=probs) 33 | log.info(f"Chosen action by q-learner {action} - probabilities: {probs}") 34 | return action 35 | -------------------------------------------------------------------------------- /tff_rl/tests/test_run.py: -------------------------------------------------------------------------------- 1 | from keras import Sequential 2 | from keras.layers import Flatten, Dense, Activation 3 | 4 | import gym 5 | from tff_rl.core.memory import ReplayMemory 6 | from tff_rl.core.policies import PolicyQ1 7 | from tff_rl.core.q import QAgent 8 | 9 | 10 | def test_cart_pole(): 11 | env = gym.make("CartPole-v1") 12 | observation = env.reset() 13 | 14 | obs_space_shape = env.observation_space.shape 15 | nb_actions = env.action_space.n 16 | 17 | model = Sequential() 18 | model.add(Flatten(input_shape=(1,) + obs_space_shape)) 19 | model.add(Dense(16)) 20 | model.add(Activation('relu')) 21 | model.add(Dense(16)) 22 | model.add(Activation('relu')) 23 | model.add(Dense(16)) 24 | model.add(Activation('relu')) 25 | model.add(Dense(nb_actions, activation='linear')) 26 | 27 | memory = ReplayMemory() 28 | policy = PolicyQ1() 29 | 30 | QAgent(env=env, model=model, memory=memory, policy=policy) 31 | 32 | for _ in range(10): 33 | env.render() 34 | action = env.action_space.sample() # your agent here (this takes random actions) 35 | observation, reward, done, info = env.step(action) 36 | 37 | if done: 38 | observation = env.reset() 39 | env.close() 40 | -------------------------------------------------------------------------------- /tff_rl/core/q.py: -------------------------------------------------------------------------------- 1 | """Routines for q learning""" 2 | from keras.layers import Flatten, Dense, Activation 3 | 4 | 5 | class QAgent: 6 | def __init__(self, env, model=None, memory=None, policy=None): 7 | self.env = env 8 | self.model = model 9 | self.memory = memory 10 | self.policy = policy 11 | 12 | 13 | def action(self, action_space, observation, info): # pylint: disable=no-self-use 14 | """Mandatory method that calculates the move based on the observation array and the action space.""" 15 | action = None 16 | 17 | return action 18 | 19 | def forward(self): 20 | raise NotADirectoryError 21 | 22 | def backward(self): 23 | raise NotADirectoryError 24 | 25 | def load_weights(self): 26 | raise NotADirectoryError 27 | 28 | def save_weights(self): 29 | raise NotADirectoryError 30 | 31 | def compute_batch_q_values(self, state_batch): 32 | batch = self.process_state_batch(state_batch) 33 | q_values = self.model.predict_on_batch(batch) 34 | assert q_values.shape == (len(state_batch), self.nb_actions) 35 | return q_values 36 | 37 | def compute_q_values(self, state): 38 | q_values = self.compute_batch_q_values([state]).flatten() 39 | assert q_values.shape == (self.nb_actions,) 40 | return q_values 41 | -------------------------------------------------------------------------------- /readme.rst: -------------------------------------------------------------------------------- 1 | Framework for reinforcement learning 2 | ==================================== 3 | 4 | A framework for reinforcement learning based on the OpenAi environment and tensorflow 2.0. 5 | 6 | Usage: 7 | ------ 8 | 9 | - Install Anaconda, I would also recommend to install pycharm. 10 | - Create a virtual environment with ``conda create -n tf2 python=3.7`` 11 | - Activate it with ``conda activate tf2``, then install all required packages with ``pip install -r requirements.txt`` 12 | 13 | 14 | Roadmap 15 | ------- 16 | 17 | Agents and roadmap 18 | ~~~~~~~~~~~~~~~~~~ 19 | 20 | - [ ] General framework outline 21 | - [ ] Deep Q Learning (DQN) 22 | - [ ] Double DQN 23 | - [ ] n-step montecarlo 24 | - [ ] Deep SARSA 25 | - [ ] Asynchronous Advantage Actor-Critic (A3C) [ 26 | 27 | 28 | 29 | How to integrate your code on Github 30 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 31 | 32 | It will be hard for one person alone to beat the world at poker. That's 33 | why this repo aims to have a collaborative environment, where models can 34 | be added and evaluated. 35 | 36 | To contribute do the following: 37 | 38 | - Get Pycharm and build the virtual python environment. Use can do: ``pip install -r requirements.txt`` 39 | - If you want to use the 500x faster c++ based equity calculator, also install visual studio, but this is not necessary 40 | - Clone your fork to your local machine. You can do this directly from pycharm: VCS --> check out from version control --> git 41 | - Add as remote the original repository where you created the fork from and call it upstream (the connection to your fork should be called origin). This can be done with vcs --> git --> remotes 42 | - Create a new branch: click on master at the bottom right, and then click on 'new branch' 43 | - Make your edits. 44 | - Ensure all tests pass. Under file --> settings --> python integrated tools switch to pytest (see screenshot). |image1| You can then just right click on the tests folder and run all tests. All tests need to pass. Make sure to add your own tests by simply naming the funtion test\_... \ 45 | - Make sure all the tests are passing. Best run pytest as described above (in pycharm just right click on the tests folder and run it). If a test fails, you can debug the test, by right clicking on it and put breakpoints, or even open a console at the breakpoint: https://stackoverflow.com/questions/19329601/interactive-shell-debugging-with-pycharm 46 | - Commit your changes (CTRL+K} 47 | - Push your changes to your origin (your fork) (CTRL+SHIFT+K) 48 | - To bring your branch up to date with upstream master, if it has moved on: rebase onto upstream master: click on your branch name at the bottom right of pycharm, then click on upstream/master, then rebase onto. You may need to resolve soe conflicts. Once this is done, make sure to always force-push (ctrl+shift+k), (not just push). This can be done by selecting the dropdown next to push and choose force-push (important: don't push and merge a rebased branch with your remote) 49 | - Create a pull request on your github.com to merge your branch with the upstream master. 50 | - When your pull request is approved, it will be merged into the upstream/master. -------------------------------------------------------------------------------- /tff_rl/tests/test_pylint.py: -------------------------------------------------------------------------------- 1 | """Pylint test.""" 2 | 3 | import logging 4 | import os 5 | import sys 6 | from glob import glob 7 | 8 | from pydocstyle import Error, check 9 | from pydocstyle.cli import setup_stream_handlers, ReturnCode 10 | from pydocstyle.config import ConfigurationParser, IllegalConfiguration 11 | from pylint import lint 12 | from pylint.reporters.text import TextReporter 13 | 14 | from tools.helper import get_dir 15 | 16 | # pylint: disable=anomalous-backslash-in-string,too-few-public-methods,inconsistent-return-statements 17 | 18 | log = logging.getLogger(__name__) 19 | CODEBASE = get_dir('codebase') 20 | REPOS = [name for name in os.listdir(CODEBASE) if os.path.isdir(name)] 21 | 22 | FOLDERS = [name for name in os.listdir(CODEBASE) if 23 | os.path.isdir(os.path.join(CODEBASE, name)) and '.' not in name[0] and '_' not in name[0]] 24 | 25 | LINT_EXTENSIONS = ['.py'] 26 | 27 | EXCLUDE_SUBFOLDERS = ['.idea', 'doc', 'distribution'] 28 | 29 | # To ignore specific rules, please disable it in your file or function, 30 | # or (if given broad consensus) ignore it globally by adding an exception to .pylintrc. 31 | # Full list of readable pylint message short-names can be found here: 32 | # https://github.com/janjur/readable-pylint-messages 33 | 34 | IGNORE_LIST = ["""\r\n""", # part of final output 35 | "*************", # part of final output 36 | "------------", # part of final output 37 | "Your code has been rated at", "E ", # part of final output 38 | "Redefining built-in 'id'", # exception as it seems never a problem 39 | "UPPER_CASE naming style (invalid-name)", 40 | """"log" doesn't conform to UPPER_CASE"""] 41 | 42 | REPOSITORIES = list(set(FOLDERS) - set(EXCLUDE_SUBFOLDERS)) 43 | 44 | 45 | class _WritableOutput: 46 | """A simple class, supporting a write method to capture pylint output.""" 47 | 48 | def __init__(self): 49 | self.content = [] 50 | 51 | def write(self, string): 52 | """Write method to capture pylint output.""" 53 | if string == '\n': 54 | return # filter newlines 55 | self.content.append(string) 56 | 57 | 58 | def test_pylint(): 59 | """Test codebase for pylint errors.""" 60 | files_to_check = get_relevant_files() 61 | log.info("{} changed files detected".format(len(files_to_check))) 62 | rcfile, reps = (os.path.join(CODEBASE, '.pylintrc'), files_to_check) 63 | 64 | pylint_args = ['--rcfile={}'.format(rcfile), ] 65 | log.info('applying pylint to repository {}'.format(reps)) 66 | pylint_args += reps 67 | 68 | pylint_output = _WritableOutput() 69 | pylint_reporter = TextReporter(pylint_output) 70 | lint.Run(pylint_args, reporter=pylint_reporter, do_exit=False) 71 | 72 | pylint_outputs = pylint_output.content 73 | 74 | errors = [] 75 | for output in pylint_outputs: 76 | if not any([i in output for i in IGNORE_LIST]): 77 | errors.append(output) 78 | if "Your code has been rated at" in output: 79 | print("\n" + output) 80 | 81 | if errors: 82 | raise AssertionError('{} Pylint errors found. ' 83 | 'For quick resolution, consider running this test locally before you push. ' 84 | 'Scroll down for hyperlinks to errors.\n{}'.format(len(errors), '\n'.join(errors))) 85 | 86 | 87 | def get_relevant_files(): 88 | """ 89 | Get relevant changed files of current branch vs target branch. 90 | 91 | check_all_files (bool): get all files if true, get changed files since comparison commit 92 | 93 | Filenames are filtered: 94 | - Files need to be in lint_extensions 95 | - Files cannot be in the exclude_folders list 96 | 97 | Returns: 98 | list of str: changed files 99 | 100 | """ 101 | filenames = [y for x in os.walk(get_dir('codebase')) for y in glob(os.path.join(x[0], '*.py'))] 102 | if os.name == 'nt': 103 | filenames = [filename.replace('/', """\\""") for filename in filenames] 104 | filenames = [filename for filename in filenames if filename] 105 | filenames = [filename for filename in filenames if os.path.splitext(filename)[1] in LINT_EXTENSIONS] 106 | filenames = [os.path.join(CODEBASE, filename) for filename in filenames] 107 | log.debug(filenames) 108 | 109 | return filenames 110 | 111 | 112 | # nforce consistent docstrings as per https://www.python.org/dev/peps/pep-0257/. 113 | 114 | log = logging.getLogger(__name__) 115 | REPOSITORIES = ['neuron_poker'] 116 | 117 | # --- please remove files here for enforcement --- 118 | 119 | IGNORES = ['__init__.py'] 120 | 121 | 122 | def test_pydocsftyle(): 123 | """ 124 | Docstring enforcement test. 125 | 126 | Please adjust the enforced_file list to enfoce the test in a file. 127 | To auto generate the correct format in intellij please make the following adjustments in your ide: 128 | Tools - Python Integrated Tools - Docstring format to google style. 129 | """ 130 | # pass argv argument to pydocstyle via monkeypatch 131 | reps = [os.path.join(CODEBASE, rep) for rep in REPOSITORIES] 132 | 133 | sys.argv = ['test_pydocstyle'] + reps + [r"""--match=.*\.py"""] 134 | errors = run_pydocstyle() 135 | 136 | enforced_errors = [str(error) for error in errors] 137 | 138 | if enforced_errors: 139 | raise RuntimeError("Docstring test failed: \n{}".format('\n'.join(enforced_errors))) 140 | 141 | 142 | def run_pydocstyle(): 143 | """Adjust version of pydocstile to return detailed errors to the test.""" 144 | log.setLevel(logging.DEBUG) 145 | conf = ConfigurationParser() 146 | setup_stream_handlers(conf.get_default_run_configuration()) 147 | 148 | try: 149 | conf.parse() 150 | except IllegalConfiguration: 151 | return ReturnCode.invalid_options 152 | 153 | run_conf = conf.get_user_run_configuration() 154 | 155 | # Reset the logger according to the command line arguments 156 | setup_stream_handlers(run_conf) 157 | 158 | log.debug("starting in debug mode.") 159 | 160 | Error.explain = run_conf.explain 161 | Error.source = run_conf.source 162 | 163 | errors = [] 164 | changed_files = get_relevant_files() 165 | 166 | if not changed_files: 167 | return [] 168 | 169 | all_files = conf.get_files_to_check() 170 | all_files = [file for file in all_files if file[0] in changed_files] 171 | try: 172 | for filename, checked_codes, ignore_decorators in all_files: 173 | errors.extend(check((filename,), select=checked_codes, ignore_decorators=ignore_decorators)) 174 | except IllegalConfiguration as error: 175 | # An illegal configuration file was found during file generation. 176 | log.error(error.args[0]) 177 | return ReturnCode.invalid_options 178 | 179 | count = 0 180 | errors_final = [] 181 | for err in errors: 182 | if hasattr(err, 'code') and not any(ignore in str(err) for ignore in IGNORES): 183 | sys.stdout.write('%s\n' % err) 184 | errors_final.append(err) 185 | count += 1 186 | return errors_final 187 | -------------------------------------------------------------------------------- /tools/helper.py: -------------------------------------------------------------------------------- 1 | """Helper functions.""" 2 | # pylint: disable = ungrouped-imports, too-few-public-methods 3 | 4 | import datetime 5 | import logging 6 | 7 | import multiprocessing 8 | from multiprocessing.pool import ThreadPool 9 | import os 10 | 11 | import pickle 12 | import sys 13 | import traceback 14 | from collections import Iterable # pylint: disable=no-name-in-module 15 | from configparser import ConfigParser, ExtendedInterpolation 16 | from logging import handlers 17 | 18 | import pandas as pd 19 | 20 | CONFIG_FILENAME = 'config.ini' 21 | log = logging.getLogger(__name__) 22 | COMPUTER_NAME = os.getenv('COMPUTERNAME') 23 | 24 | 25 | class Singleton(type): 26 | """ 27 | Singleton Metaclass. 28 | 29 | Objects are only instantiated once and saved in the _instances dict if a class references 30 | to this metaclass. 31 | 32 | """ 33 | 34 | _instances = {} 35 | 36 | def __call__(cls, *args, **kwargs): # called at instantiation of an object that uses this metaclass 37 | """Is called at instantiation of a class that refers to this metaclass.""" 38 | if cls not in cls._instances: 39 | cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) 40 | return cls._instances[cls] 41 | 42 | @staticmethod 43 | def delete(class_name): 44 | """Delete an instancee of a singleton class.""" 45 | if class_name in class_name._instances: # pylint: disable=protected-access 46 | del class_name._instances[class_name] # pylint: disable=protected-access 47 | 48 | 49 | class CustomConfigParser(metaclass=Singleton): 50 | """ 51 | Singleton class that wraps the ConfigParser to make sure it's only loaded once. 52 | 53 | The first time a config filename override will be considered. After that 54 | the parameter is irrelevant as the same config object will be returned. 55 | 56 | """ 57 | 58 | def __init__(self, config_override_filename=None): 59 | """Load the configuration (usually config.ini).""" 60 | if config_override_filename and not os.path.isfile(config_override_filename): 61 | raise ValueError("Unable to find config file {}".format(config_override_filename)) 62 | 63 | main_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.ini') 64 | 65 | self.config = ConfigParser(interpolation=ExtendedInterpolation()) 66 | self.config.optionxform = str # enforce case sensitivity on key 67 | 68 | if config_override_filename: # custom config 69 | self.config.read([main_file, config_override_filename]) 70 | else: # no custom file 71 | self.config.read(main_file) 72 | 73 | 74 | def get_config(config_override_filename=None): 75 | """Public accessor for config file.""" 76 | config = CustomConfigParser(config_override_filename) 77 | return config.config 78 | 79 | 80 | def init_logger(screenlevel, filename=None, logdir=None, modulename=''): 81 | """ 82 | Initialize Logger. 83 | 84 | Args: 85 | screenlevel (logging): logging.INFO or logging.DEBUG 86 | filename (str): filename (without .log) 87 | logdir (str): directory name for log 88 | modulename (str): project name default 89 | 90 | """ 91 | # for all other modules just use log = logging.getLogger(__name__) 92 | if not logdir: 93 | logdir = get_dir('log') 94 | 95 | root = logging.getLogger() 96 | [root.removeHandler(rh) for rh in root.handlers] # pylint: disable=W0106 97 | [root.removeFilter(rf) for rf in root.filters] # pylint: disable=W0106 98 | 99 | root = logging.getLogger('') 100 | root.setLevel(logging.WARNING) 101 | 102 | stream_handler = logging.StreamHandler(sys.stdout) 103 | stream_handler.setLevel(screenlevel) 104 | if filename and not filename == 'None': 105 | filename = filename.replace("{date}", datetime.date.today().strftime("%Y%m%d")) 106 | all_logs_filename = os.path.join(logdir, filename + '.log') 107 | error_filename = os.path.join(logdir, filename + '_errors.log') 108 | info_filename = os.path.join(logdir, filename + '_info.log') 109 | 110 | print("Saving log file to: {}".format(all_logs_filename)) 111 | print("Saving info file to: {}".format(info_filename)) 112 | print("Saving error only file to: {}".format(error_filename)) 113 | 114 | file_handler2 = handlers.RotatingFileHandler(all_logs_filename, maxBytes=300000, backupCount=20) 115 | file_handler2.setLevel(logging.DEBUG) 116 | 117 | error_handler = handlers.RotatingFileHandler(error_filename, maxBytes=300000, backupCount=20) 118 | error_handler.setLevel(logging.WARNING) 119 | 120 | info_handler = handlers.RotatingFileHandler(info_filename, maxBytes=30000000, backupCount=100) 121 | info_handler.setLevel(logging.INFO) 122 | 123 | # formatter when using --log command line and writing log to a file 124 | file_handler2.setFormatter( 125 | logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)d - %(message)s')) 126 | error_handler.setFormatter( 127 | logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)d - %(message)s')) 128 | info_handler.setFormatter( 129 | logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)d - %(message)s')) 130 | 131 | # root.addHandler(fh) 132 | root.addHandler(file_handler2) 133 | root.addHandler(error_handler) 134 | root.addHandler(info_handler) 135 | 136 | # screen output formatter 137 | stream_handler.setFormatter( 138 | logging.Formatter('%(levelname)s - %(message)s')) 139 | root.addHandler(stream_handler) 140 | 141 | mainlogger = logging.getLogger(modulename) 142 | mainlogger.setLevel(logging.DEBUG) 143 | 144 | # pd.set_option('display.height', 1000) # pd.set_option('display.max_rows', 500) # pd.set_option('display.max_columns', 500) # pd.set_option('display.width', 1000) 145 | 146 | 147 | def get_dir(*paths): 148 | """ 149 | Retrieve path (for subpath use multiple arguments). 150 | 151 | 1. path from config file under Files section (relative to staging folder), or 152 | 2. 'codebase' for codebase base directory, or 153 | 3. if neither of the above, custom directory relative to codebase 154 | 155 | """ 156 | codebase = os.path.abspath(os.path.dirname(os.path.dirname(os.path.relpath(__file__)))) 157 | if paths[0] == 'codebase': # pylint: disable=no-else-return 158 | return codebase 159 | else: 160 | # check if entry in config.ini 161 | try: 162 | config = get_config() 163 | specified_path = config.get("Files", paths[0]) 164 | if len(paths) > 1: 165 | specified_path = os.path.join(specified_path, *paths[1:]) 166 | thirdparty_dir = config.get('Thirdparty', 'thirdparty_dir') 167 | full_path = os.path.abspath(os.path.join(codebase, thirdparty_dir, specified_path)) 168 | return full_path 169 | except: # pylint: disable=bare-except 170 | # otherwise just return absolute path in codebase 171 | return os.path.abspath(os.path.join(codebase, *paths)) # if path has multiple entries 172 | 173 | 174 | def exception_hook(*exc_info): 175 | """Catches all unhandled exceptions.""" 176 | # Print the error and traceback 177 | print("--- exception hook ----") 178 | text = "".join(traceback.format_exception(*exc_info)) # pylint: disable=E1120 179 | log.error("Unhandled exception: %s", text) 180 | 181 | 182 | def flatten(items): 183 | """Yield items from any nested iterable; see Reference.""" 184 | for x in items: 185 | if isinstance(x, Iterable) and not isinstance(x, (str, bytes)): 186 | for sub_x in flatten(x): 187 | yield sub_x 188 | else: 189 | yield x 190 | 191 | 192 | def get_multiprocessing_config(): 193 | """ 194 | Load multiprocessing configuration from config and read amount of cores. 195 | 196 | Maximum number of cores that are used is max(1, min(cores, num_cpus - 1)) 197 | 198 | Returns: 199 | parallel (boolean): if multiprocessing is True or False 200 | cores (int): Amount of cores to use 201 | 202 | """ 203 | config = get_config() 204 | parallel = config.getboolean('MultiThreading', 'parallel') 205 | cores = config.getint('MultiThreading', 'cores') 206 | num_cpus = multiprocessing.cpu_count() 207 | cores = max(1, min(cores, num_cpus - 1)) 208 | return parallel, cores 209 | 210 | 211 | def multi_threading(pool_fn, pool_args, disable_multiprocessing=False, dataframe_mode=False): 212 | """ 213 | Wrap multi threading for external c++ calls. 214 | 215 | Args: 216 | pool_fn: any partial function that takes a single argument. For multi argument functions reduce it with partial 217 | to a single argument. The first argument needs to be the list over which the pool can iterate. 218 | pool_args (list): list of any type that is passed into the pool.map or map. 219 | disable_multiprocessing (bool): if set to True, multiprocessing will not be applied, regardless of config.ini entry. 220 | dataframe_mode (bool): set to true to use starmap, so pd.concat can be used on results, 221 | if set to false, the result will be a list of list. 222 | 223 | Returns: 224 | res (list): Result of multiprocessing. Len of results will match len of the list of the pool_args 225 | 226 | """ 227 | 228 | parallel, cores = get_multiprocessing_config() 229 | log.debug("Start with parallel={} and cores={}, queue size={}".format(parallel, cores, len(pool_args))) 230 | if parallel and not disable_multiprocessing: 231 | threadpool = ThreadPool(cores) 232 | if dataframe_mode: 233 | res = threadpool.starmap(pool_fn, pool_args) 234 | else: 235 | res = threadpool.map(pool_fn, pool_args) 236 | else: 237 | res = [pool_fn(x) for x in pool_args] 238 | assert len(res) == len(pool_args) 239 | log.debug("Completed.") 240 | return res 241 | 242 | 243 | def memory_cache(func): 244 | """Memoisation decorator for functions taking one or more arguments.""" 245 | 246 | class Memoise: # pylint: disable=too-few-public-methods 247 | """A memoise class class.""" 248 | 249 | cache = {} 250 | 251 | def __init__(self, func_): 252 | self.func = func_ 253 | 254 | def __call__(self, *args, **kwargs): 255 | """Call to function with cached decorator.""" 256 | try: 257 | args_tuple = _keys_to_tuple(args, kwargs) 258 | try: 259 | res = self.cache[self.func.__name__, args_tuple] 260 | log.debug("+++ Using memory cacheed item for {} function +++ ".format(self.func.__name__)) 261 | return res 262 | except KeyError: 263 | log.debug("--- Caching item for {} function in memory ---".format(self.func.__name__)) 264 | self.cache[self.func.__name__, args_tuple] = res = self.func(*args, **kwargs) 265 | return res 266 | except Exception as err: # pylint: disable=broad-except 267 | raise RuntimeError("Error calling cached function {} ".format(self.func.__name__), err) 268 | 269 | return Memoise(func) 270 | 271 | 272 | def _keys_to_tuple(args, kwargs): 273 | """Ensure everything is hashable.""" 274 | compiled_args = [] 275 | for arg in args: 276 | if isinstance(arg, (pd.DataFrame, dict)): 277 | compiled_args.append(pickle.dumps(arg)) 278 | elif isinstance(arg, list): 279 | compiled_args.append(tuple(arg)) 280 | else: 281 | compiled_args.append(arg) 282 | 283 | for k, v in sorted(kwargs.items()): 284 | compiled_args.append(k) 285 | compiled_args.append(v) 286 | return tuple(compiled_args) 287 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code. 6 | extension-pkg-whitelist= 7 | 8 | # Add files or directories to the blacklist. They should be base names, not 9 | # paths. 10 | ignore=CVS 11 | 12 | # Add files or directories matching the regex patterns to the blacklist. The 13 | # regex matches against base names, not paths. 14 | ignore-patterns= 15 | 16 | # Python code to execute, usually for sys.path manipulation such as 17 | # pygtk.require(). 18 | #init-hook= 19 | 20 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 21 | # number of processors available to use. 22 | jobs=1 23 | 24 | # Control the amount of potential inferred values when inferring a single 25 | # object. This can help the performance when dealing with large functions or 26 | # complex, nested conditions. 27 | limit-inference-results=100 28 | 29 | # List of plugins (as comma separated values of python modules names) to load, 30 | # usually to register additional checkers. 31 | load-plugins= 32 | 33 | # Pickle collected data for later comparisons. 34 | persistent=yes 35 | 36 | # Specify a configuration file. 37 | #rcfile= 38 | 39 | # When enabled, pylint would attempt to guess common misconfiguration and emit 40 | # user-friendly hints instead of false-positive error messages. 41 | suggestion-mode=yes 42 | 43 | # Allow loading of arbitrary C extensions. Extensions are imported into the 44 | # active Python interpreter and may run arbitrary code. 45 | unsafe-load-any-extension=no 46 | 47 | 48 | [MESSAGES CONTROL] 49 | 50 | # Only show warnings with the listed confidence levels. Leave empty to show 51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 52 | confidence= 53 | 54 | # Disable the message, report, category or checker with the given id(s). You 55 | # can either give multiple identifiers separated by comma (,) or put this 56 | # option multiple times (only on the command line, not in the configuration 57 | # file where it should appear only once). You can also use "--disable=all" to 58 | # disable everything first and then reenable specific checks. For example, if 59 | # you want to run only the similarities checker, you can use "--disable=all 60 | # --enable=similarities". If you want to run only the classes checker, but have 61 | # no Warning level messages displayed, use "--disable=all --enable=classes 62 | # --disable=W". 63 | disable=print-statement, 64 | parameter-unpacking, 65 | unpacking-in-except, 66 | old-raise-syntax, 67 | backtick, 68 | long-suffix, 69 | old-ne-operator, 70 | old-octal-literal, 71 | import-star-module-level, 72 | non-ascii-bytes-literal, 73 | raw-checker-failed, 74 | bad-inline-option, 75 | locally-disabled, 76 | file-ignored, 77 | suppressed-message, 78 | useless-suppression, 79 | deprecated-pragma, 80 | use-symbolic-message-instead, 81 | apply-builtin, 82 | basestring-builtin, 83 | buffer-builtin, 84 | cmp-builtin, 85 | coerce-builtin, 86 | execfile-builtin, 87 | file-builtin, 88 | long-builtin, 89 | raw_input-builtin, 90 | reduce-builtin, 91 | standarderror-builtin, 92 | unicode-builtin, 93 | xrange-builtin, 94 | coerce-method, 95 | delslice-method, 96 | getslice-method, 97 | setslice-method, 98 | no-absolute-import, 99 | old-division, 100 | dict-iter-method, 101 | dict-view-method, 102 | next-method-called, 103 | metaclass-assignment, 104 | indexing-exception, 105 | raising-string, 106 | reload-builtin, 107 | oct-method, 108 | hex-method, 109 | nonzero-method, 110 | cmp-method, 111 | input-builtin, 112 | round-builtin, 113 | intern-builtin, 114 | unichr-builtin, 115 | map-builtin-not-iterating, 116 | zip-builtin-not-iterating, 117 | range-builtin-not-iterating, 118 | filter-builtin-not-iterating, 119 | using-cmp-argument, 120 | eq-without-hash, 121 | div-method, 122 | idiv-method, 123 | rdiv-method, 124 | exception-message-attribute, 125 | invalid-str-codec, 126 | sys-max-int, 127 | bad-python3-import, 128 | deprecated-string-function, 129 | deprecated-str-translate-call, 130 | deprecated-itertools-function, 131 | deprecated-types-field, 132 | next-method-defined, 133 | dict-items-not-iterating, 134 | dict-keys-not-iterating, 135 | dict-values-not-iterating, 136 | deprecated-operator-function, 137 | deprecated-urllib-function, 138 | xreadlines-attribute, 139 | deprecated-sys-function, 140 | exception-escape, 141 | comprehension-escape, 142 | line-too-long, 143 | logging-format-interpolation, 144 | logging-fstring-interpolation, 145 | logging-not-lazy, 146 | R0801, 147 | W0511 148 | 149 | # Enable the message, report, category or checker with the given id(s). You can 150 | # either give multiple identifier separated by comma (,) or put this option 151 | # multiple time (only on the command line, not in the configuration file where 152 | # it should appear only once). See also the "--disable" option for examples. 153 | enable=c-extension-no-member 154 | 155 | 156 | [REPORTS] 157 | 158 | # Python expression which should return a note less than 10 (10 is the highest 159 | # note). You have access to the variables errors warning, statement which 160 | # respectively contain the number of errors / warnings messages and the total 161 | # number of statements analyzed. This is used by the global evaluation report 162 | # (RP0004). 163 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 164 | 165 | # Template used to display messages. This is a python new-style format string 166 | # used to format the message information. See doc for all details. 167 | #msg-template= 168 | 169 | # Set the output format. Available formats are text, parseable, colorized, json 170 | # and msvs (visual studio). You can also give a reporter class, e.g. 171 | # mypackage.mymodule.MyReporterClass. 172 | output-format=text 173 | 174 | # Tells whether to display a full report or only the messages. 175 | reports=no 176 | 177 | # Activate the evaluation score. 178 | score=yes 179 | 180 | 181 | [REFACTORING] 182 | 183 | # Maximum number of nested blocks for function / method body 184 | max-nested-blocks=5 185 | 186 | # Complete name of functions that never returns. When checking for 187 | # inconsistent-return-statements if a never returning function is called then 188 | # it will be considered as an explicit return statement and no message will be 189 | # printed. 190 | never-returning-functions=sys.exit 191 | 192 | 193 | [BASIC] 194 | 195 | # Naming style matching correct argument names. 196 | argument-naming-style=snake_case 197 | 198 | # Regular expression matching correct argument names. Overrides argument- 199 | # naming-style. 200 | #argument-rgx= 201 | 202 | # Naming style matching correct attribute names. 203 | attr-naming-style=snake_case 204 | 205 | # Regular expression matching correct attribute names. Overrides attr-naming- 206 | # style. 207 | #attr-rgx= 208 | 209 | # Bad variable names which should always be refused, separated by a comma. 210 | bad-names=foo, 211 | bar, 212 | baz, 213 | toto, 214 | tutu, 215 | tata 216 | 217 | # Naming style matching correct class attribute names. 218 | class-attribute-naming-style=any 219 | 220 | # Regular expression matching correct class attribute names. Overrides class- 221 | # attribute-naming-style. 222 | #class-attribute-rgx= 223 | 224 | # Naming style matching correct class names. 225 | class-naming-style=PascalCase 226 | 227 | # Regular expression matching correct class names. Overrides class-naming- 228 | # style. 229 | #class-rgx= 230 | 231 | # Naming style matching correct constant names. 232 | const-naming-style=UPPER_CASE 233 | 234 | # Regular expression matching correct constant names. Overrides const-naming- 235 | # style. 236 | #const-rgx= 237 | 238 | # Minimum line length for functions/classes that require docstrings, shorter 239 | # ones are exempt. 240 | docstring-min-length=-1 241 | 242 | # Naming style matching correct function names. 243 | function-naming-style=snake_case 244 | 245 | # Regular expression matching correct function names. Overrides function- 246 | # naming-style. 247 | #function-rgx= 248 | 249 | # Good variable names which should always be accepted, separated by a comma. 250 | good-names=i, 251 | j, 252 | k, 253 | ex, 254 | Run, 255 | _, 256 | k, 257 | v, 258 | x, 259 | y, 260 | 261 | # Include a hint for the correct naming format with invalid-name. 262 | include-naming-hint=no 263 | 264 | # Naming style matching correct inline iteration names. 265 | inlinevar-naming-style=any 266 | 267 | # Regular expression matching correct inline iteration names. Overrides 268 | # inlinevar-naming-style. 269 | #inlinevar-rgx= 270 | 271 | # Naming style matching correct method names. 272 | method-naming-style=snake_case 273 | 274 | # Regular expression matching correct method names. Overrides method-naming- 275 | # style. 276 | #method-rgx= 277 | 278 | # Naming style matching correct module names. 279 | module-naming-style=snake_case 280 | 281 | # Regular expression matching correct module names. Overrides module-naming- 282 | # style. 283 | #module-rgx= 284 | 285 | # Colon-delimited sets of names that determine each other's naming style when 286 | # the name regexes allow several styles. 287 | name-group= 288 | 289 | # Regular expression which should only match function or class names that do 290 | # not require a docstring. 291 | no-docstring-rgx=^_ 292 | 293 | # List of decorators that produce properties, such as abc.abstractproperty. Add 294 | # to this list to register other decorators that produce valid properties. 295 | # These decorators are taken in consideration only for invalid-name. 296 | property-classes=abc.abstractproperty 297 | 298 | # Naming style matching correct variable names. 299 | variable-naming-style=snake_case 300 | 301 | # Regular expression matching correct variable names. Overrides variable- 302 | # naming-style. 303 | #variable-rgx= 304 | 305 | 306 | [FORMAT] 307 | 308 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 309 | expected-line-ending-format= 310 | 311 | # Regexp for a line that is allowed to be longer than the limit. 312 | ignore-long-lines=^\s*(# )??$ 313 | 314 | # Number of spaces of indent required inside a hanging or continued line. 315 | indent-after-paren=4 316 | 317 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 318 | # tab). 319 | indent-string=' ' 320 | 321 | # Maximum number of characters on a single line. 322 | max-line-length=100 323 | 324 | # Maximum number of lines in a module. 325 | max-module-lines=1000 326 | 327 | # List of optional constructs for which whitespace checking is disabled. `dict- 328 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 329 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 330 | # `empty-line` allows space-only lines. 331 | no-space-check=trailing-comma, 332 | dict-separator 333 | 334 | # Allow the body of a class to be on the same line as the declaration if body 335 | # contains single statement. 336 | single-line-class-stmt=no 337 | 338 | # Allow the body of an if to be on the same line as the test if there is no 339 | # else. 340 | single-line-if-stmt=no 341 | 342 | 343 | [LOGGING] 344 | 345 | # Format style used to check logging format string. `old` means using % 346 | # formatting, while `new` is for `{}` formatting. 347 | logging-format-style=old 348 | 349 | # Logging modules to check that the string format arguments are in logging 350 | # function parameter format. 351 | logging-modules=logging 352 | 353 | 354 | [MISCELLANEOUS] 355 | 356 | # List of note tags to take in consideration, separated by a comma. 357 | notes=FIXME, 358 | XXX, 359 | TODO, 360 | todo 361 | 362 | 363 | [SIMILARITIES] 364 | 365 | # Ignore comments when computing similarities. 366 | ignore-comments=yes 367 | 368 | # Ignore docstrings when computing similarities. 369 | ignore-docstrings=yes 370 | 371 | # Ignore imports when computing similarities. 372 | ignore-imports=no 373 | 374 | # Minimum lines number of a similarity. 375 | min-similarity-lines=10 376 | 377 | 378 | [SPELLING] 379 | 380 | # Limits count of emitted suggestions for spelling mistakes. 381 | max-spelling-suggestions=4 382 | 383 | # Spelling dictionary name. Available dictionaries: none. To make it working 384 | # install python-enchant package.. 385 | spelling-dict= 386 | 387 | # List of comma separated words that should not be checked. 388 | spelling-ignore-words= 389 | 390 | # A path to a file that contains private dictionary; one word per line. 391 | spelling-private-dict-file= 392 | 393 | # Tells whether to store unknown words to indicated private dictionary in 394 | # --spelling-private-dict-file option instead of raising a message. 395 | spelling-store-unknown-words=no 396 | 397 | 398 | [STRING] 399 | 400 | # This flag controls whether the implicit-str-concat-in-sequence should 401 | # generate a warning on implicit string concatenation in sequences defined over 402 | # several lines. 403 | check-str-concat-over-line-jumps=no 404 | 405 | 406 | [TYPECHECK] 407 | 408 | # List of decorators that produce context managers, such as 409 | # contextlib.contextmanager. Add to this list to register other decorators that 410 | # produce valid context managers. 411 | contextmanager-decorators=contextlib.contextmanager 412 | 413 | # List of members which are set dynamically and missed by pylint inference 414 | # system, and so shouldn't trigger E1101 when accessed. Python regular 415 | # expressions are accepted. 416 | generated-members= 417 | 418 | # Tells whether missing members accessed in mixin class should be ignored. A 419 | # mixin class is detected if its name ends with "mixin" (case insensitive). 420 | ignore-mixin-members=yes 421 | 422 | # Tells whether to warn about missing members when the owner of the attribute 423 | # is inferred to be None. 424 | ignore-none=yes 425 | 426 | # This flag controls whether pylint should warn about no-member and similar 427 | # checks whenever an opaque object is returned when inferring. The inference 428 | # can return multiple potential results while evaluating a Python object, but 429 | # some branches might not be evaluated, which results in partial inference. In 430 | # that case, it might be useful to still emit no-member and other checks for 431 | # the rest of the inferred objects. 432 | ignore-on-opaque-inference=yes 433 | 434 | # List of class names for which member attributes should not be checked (useful 435 | # for classes with dynamically set attributes). This supports the use of 436 | # qualified names. 437 | ignored-classes=optparse.Values,thread._local,_thread._local 438 | 439 | # List of module names for which member attributes should not be checked 440 | # (useful for modules/projects where namespaces are manipulated during runtime 441 | # and thus existing member attributes cannot be deduced by static analysis. It 442 | # supports qualified module names, as well as Unix pattern matching. 443 | ignored-modules= 444 | 445 | # Show a hint with possible names when a member name was not found. The aspect 446 | # of finding the hint is based on edit distance. 447 | missing-member-hint=yes 448 | 449 | # The minimum edit distance a name should have in order to be considered a 450 | # similar match for a missing member name. 451 | missing-member-hint-distance=1 452 | 453 | # The total number of similar names that should be taken in consideration when 454 | # showing a hint for a missing member. 455 | missing-member-max-choices=1 456 | 457 | 458 | [VARIABLES] 459 | 460 | # List of additional names supposed to be defined in builtins. Remember that 461 | # you should avoid defining new builtins when possible. 462 | additional-builtins= 463 | 464 | # Tells whether unused global variables should be treated as a violation. 465 | allow-global-unused-variables=yes 466 | 467 | # List of strings which can identify a callback function by name. A callback 468 | # name must start or end with one of those strings. 469 | callbacks=cb_, 470 | _cb 471 | 472 | # A regular expression matching the name of dummy variables (i.e. expected to 473 | # not be used). 474 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 475 | 476 | # Argument names that match this expression will be ignored. Default to name 477 | # with leading underscore. 478 | ignored-argument-names=_.*|^ignored_|^unused_ 479 | 480 | # Tells whether we should check for unused import in __init__ files. 481 | init-import=no 482 | 483 | # List of qualified module names which can have objects that can redefine 484 | # builtins. 485 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 486 | 487 | 488 | [CLASSES] 489 | 490 | # List of method names used to declare (i.e. assign) instance attributes. 491 | defining-attr-methods=__init__, 492 | __new__, 493 | setUp 494 | 495 | # List of member names, which should be excluded from the protected access 496 | # warning. 497 | exclude-protected=_asdict, 498 | _fields, 499 | _replace, 500 | _source, 501 | _make 502 | 503 | # List of valid names for the first argument in a class method. 504 | valid-classmethod-first-arg=cls 505 | 506 | # List of valid names for the first argument in a metaclass class method. 507 | valid-metaclass-classmethod-first-arg=cls 508 | 509 | 510 | [DESIGN] 511 | 512 | # Maximum number of arguments for function / method. 513 | max-args=10 514 | 515 | # Maximum number of attributes for a class (see R0902). 516 | max-attributes=50 517 | 518 | # Maximum number of boolean expressions in an if statement. 519 | max-bool-expr=5 520 | 521 | # Maximum number of branch for function / method body. 522 | max-branches=30 523 | 524 | # Maximum number of locals for function / method body. 525 | max-locals=30 526 | 527 | # Maximum number of parents for a class (see R0901). 528 | max-parents=5 529 | 530 | # Maximum number of public methods for a class (see R0904). 531 | max-public-methods=30 532 | 533 | # Maximum number of return / yield for function / method body. 534 | max-returns=20 535 | 536 | # Maximum number of statements in function / method body. 537 | max-statements=100 538 | 539 | # Minimum number of public methods for a class (see R0903). 540 | min-public-methods=0 541 | 542 | 543 | [IMPORTS] 544 | 545 | # Allow wildcard imports from modules that define __all__. 546 | allow-wildcard-with-all=no 547 | 548 | # Analyse import fallback blocks. This can be used to support both Python 2 and 549 | # 3 compatible code, which means that the block might have code that exists 550 | # only in one or another interpreter, leading to false positives when analysed. 551 | analyse-fallback-blocks=no 552 | 553 | # Deprecated modules which should not be used, separated by a comma. 554 | deprecated-modules=optparse,tkinter.tix 555 | 556 | # Create a graph of external dependencies in the given file (report RP0402 must 557 | # not be disabled). 558 | ext-import-graph= 559 | 560 | # Create a graph of every (i.e. internal and external) dependencies in the 561 | # given file (report RP0402 must not be disabled). 562 | import-graph= 563 | 564 | # Create a graph of internal dependencies in the given file (report RP0402 must 565 | # not be disabled). 566 | int-import-graph= 567 | 568 | # Force import order to recognize a module as part of the standard 569 | # compatibility libraries. 570 | known-standard-library= 571 | 572 | # Force import order to recognize a module as part of a third party library. 573 | known-third-party=enchant 574 | 575 | 576 | [EXCEPTIONS] 577 | 578 | # Exceptions that will emit a warning when being caught. Defaults to 579 | # "BaseException, Exception". 580 | overgeneral-exceptions=BaseException, 581 | Exception 582 | --------------------------------------------------------------------------------