├── HSM ├── model │ ├── __init__.py │ ├── best_estimators │ │ └── model_sw.pkl │ ├── predict.py │ └── train.py ├── tests │ ├── __init__.py │ ├── test_db.py │ └── test_api.py ├── utils │ ├── __init__.py │ ├── validate.py │ ├── db.py │ ├── db_utils.py │ ├── config.py │ └── qualtrics.py ├── requirements-dev.txt ├── requirements.txt ├── load_data.py ├── api.py ├── app.py └── README.MD ├── Procfile ├── runtime.txt ├── manifest.yml ├── bin └── run.sh ├── .flake8 ├── docker-entrypoint.sh ├── launcher ├── scripts │ ├── run_hsm.sh │ ├── launch_server.sh │ └── launch_hsm.sh ├── start_hsm.command └── update_qualtrics.command ├── ISSUE_TEMPLATE.md ├── .github └── dependabot.yml ├── pull_request_template.md ├── Dockerfile ├── docker-compose-cloud.yml ├── docker-compose.yml ├── CONTRIBUTING.md ├── .circleci └── config.yml ├── LICENSE.md ├── Pipfile ├── .cfignore ├── .gitignore ├── sample.env ├── README.md └── Pipfile.lock /HSM/model/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /HSM/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /HSM/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: bin/run.sh -------------------------------------------------------------------------------- /runtime.txt: -------------------------------------------------------------------------------- 1 | python-3.6.x -------------------------------------------------------------------------------- /manifest.yml: -------------------------------------------------------------------------------- 1 | --- 2 | applications: 3 | - name: mlaas -------------------------------------------------------------------------------- /HSM/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | flake8==3.7.7 -------------------------------------------------------------------------------- /bin/run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ROOT=$HOME/HSM 3 | 4 | cd $ROOT 5 | gunicorn api:app -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length=120 3 | exclude=.venv, venv, /*migrations*/, *manage.py* -------------------------------------------------------------------------------- /HSM/model/best_estimators/model_sw.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/18F/10x-MLaaS/master/HSM/model/best_estimators/model_sw.pkl -------------------------------------------------------------------------------- /docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ROOT=/home/hsm/HSM 3 | 4 | cd $ROOT 5 | exec gunicorn -b :$HOST_PORT --reload --access-logfile - --error-logfile - api:app -------------------------------------------------------------------------------- /launcher/scripts/run_hsm.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | docker exec --user hsm --workdir /home/hsm -it ${CONTAINER_NAME_WEB} "/bin/bash" -c "python ~/HSM/app.py" 4 | 5 | -------------------------------------------------------------------------------- /ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## 10x Qualitative Data User story 2 | As a \, I want \ so that \. 3 | 4 | ## Acceptance criteria 5 | - [ ] 6 | - [ ] 7 | - [ ] 8 | -------------------------------------------------------------------------------- /launcher/scripts/launch_server.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | working_dir=$(dirname "$0") 4 | cd $working_dir 5 | cd .. 6 | cd .. 7 | 8 | docker-compose up --build 9 | 10 | -------------------------------------------------------------------------------- /launcher/start_hsm.command: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | script_name=launch_server.sh 4 | 5 | working_dir=$(dirname "$0") 6 | script_dir=$working_dir/scripts 7 | 8 | echo script_dir: $script_dir 9 | 10 | open -a Terminal.app $script_dir/$script_name 11 | -------------------------------------------------------------------------------- /launcher/update_qualtrics.command: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | script_name=launch_hsm.sh 4 | 5 | working_dir=$(dirname "$0") 6 | script_dir=$working_dir/scripts 7 | 8 | echo script_dir: $script_dir 9 | 10 | open -a Terminal.app $script_dir/$script_name 11 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: docker 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: "09:00" 8 | open-pull-requests-limit: 10 9 | ignore: 10 | - dependency-name: python 11 | versions: 12 | - 3.9.1 13 | - 3.9.2 14 | - 3.9.3 15 | -------------------------------------------------------------------------------- /pull_request_template.md: -------------------------------------------------------------------------------- 1 | Short description explaining the high-level reason for the pull request 2 | 3 | ## Reference 4 | 5 | - Reference to a related issue 6 | 7 | ## Additions 8 | 9 | - 10 | 11 | ## Removals 12 | 13 | - 14 | 15 | ## Changes 16 | 17 | - 18 | 19 | ## Testing 20 | 21 | - 22 | 23 | ## Review 24 | 25 | - @user 26 | 27 | ## Notes 28 | 29 | - 30 | 31 | ## Todos 32 | 33 | - 34 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.6 2 | 3 | RUN apt-get update && apt-get install -y postgresql-client 4 | 5 | COPY Pipfile Pipfile 6 | COPY Pipfile.lock Pipfile.lock 7 | 8 | RUN pip install pipenv 9 | RUN pipenv install --system --dev 10 | 11 | COPY docker-entrypoint.sh ./ 12 | RUN chmod +x docker-entrypoint.sh 13 | 14 | # Add hsm user 15 | RUN useradd hsm && echo "hsm:hsm" | chpasswd && adduser hsm sudo 16 | 17 | EXPOSE 8080 18 | -------------------------------------------------------------------------------- /docker-compose-cloud.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 
services: 3 | web: 4 | build: . 5 | tty: true 6 | container_name: ${CONTAINER_NAME_WEB} 7 | restart: always 8 | ports: 9 | - "${HOST_PORT}:${HOST_PORT}" 10 | env_file: 11 | - ./.env 12 | volumes: 13 | - .:/home/hsm 14 | network_mode: "host" 15 | entrypoint: 16 | - ./docker-entrypoint.sh 17 | -------------------------------------------------------------------------------- /launcher/scripts/launch_hsm.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | script_dir=$(dirname "$0") 4 | echo script_dir: $script_dir 5 | cd $script_dir 6 | cd .. 7 | cd .. 8 | 9 | working_dir=$(dirname "$0") 10 | echo working_dir: $( pwd ) 11 | 12 | #pipenv shell 13 | pipenv run $script_dir/run_hsm.sh 14 | #pipenv run echo "name: ${CONTAINER_NAME_WEB}" 15 | #echo "name: ${CONTAINER_NAME_WEB}" 16 | # pipenv run docker exec --user hsm --workdir /home/hsm -it ${CONTAINER_NAME_WEB} "/bin/bash" -c "python ~/HSM/app.py" 17 | -------------------------------------------------------------------------------- /HSM/requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4==4.6.3 2 | certifi==2018.10.15 3 | chardet==3.0.4 4 | contractions==0.0.17 5 | dill==0.2.8.2 6 | et-xmlfile==1.0.1 7 | idna==2.7 8 | imbalanced-learn==0.3.3 9 | jdcal==1.4 10 | nltk==3.4.5 11 | numpy==1.16.3 12 | openpyxl==2.5.9 13 | pandas==0.23.4 14 | psycopg2==2.7.5 15 | python-dateutil==2.7.5 16 | pytz==2018.7 17 | requests==2.20.0 18 | scikit-learn==0.24.2 19 | scipy==1.1.0 20 | six==1.11.0 21 | SQLAlchemy==1.3.5 22 | SQLAlchemy-Utils==0.33.6 23 | urllib3==1.26.5 24 | xlrd==1.1.0 25 | -------------------------------------------------------------------------------- /HSM/utils/validate.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | 4 | class Validate(): 5 | 6 | def __init__(self, results_path): 7 | self.results_path = results_path 8 | 9 | def get_validations(self): 10 | ''' 11 | Returns a mapping of responseIds to user-validated spam/ham codes 12 | ''' 13 | validated_df = pd.read_excel(self.results_path) 14 | validated_id_pred_map = dict(zip(validated_df['ResponseID'], 15 | validated_df['SPAM'])) 16 | return validated_id_pred_map 17 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | web: 4 | build: . 
5 | tty: true 6 | container_name: ${CONTAINER_NAME_WEB} 7 | restart: always 8 | ports: 9 | - "${HOST_PORT}:${HOST_PORT}" 10 | environment: 11 | - DB_NAME=${DB_NAME} 12 | - DB_ADDR=${DB_ADDR} 13 | env_file: 14 | - ./.env 15 | volumes: 16 | - .:/home/hsm 17 | depends_on: 18 | - db 19 | networks: 20 | - db_nw 21 | - web_nw 22 | entrypoint: 23 | - ./docker-entrypoint.sh 24 | db: 25 | image: "postgres:9.6.5" 26 | container_name: ${CONTAINER_NAME_DB} 27 | environment: 28 | - POSTGRES_USER=${DB_USER} 29 | - POSTGRES_PASSWORD=${DB_PASS} 30 | - POSTGRES_DB=${DB_NAME} 31 | - POSTGRES_PORT=5432 32 | networks: 33 | - db_nw 34 | restart: always 35 | networks: 36 | db_nw: 37 | driver: bridge 38 | web_nw: 39 | driver: bridge 40 | -------------------------------------------------------------------------------- /HSM/tests/test_db.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from sqlalchemy import create_engine 3 | from sqlalchemy.orm import sessionmaker 4 | # from utils.config import SQLALCHEMY_URI 5 | 6 | 7 | # global application scope. create Session class, engine 8 | Session = sessionmaker() 9 | 10 | engine = create_engine("sqlite://") 11 | 12 | 13 | class SomeTest(unittest.TestCase): 14 | 15 | def setUp(self): 16 | # connect to the database 17 | self.connection = engine.connect() 18 | 19 | # begin a non-ORM transaction 20 | self.trans = self.connection.begin() 21 | 22 | # bind an individual Session to the connection 23 | self.session = Session(bind=self.connection) 24 | 25 | def test_something(self): 26 | # use the session in tests. 27 | 28 | # self.session.add(Foo()) 29 | self.session.commit() 30 | 31 | def tearDown(self): 32 | self.session.close() 33 | 34 | # rollback - everything that happened with the 35 | # Session above (including calls to commit()) 36 | # is rolled back. 37 | self.trans.rollback() 38 | 39 | # return connection to the Engine 40 | self.connection.close() 41 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## Welcome! 2 | 3 | We're so glad you're thinking about contributing to an 18F open source project! If you're unsure about anything, just ask -- or submit the issue or pull request anyway. The worst that can happen is you'll be politely asked to change something. We love all friendly contributions. 4 | 5 | We want to ensure a welcoming environment for all of our projects. Our staff follow the [18F Code of Conduct](https://github.com/18F/code-of-conduct/blob/master/code-of-conduct.md) and all contributors should do the same. 6 | 7 | We encourage you to read this project's CONTRIBUTING policy (you are here), its [LICENSE](LICENSE.md), and its [README](README.md). 8 | 9 | If you have any questions or want to read more, check out the [18F Open Source Policy GitHub repository](https://github.com/18f/open-source-policy), or just [shoot us an email](mailto:18f@gsa.gov). 10 | 11 | ## Public domain 12 | 13 | This project is in the public domain within the United States, and 14 | copyright and related rights in the work worldwide are waived through 15 | the [CC0 1.0 Universal public domain dedication](https://creativecommons.org/publicdomain/zero/1.0/). 16 | 17 | All contributions to this project will be released under the CC0 18 | dedication. By submitting a pull request, you are agreeing to comply 19 | with this waiver of copyright interest. 
20 | -------------------------------------------------------------------------------- /HSM/utils/db.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import create_engine 2 | from sqlalchemy.ext.declarative import declarative_base 3 | from sqlalchemy import Column, JSON, Integer, String, ForeignKey 4 | from sqlalchemy.orm import relationship, sessionmaker 5 | from utils.config import SQLALCHEMY_URI 6 | 7 | Base = declarative_base() 8 | 9 | 10 | class DataAccessLayer: 11 | 12 | def __init__(self): 13 | self.engine = None 14 | self.conn_string = SQLALCHEMY_URI 15 | 16 | def connect(self): 17 | self.engine = create_engine(self.conn_string, echo=True) 18 | Base.metadata.create_all(self.engine) 19 | self.Session = sessionmaker(bind=self.engine) 20 | 21 | 22 | dal = DataAccessLayer() 23 | 24 | 25 | class Data(Base): 26 | __tablename__ = 'data' 27 | id = Column(Integer, primary_key=True, index=True) 28 | filter_feature = Column(String(10000), nullable=True) 29 | validation = Column(Integer) 30 | 31 | support_data = relationship("SupportData", uselist=False, back_populates="data") 32 | 33 | 34 | class SupportData(Base): 35 | __tablename__ = 'support_data' 36 | id = Column(Integer, primary_key=True, index=True) 37 | support_data = Column(JSON) 38 | 39 | data_id = Column(Integer, ForeignKey('data.id'), nullable=False) 40 | data = relationship("Data", back_populates="support_data") 41 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | docker: 5 | - image: circleci/python:3.6.8 6 | environment: 7 | TZ: America/New_York 8 | PIPENV_VENV_IN_PROJECT: true 9 | DATABASE_URL: postgres://postgres@localhost/hsm-db 10 | CI_TESTING: true 11 | 12 | - image: circleci/postgres:9.6.5 13 | environment: 14 | POSTGRES_USER: hsm 15 | POSTGRES_DB: hsm-db 16 | steps: 17 | - checkout 18 | 19 | # Download and cache dependencies 20 | - restore_cache: 21 | keys: 22 | - v1-dependencies-{{ .Branch }}-{{ checksum "Pipfile.lock" }} 23 | 24 | - run: 25 | name: install dependencies 26 | command: | 27 | pip install pipenv 28 | pipenv install --dev 29 | 30 | - save_cache: 31 | paths: 32 | - ./.venv 33 | key: v1-dependencies-{{ .Branch }}-{{ checksum "Pipfile.lock" }} 34 | 35 | - run: 36 | name: Put sample environment variables in place 37 | command: | 38 | cp sample.env .env 39 | 40 | - run: 41 | name: HSM test suite 42 | command: | 43 | cd HSM 44 | pipenv run python -m unittest 45 | 46 | - run: 47 | name: Flake8 48 | command: | 49 | pipenv run flake8 50 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | As a work of the United States government, this project is in the 2 | public domain within the United States. 3 | 4 | Additionally, we waive copyright and related rights in the work 5 | worldwide through the CC0 1.0 Universal public domain dedication. 6 | 7 | ## CC0 1.0 Universal summary 8 | 9 | This is a human-readable summary of the [Legal Code (read the full text)](https://creativecommons.org/publicdomain/zero/1.0/legalcode). 
10 | 11 | ### No copyright 12 | 13 | The person who associated a work with this deed has dedicated the work to 14 | the public domain by waiving all rights to the work worldwide 15 | under copyright law, including all related and neighboring rights, to the 16 | extent allowed by law. 17 | 18 | You can copy, modify, distribute and perform the work, even for commercial 19 | purposes, all without asking permission. 20 | 21 | ### Other information 22 | 23 | In no way are the patent or trademark rights of any person affected by CC0, 24 | nor are the rights that other persons may have in the work or in how the 25 | work is used, such as publicity or privacy rights. 26 | 27 | Unless expressly stated otherwise, the person who associated a work with 28 | this deed makes no warranties about the work, and disclaims liability for 29 | all uses of the work, to the fullest extent permitted by applicable law. 30 | When using or citing the work, you should not imply endorsement by the 31 | author or the affirmer. 32 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | bandit = "*" 8 | beautifulsoup4 = "==4.6.3" 9 | certifi = "==2018.10.15" 10 | chardet = "==3.0.4" 11 | contractions = "==0.0.17" 12 | dill = "==0.2.8.2" 13 | et-xmlfile = "==1.0.1" 14 | idna = "==2.7" 15 | imbalanced-learn = "==0.3.3" 16 | jdcal = "==1.4" 17 | nltk = "==3.4.5" 18 | numpy = "==1.15.4" 19 | openpyxl = "==2.5.9" 20 | pandas = "==0.23.4" 21 | python-dateutil = "==2.7.5" 22 | pytz = "==2018.7" 23 | requests = "==2.20.0" 24 | scikit-learn = "==0.19.1" 25 | scipy = "==1.1.0" 26 | six = "==1.11.0" 27 | urllib3 = ">=1.24.1" 28 | xlrd = "==1.1.0" 29 | SQLAlchemy = ">=1.2.13" 30 | SQLAlchemy-Utils = "==0.33.6" 31 | flake8 = "*" 32 | parameterized = "*" 33 | 34 | [packages] 35 | beautifulsoup4 = "==4.6.3" 36 | certifi = "==2018.10.15" 37 | chardet = "==3.0.4" 38 | contractions = "==0.0.17" 39 | dill = "==0.2.8.2" 40 | idna = "==2.7" 41 | imbalanced-learn = "==0.3.3" 42 | jdcal = "==1.4" 43 | nltk = "==3.4.5" 44 | numpy = "==1.15.4" 45 | openpyxl = "==2.5.9" 46 | pandas = "==0.23.4" 47 | python-dateutil = "==2.7.5" 48 | pytz = "==2018.7" 49 | requests = "==2.20.0" 50 | scikit-learn = "==0.19.1" 51 | scipy = "==1.1.0" 52 | six = "==1.11.0" 53 | urllib3 = ">=1.24.2" 54 | xlrd = "==1.1.0" 55 | et_xmlfile = "==1.0.1" 56 | SQLAlchemy = ">=1.3.0" 57 | SQLAlchemy-Utils = "==0.33.6" 58 | flask = "*" 59 | gunicorn = "*" 60 | flask-httpauth = "*" 61 | psycopg2-binary = "*" 62 | xlsxwriter = "*" 63 | cfenv = "*" 64 | wordcloud = "*" 65 | jupyter = "*" 66 | sklearn = "*" 67 | matplotlib = "*" 68 | seaborn = "*" 69 | pyldavis = "*" 70 | jupyter-contrib-nbextensions = "*" 71 | 72 | [requires] 73 | python_version = "3.6" 74 | -------------------------------------------------------------------------------- /HSM/load_data.py: -------------------------------------------------------------------------------- 1 | import json 2 | from argparse import ArgumentParser 3 | import pandas as pd 4 | from utils import db, db_utils 5 | from utils.db import Data, SupportData 6 | 7 | filter_feature = 'Comments Concatenated' 8 | validation = 'Validation' 9 | 10 | 11 | def main(file): 12 | db_utils.create_postgres_db() 13 | db.dal.connect() 14 | session = db.dal.Session() 15 | 16 | df = pd.read_excel(file) 17 | 18 | data_columns = 
[filter_feature, validation] 19 | 20 | data = df[data_columns] 21 | support_data = json.loads(df[df.columns.difference(data_columns)].to_json(orient='records')) 22 | 23 | for i in range(len(data)): 24 | 25 | data_row = data.iloc[i] 26 | support_data_row = support_data[i] 27 | 28 | data_obj = Data(filter_feature=str(data_row[filter_feature]), validation=int(data_row[validation])) 29 | session.add(data_obj) 30 | session.flush() 31 | support_data_obj = SupportData(support_data=support_data_row) 32 | data_obj.support_data = support_data_obj 33 | support_data_obj.data = data_obj 34 | support_data_obj.data_id = support_data_obj.data.id 35 | session.add(support_data_obj) 36 | 37 | session.commit() 38 | print(f'Loaded {len(data)} records of data and support_data.') 39 | 40 | 41 | if __name__ == '__main__': 42 | 43 | program_desc = '''This application will get the spreadsheet and pull out essential data to fill out 44 | the database. It will populate the database in the `data` table. It also put all 45 | other data in the database as well in support_data table.''' 46 | 47 | parser = ArgumentParser(description=program_desc) 48 | parser.add_argument("file", help="specify path to file") 49 | 50 | args = parser.parse_args() 51 | 52 | main(file=args.file) 53 | -------------------------------------------------------------------------------- /HSM/tests/test_api.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | import base64 4 | from api import app 5 | from parameterized import parameterized 6 | from unittest.mock import patch 7 | from werkzeug.security import generate_password_hash 8 | 9 | 10 | class BasicTestCase(unittest.TestCase): 11 | 12 | @parameterized.expand([ 13 | ("index", "/"), 14 | ("predict", "/predict"), 15 | ("validate", "/validate"), 16 | ("train", "/train") 17 | ]) 18 | def test_api_no_username(self, name, input): 19 | tester = app.test_client(self) 20 | response = tester.get(input) 21 | self.assertEqual(response.status_code, 401) 22 | self.assertDictEqual(json.loads(response.data), {'error': 'Unauthorized access'}) 23 | 24 | @parameterized.expand([ 25 | ("index", "/", {'title': 'Predict->Validate->Train', 'username': 'amy'}), 26 | ("predict", "/predict", {'task': 'predict', 'username': 'amy'}), 27 | ("validate", "/validate", {'task': 'validate', 'username': 'amy'}), 28 | ("train", "/train", {'task': 'train', 'username': 'amy'}) 29 | ]) 30 | @patch.dict('utils.config.users', {'amy': generate_password_hash('password')}, clear=True) 31 | def test_index(self, name, input, expected): 32 | tester = app.test_client(self) 33 | creds = base64.b64encode(b'amy:password').decode('utf-8') 34 | 35 | response = tester.get(input, headers={'Authorization': 'Basic ' + creds}) 36 | self.assertEqual(response.status_code, 200) 37 | self.assertDictEqual(json.loads(response.data), expected) 38 | 39 | def test_invalid_path(self): 40 | tester = app.test_client(self) 41 | response = tester.get('/not_valid') 42 | self.assertEqual(response.status_code, 404) 43 | self.assertDictEqual(json.loads(response.data), {'error': 'Not found'}) 44 | 45 | 46 | if __name__ == '__main__': 47 | unittest.main() 48 | -------------------------------------------------------------------------------- /HSM/api.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, jsonify, make_response 2 | from flask_httpauth import HTTPBasicAuth 3 | from utils import config, db, db_utils 4 | from werkzeug.security import 
check_password_hash 5 | 6 | # initialization 7 | app = Flask(__name__) 8 | app.config['SECRET_KEY'] = config.APP_SECRET_KEY 9 | auth = HTTPBasicAuth() 10 | 11 | 12 | @auth.verify_password 13 | def verify_password(username, password): 14 | if username in config.users: 15 | return check_password_hash(config.users.get(username), password) 16 | return False 17 | 18 | 19 | @auth.error_handler 20 | def unauthorized(): 21 | return make_response(jsonify({'error': 'Unauthorized access'}), 401) 22 | 23 | 24 | @app.errorhandler(404) 25 | def not_found(error): 26 | return make_response(jsonify({'error': 'Not found'}), 404) 27 | 28 | 29 | @app.route('/') 30 | @auth.login_required 31 | def index(): 32 | return jsonify({'title': 'Predict->Validate->Train', 33 | 'username': auth.username() 34 | }) 35 | 36 | 37 | @app.route('/predict', methods=['GET']) 38 | @auth.login_required 39 | def predict(): 40 | return jsonify({'task': 'predict', 41 | 'username': auth.username() 42 | }) 43 | 44 | 45 | @app.route('/validate') # , methods=['POST']) 46 | @auth.login_required 47 | def validate(): 48 | return jsonify({'task': 'validate', 49 | 'username': auth.username(), 50 | }), 200 51 | 52 | 53 | @app.route('/train') # , methods=['POST']) 54 | @auth.login_required 55 | def train(): 56 | 57 | return jsonify({'task': 'train', 58 | 'username': auth.username() 59 | }), 200 60 | 61 | 62 | if __name__ == "__main__": 63 | db_utils.create_postgres_db() 64 | db.dal.connect() 65 | session = db.dal.Session() 66 | port = int(config.APP_PORT) 67 | app.run(host='0.0.0.0', port=port) 68 | -------------------------------------------------------------------------------- /.cfignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # IPython 76 | profile_default/ 77 | ipython_config.py 78 | 79 | # pyenv 80 | .python-version 81 | 82 | # celery beat schedule file 83 | celerybeat-schedule 84 | 85 | # SageMath parsed files 86 | *.sage.py 87 | 88 | # Environments 89 | .env 90 | .venv 91 | env/ 92 | venv/ 93 | ENV/ 94 | env.bak/ 95 | venv.bak/ 96 | 97 | # Spyder project settings 98 | .spyderproject 99 | .spyproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | 104 | # mkdocs documentation 105 | /site 106 | 107 | # mypy 108 | .mypy_cache/ 109 | .dmypy.json 110 | dmypy.json 111 | 112 | ### System Files ### 113 | *.DS_Store 114 | .DS_Store? 115 | 116 | # Windows thumbnail cache files 117 | Thumbs.db 118 | ehthumbs.db 119 | ehthumbs_vista.db 120 | 121 | # Folder config file 122 | Desktop.ini 123 | 124 | # Recycle Bin used on file shares 125 | $RECYCLE.BIN/ 126 | 127 | # Thumbnails 128 | ._* 129 | 130 | # Files that might appear in the root of a volume 131 | .DocumentRevisions-V100 132 | .fseventsd 133 | .Spotlight-V100 134 | .TemporaryItems 135 | .Trashes 136 | .VolumeIcon.icns 137 | .com.apple.timemachine.donotpresent 138 | 139 | # Data files 140 | *.xlsx 141 | *.csv 142 | *.bin 143 | *.json 144 | 145 | # Database dump 146 | *.dump 147 | *.sql 148 | 149 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # IPython 76 | profile_default/ 77 | ipython_config.py 78 | 79 | # pyenv 80 | .python-version 81 | 82 | # celery beat schedule file 83 | celerybeat-schedule 84 | 85 | # SageMath parsed files 86 | *.sage.py 87 | 88 | # Environments 89 | .env 90 | .venv 91 | env/ 92 | venv/ 93 | ENV/ 94 | env.bak/ 95 | venv.bak/ 96 | 97 | # Spyder project settings 98 | .spyderproject 99 | .spyproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | 104 | # mkdocs documentation 105 | /site 106 | 107 | # mypy 108 | .mypy_cache/ 109 | .dmypy.json 110 | dmypy.json 111 | 112 | ### System Files ### 113 | *.DS_Store 114 | .DS_Store? 115 | 116 | # Windows thumbnail cache files 117 | Thumbs.db 118 | ehthumbs.db 119 | ehthumbs_vista.db 120 | 121 | # Folder config file 122 | Desktop.ini 123 | 124 | # Recycle Bin used on file shares 125 | $RECYCLE.BIN/ 126 | 127 | # Thumbnails 128 | ._* 129 | 130 | # Files that might appear in the root of a volume 131 | .DocumentRevisions-V100 132 | .fseventsd 133 | .Spotlight-V100 134 | .TemporaryItems 135 | .Trashes 136 | .VolumeIcon.icns 137 | .com.apple.timemachine.donotpresent 138 | 139 | # Data files 140 | *.xlsx 141 | *.csv 142 | *.bin 143 | *.json 144 | 145 | # Database dump 146 | *.dump 147 | *.sql 148 | 149 | -------------------------------------------------------------------------------- /HSM/utils/db_utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import pandas as pd 3 | from utils.config import SQLALCHEMY_URI 4 | from sqlalchemy_utils import database_exists, create_database 5 | from utils.db import ( 6 | Data, 7 | SupportData, 8 | ) 9 | from utils.config import ENTRY_ID 10 | from sqlalchemy import create_engine # , desc, event, func 11 | from sqlalchemy import func 12 | 13 | 14 | def create_postgres_db(): 15 | connection_string = SQLALCHEMY_URI 16 | engine = create_engine(connection_string, echo=False) 17 | if not database_exists(engine.url): 18 | create_database(engine.url) 19 | 20 | 21 | def insert_data(df, session): 22 | ''' 23 | Insert data and supporting data into database 24 | ''' 25 | 26 | filter_feature = 'Comments_Concatenated' 27 | validation = 'validated prediction' 28 | data_columns = [filter_feature, validation] 29 | 30 | data = df[data_columns] 31 | support_data = json.loads(df[df.columns.difference(data_columns)].to_json(orient='records')) 32 | 33 | for i in range(len(data)): 34 | 35 | data_row = data.iloc[i] 36 | support_data_row = support_data[i] 37 | 38 | data_obj = Data(filter_feature=str(data_row[filter_feature]), validation=int(data_row[validation])) 39 | session.add(data_obj) 40 | session.flush() 41 | support_data_obj = SupportData(support_data=support_data_row) 42 | data_obj.support_data = support_data_obj 43 | support_data_obj.data = data_obj 44 | support_data_obj.data_id = support_data_obj.data.id 45 | 
session.add(support_data_obj) 46 | 47 | session.commit() 48 | 49 | 50 | def get_data(session, filter_feature='filter_feature', validation='validation'): 51 | ''' 52 | Get data from database and return as dataframe 53 | ''' 54 | 55 | data_rows = [(row.filter_feature, row.validation) for row in session.query(Data).all()] 56 | df = pd.DataFrame(data_rows, columns=[filter_feature, validation]) 57 | return df 58 | 59 | 60 | def fetch_last_RespondentID(session): 61 | ''' 62 | Fetch the last RespondentID from the database to use with the Qualtrics API 63 | 64 | Parameters: 65 | session: an instance of a sqlalchemy session object created by DataAccessLayer 66 | 67 | Returns: 68 | last_response_id (str): the RespondentID of the last survey response 69 | ''' 70 | try: 71 | last_response = session.query(Data).order_by(Data.id.desc()).first() 72 | last_response_id = last_response.support_data.support_data[ENTRY_ID] 73 | 74 | except AttributeError: 75 | last_response_id = None 76 | 77 | return last_response_id 78 | 79 | 80 | def count_table_rows(table, session): 81 | rows = session.query(func.count(table.id)).scalar() 82 | 83 | return rows 84 | -------------------------------------------------------------------------------- /sample.env: -------------------------------------------------------------------------------- 1 | ## QUALTRICS SPECIFIC ENV VARS (Only needed if using Qualtrics as input) ## 2 | 3 | # This is the API Token to access Qualtrics API 4 | QUALTRICS_API_TOKEN=78901 5 | 6 | # This is the survey ID for the specific survey we want to use 7 | QUALTRICS_SW_SURVEY_ID=12345 8 | 9 | # This is the filename of the data being used once data can be uploaded 10 | QUALTRICS_FILENAME="survey.json" 11 | 12 | 13 | 14 | ## DOCKER/DATABASE SPECIFIC ENV VARS (Required to set up docker container) ## 15 | # This is the database username that will be used when setting up the database inside Docker container 16 | DB_USER=hsm 17 | 18 | # This is the database password that will be used when setting up the database inside Docker container 19 | DB_PASS=hsm 20 | 21 | # This is the database name that will be used when setting up the database inside Docker container 22 | DB_NAME=hsm-db 23 | 24 | # This is the database address that will be used 25 | DB_ADDR=mlaas-db 26 | 27 | # This is the web service container name inside Docker container 28 | # If you are running more than one dataset, this needs to be unique 29 | CONTAINER_NAME_WEB=mlaas-web 30 | 31 | # This is the database service container name inside Docker container 32 | # If you are running more than one dataset, this needs to be unique 33 | CONTAINER_NAME_DB=mlaas-db 34 | 35 | # This is the port being used in the host machine, when using API, this is the port being used locally 36 | # If you are running more than one dataset, this needs to be unique 37 | # If HOST_PORT=8080, the localhost address is http://127.0.0.1:8080 to access the API 38 | HOST_PORT=8080 39 | 40 | 41 | 42 | ## API SPECIFIC ENV VARS (Only needed when using API) ## 43 | # This is the secret key being used by the application 44 | APP_SECRET_KEY=CHANGE_TO_SOMETHING_SECRETIVE 45 | 46 | # This is the admin password for API 47 | ADMIN=CHANGE_TO_SOMETHING_SECRETIVE 48 | 49 | # This is the user password for API 50 | USER=CHANGE_TO_SOMETHING_SECRETIVE 51 | 52 | 53 | 54 | ## CLOUD.GOV ENV VARS (Only needed when using API) ## 55 | # This indicates that if Cloud.gov is being used for database and file storage, YES means we are, NO means we are not 56 | # or if CLOUD_GOV environment variable is not here, 
we are not using Cloud.gov as well 57 | CLOUD_GOV=NO 58 | 59 | ### CLOUD.GOV DATABASE SETTINGS ### 60 | # To obtain these data, you will need to do a SSH-Tunneling to the Cloud.gov database 61 | # Run `cf connect-to-service -no-client ` in a terminal 62 | # It will give you the following output, pull the corresponding information out as environment variables: 63 | # Finding the service instance details... 64 | # Setting up SSH tunnel... 65 | # SSH tunnel created. 66 | # Skipping call to client CLI. Connection information: 67 | 68 | # Host: localhost 69 | # Port: 70 | # Username: 71 | # Password: 72 | # Name: 73 | 74 | # Leave this terminal open while you want to use the SSH tunnel. Press Control-C to stop. 75 | 76 | # Pull the data under `Username` above 77 | CLOUD_DB_USER=hsm 78 | 79 | # Pull the data under `Password` above 80 | CLOUD_DB_PASS=hsm 81 | 82 | # This information should not be changed, as we are using Docker to bring up our environment 83 | # This will figure out the host address correctly 84 | CLOUD_DB_ADDR=host.docker.internal 85 | 86 | # Pull the data under `Port` above, this variable changes every time you run the command above 87 | CLOUD_DB_PORT=5432 88 | 89 | # Pull the data under `Name` above 90 | CLOUD_DB_NAME=hsm-db 91 | -------------------------------------------------------------------------------- /HSM/utils/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from werkzeug.security import generate_password_hash 3 | 4 | # FLASK SETTINGS 5 | APP_SECRET_KEY = os.environ['APP_SECRET_KEY'] 6 | APP_PORT = os.getenv("PORT", 8080) 7 | 8 | # USER SETTINGS 9 | users = { 10 | "admin": generate_password_hash(os.getenv("ADMIN")), 11 | "user": generate_password_hash(os.getenv("USER")) 12 | } 13 | 14 | # DATABASE SETTINGS 15 | print(f'CLOUD_GOV:{os.getenv("CLOUD_GOV")}') 16 | DB_DIALECT = "postgresql+psycopg2" 17 | if not os.getenv('CLOUD_GOV') or os.getenv('CLOUD_GOV') == 'NO': 18 | DB_USER = os.getenv('DB_USER') 19 | DB_PASS = os.getenv('DB_PASS') 20 | DB_ADDR = os.getenv('DB_ADDR') 21 | DB_NAME = os.getenv('DB_NAME') 22 | 23 | SQLALCHEMY_URI = f"{DB_DIALECT}://{DB_USER}:{DB_PASS}@{DB_ADDR}/{DB_NAME}" 24 | else: # CLOUD_GOV 25 | CLOUD_DB_USER = os.getenv('CLOUD_DB_USER') 26 | CLOUD_DB_PASS = os.getenv('CLOUD_DB_PASS') 27 | CLOUD_DB_ADDR = os.getenv('CLOUD_DB_ADDR') 28 | CLOUD_DB_PORT = os.getenv('CLOUD_DB_PORT') 29 | CLOUD_DB_NAME = os.getenv('CLOUD_DB_NAME') 30 | SQLALCHEMY_URI = f"{DB_DIALECT}://{CLOUD_DB_USER}:{CLOUD_DB_PASS}@{CLOUD_DB_ADDR}:{CLOUD_DB_PORT}/{CLOUD_DB_NAME}" 31 | print(f'SQLALCHEMY_URI: {SQLALCHEMY_URI}') 32 | 33 | 34 | # QUALTRICS API SETTINGS 35 | apiToken = os.environ['QUALTRICS_API_TOKEN'] 36 | survey_id = os.environ['QUALTRICS_SW_SURVEY_ID'] 37 | filename = os.environ['QUALTRICS_FILENAME'] 38 | if filename: 39 | filename = filename.replace('"', '').replace("'", "") 40 | 41 | qualtrics_sitewide_creds = { 42 | "apiToken": apiToken, 43 | "surveyId": survey_id, 44 | "filename": filename, 45 | } 46 | 47 | # INPUT FILE SETTINGS 48 | INPUT_DIR = os.path.join(os.getcwd(), 'HSM', 'model', 'inputs') 49 | if not os.path.exists(INPUT_DIR): 50 | os.makedirs(os.path.join(INPUT_DIR)) 51 | 52 | # CELERY SETTINGS 53 | 54 | # SPREADSHEET SETTINGS 55 | 56 | # [ACTIONS] These are fields that need to be updated for your input dataset. 
57 | # The list of fieldnames that you want to appear in your ClassificationResults.xlsx 58 | FIELDS = [ 59 | "ResponseID", 60 | "pageType", 61 | "StartDate", 62 | "EndDate", 63 | "Country", 64 | "State", 65 | "UPVC", 66 | "TVPC", 67 | "Site_Referrer", 68 | "PR_URL", 69 | "CP_URL", 70 | "Asset_Click", 71 | "Q1", 72 | "Q4", 73 | "Comments Concatenated", 74 | "SPAM", 75 | "Q2", 76 | "Q3", 77 | "Q5", 78 | "Q6", 79 | "Q7", 80 | "Q8", 81 | "Q9", 82 | "DeviceType", 83 | "Referer", 84 | "History", 85 | "Browser Metadata Q_1_TEXT", 86 | "Browser Metadata Q_2_TEXT", 87 | "Browser Metadata Q_3_TEXT", 88 | "Browser Metadata Q_4_TEXT", 89 | # "Duration (in seconds)", 90 | ] 91 | 92 | # DATA COLUMNS SETTINGS 93 | 94 | # [ACTIONS] Fields are needed to do filter, they will be combined to be used for prediction and training 95 | FILTER_FEATURE_FIELDS = ['Q5', 'Q7', 'Q6', 'Q3'] 96 | 97 | # [ACTIONS] Fielded from the raw spreadsheet to be included when processing data, 98 | # key is the field name we want to use for the processed data dataframe 99 | # value is the field name being used in the raw data 100 | FIELDS_TO_INCLUDED_FOR_PROCESSED_DATA_MAPPING = {'ResponseID': 'ResponseID', 'Date': 'EndDate'} 101 | 102 | # [ACTIONS] Field name that represents the filter feature 103 | FILTER_FEATURE = 'Comments Concatenated' 104 | 105 | # [ACTIONS] Field name that represents the normalized filter feature 106 | NORMALIZED_FILTER_FEATURE = 'Normalized Comments' 107 | 108 | # [ACTIONS] Field name to place the prediction/validation 109 | PREDICTION_FIELD_NAME = 'SPAM' 110 | 111 | # [ACTIONS] Field name that identify each row in the raw spreadsheet 112 | ENTRY_ID = 'ResponseID' 113 | -------------------------------------------------------------------------------- /HSM/utils/qualtrics.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import zipfile 3 | import json 4 | import os 5 | import sys 6 | import pandas as pd 7 | from time import sleep 8 | from utils.config import qualtrics_sitewide_creds 9 | from utils import db, db_utils 10 | 11 | 12 | class QualtricsApi: 13 | """Query Qualtrics API for new survey responses and then write to database. 14 | 15 | Attributes: 16 | apiToken (str): a Qualtrics API token. 17 | surveyId (str): the survey id. 18 | fileFormat (str): the preferred file format. Only 'json' is possible now. 19 | dataCenter (str): the datacenter from the hostname of the qualtrics 20 | account url 21 | """ 22 | 23 | def __init__(self, last_response_id, apiToken=None, surveyId=None, fileFormat='json', 24 | dataCenter='cemgsa'): 25 | print("Getting data from Qualtrics...") 26 | if not apiToken and not surveyId: 27 | apiToken = qualtrics_sitewide_creds['apiToken'] 28 | surveyId = qualtrics_sitewide_creds['surveyId'] 29 | 30 | self.apiToken = apiToken 31 | self.surveyId = surveyId 32 | self.fileFormat = fileFormat 33 | self.dataCenter = dataCenter 34 | if not last_response_id: 35 | db_utils.create_postgres_db() 36 | db.dal.connect() 37 | session = db.dal.Session() 38 | last_response_id = db_utils.fetch_last_RespondentID(session) 39 | self.lastResponseId = last_response_id 40 | 41 | def download_responses(self): 42 | """ 43 | Void function that gets and writes survey responses within the working 44 | directory. 45 | 46 | The process of getting survey responses requires four steps: 47 | 1. Request the responses with the CreateResponseExport API. 48 | 2. Request the export status with the GetResponseExportProgress API. 49 | 3. 
Once the export progress is 100, make a GET request to retrieve 50 | the response file, which will be a zipped file. 51 | 4. Unzip the file to find the survey responses in the format you 52 | requested (csv, csv2013, xml, json, or spss). 53 | 54 | Returns: 55 | None 56 | """ 57 | 58 | # Setting static parameters 59 | requestCheckProgress = 0 60 | baseUrl = "https://{0}.gov1.qualtrics.com/API/v3/responseexports/".format(self.dataCenter) 61 | headers = { 62 | "content-type": "application/json", 63 | "x-api-token": self.apiToken, 64 | } 65 | # Step 1: Creating Data Export 66 | downloadRequestUrl = baseUrl 67 | downloadRequestPayload = { 68 | "format": self.fileFormat, 69 | "surveyId": self.surveyId, 70 | "useLabels": True 71 | } 72 | # Include lastResponseId in payload if provided during init 73 | if self.lastResponseId: 74 | downloadRequestPayload['lastResponseId'] = self.lastResponseId 75 | 76 | downloadRequestResponse = requests.request("POST", downloadRequestUrl, 77 | data=json.dumps(downloadRequestPayload), 78 | headers=headers) 79 | 80 | status_code = downloadRequestResponse.json()['meta']['httpStatus'] 81 | if '200' in status_code: 82 | print('Post Request to Qualtrics was a success!') 83 | else: 84 | print(status_code) 85 | # TODO: log errors, including 500 status codes (see GH37) 86 | sys.exit(0) 87 | progressId = downloadRequestResponse.json()["result"]["id"] 88 | 89 | # Step 2: Checking on Data Export Progress and waiting until export is ready 90 | while requestCheckProgress < 100: 91 | sleep(2) 92 | requestCheckUrl = baseUrl + progressId 93 | print(requestCheckUrl) 94 | requestCheckResponse = requests.request("GET", requestCheckUrl, headers=headers) 95 | requestCheckProgress = requestCheckResponse.json()["result"]["percentComplete"] 96 | print("Download is " + str(requestCheckProgress) + " complete") 97 | 98 | # Step 3: Downloading file 99 | requestDownloadUrl = baseUrl + progressId + '/file' 100 | print(requestDownloadUrl) 101 | requestDownload = requests.request("GET", requestDownloadUrl, 102 | headers=headers, stream=True) 103 | 104 | # Step 4: Unzipping the file 105 | with open("RequestFile.zip", "wb") as f: 106 | for chunk in requestDownload.iter_content(chunk_size=1024): 107 | f.write(chunk) 108 | zipfile.ZipFile("RequestFile.zip").extractall("temp") 109 | os.remove("RequestFile.zip") 110 | 111 | def get_data(self): 112 | """ 113 | Convert the json into a pandas dataframe 114 | """ 115 | file_name = os.path.join(os.getcwd(), 'temp', qualtrics_sitewide_creds['filename']) 116 | with open(file_name, encoding='utf8') as f: 117 | data = json.load(f) 118 | df = pd.DataFrame(data['responses']) 119 | # replace np.nan with None so sql insertions don't insert 'nan' strings 120 | df = df.where(pd.notnull(df), None) 121 | os.remove(file_name) 122 | df_n_rows = df.shape[0] 123 | # if number of rows more than zero 124 | if df_n_rows > 0: 125 | return df 126 | else: 127 | print("No new survey responses to download. 
Exiting") 128 | sys.exit(0) 129 | -------------------------------------------------------------------------------- /HSM/model/predict.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pandas as pd 3 | import dill as pickle 4 | from datetime import datetime 5 | from model.train import TrainClassifier 6 | from utils.config import ( 7 | ENTRY_ID, 8 | FIELDS, 9 | FIELDS_TO_INCLUDED_FOR_PROCESSED_DATA_MAPPING, 10 | FILTER_FEATURE, 11 | FILTER_FEATURE_FIELDS, 12 | NORMALIZED_FILTER_FEATURE, 13 | PREDICTION_FIELD_NAME, 14 | survey_id, 15 | ) 16 | 17 | 18 | class MakePredictions(): 19 | 20 | def __init__(self, df, survey_type='sw'): 21 | self.df = df 22 | if survey_type == 'sw': 23 | model_path = os.path.join(os.getcwd(), 'HSM', 'model', 'best_estimators', 'model_sw.pkl') 24 | self.model = model_path 25 | # TODO: This two paths look the same, what was the intention here? 26 | else: 27 | model_path = os.path.join(os.getcwd(), 'HSM', 'model', 'best_estimators', 'model_sw.pkl') 28 | self.model = model_path 29 | 30 | def prepare_data(self): 31 | df = self.df 32 | comments_concatenated = "" 33 | comments_original = "" 34 | 35 | for field in FILTER_FEATURE_FIELDS: 36 | # Because these types are python objects, they need to be converted. 37 | value = df[field].astype(str) 38 | comments_concatenated += value 39 | comments_original += f'{field}: {value}\n' 40 | df['Comments_Concatenated'] = comments_concatenated.apply(lambda x: x.strip()) 41 | df[NORMALIZED_FILTER_FEATURE] = df['Comments_Concatenated'].apply(TrainClassifier().get_lemmas) 42 | X = df[NORMALIZED_FILTER_FEATURE] 43 | 44 | result_series = {s: df[FIELDS_TO_INCLUDED_FOR_PROCESSED_DATA_MAPPING[s]] 45 | for s in FIELDS_TO_INCLUDED_FOR_PROCESSED_DATA_MAPPING} 46 | 47 | result_series['Original Survey Responses'] = comments_original 48 | 49 | return X, result_series 50 | 51 | def predict(self): 52 | with open(self.model, 'rb') as f: 53 | pickled_model = pickle.load(f) 54 | X, data_columns = self.prepare_data() 55 | preds = pickled_model.predict(X) 56 | dec_func = pickled_model.decision_function(X) 57 | labeled_data_df = pd.DataFrame(X) 58 | labeled_data_df.columns = [FILTER_FEATURE] 59 | labeled_data_df[PREDICTION_FIELD_NAME] = preds 60 | labeled_data_df['Decision Boundary Distance'] = abs(dec_func) 61 | for col in data_columns: 62 | labeled_data_df[col] = data_columns[col] 63 | 64 | print("Combining all specified columns and prediction...") 65 | print("Here's the list of available items to choose from the raw data:") 66 | print(list(self.df)) 67 | print("Here's the list of available items to choose from the processed prediction data:") 68 | print(list(labeled_data_df)) 69 | # Using Outer Join to get all the data even if there's missing info on one side 70 | joined_df = pd.merge(self.df, labeled_data_df, on=ENTRY_ID, how='outer') 71 | 72 | if PREDICTION_FIELD_NAME + '_x' in joined_df.columns: # This means there are two SPAM columns 73 | # There can be two columns for the prediction fields, one in df (which will have a suffix of _x), 74 | # one in labeled_data_df (which will have a suffix of _y), 75 | # and we will keep the one from labeled_data_df because it holds the actual prediction 76 | # but this field needs to rename to with _y. 77 | # i.e. 
SPAM is the field name and it appears in df and labeled_data_df, then we will have SPAM_x 78 | # and SPAM_y column when joined in joined_df, right now assuming we are using SPAM_y because it 79 | # holds the actual prediction, and SPAM_x should be removed because it came from the raw data. 80 | # We also need to rename SPAM_y to SPAM 81 | joined_df = joined_df.drop(columns=PREDICTION_FIELD_NAME + '_x') 82 | joined_df = joined_df.rename(columns={PREDICTION_FIELD_NAME + '_y': PREDICTION_FIELD_NAME}) 83 | 84 | # Try to figure out what is valid columns 85 | valid_fields = [field for field in FIELDS if field in list(joined_df)] 86 | 87 | if valid_fields != FIELDS: 88 | invalid_fields = [field for field in FIELDS if field not in list(joined_df)] 89 | print("Here is a list of fields that cannot be found in the data:") 90 | print(invalid_fields) 91 | print("Skipping them in the output...") 92 | 93 | # Only pick out what the user wants to output + the filter feature and normalized filter feature, 94 | # and the Decision Boundary Distance 95 | joined_df = joined_df[valid_fields + [FILTER_FEATURE, NORMALIZED_FILTER_FEATURE, "Decision Boundary Distance"]] 96 | print("Here is the final list of the valid user-choosen fields in the config.py file we are using.") 97 | print(list(valid_fields)) 98 | 99 | results_dir = os.path.join(os.getcwd(), 'HSM', 'model', 'results') 100 | if not os.path.exists(results_dir): 101 | os.makedirs(os.path.join(results_dir)) 102 | outfile = 'ClassificationResults_{}_{}.xlsx'.format(survey_id, datetime.now().strftime('%Y%m%d-%H%M%S')) 103 | results_path = os.path.join(results_dir, outfile) 104 | writer = pd.ExcelWriter(results_path, engine='xlsxwriter', options={'strings_to_urls': False}) 105 | joined_df.to_excel(writer, 'Classification Results', index=False) 106 | writer.save() 107 | id_pred_map = dict(zip(labeled_data_df[ENTRY_ID], 108 | labeled_data_df[PREDICTION_FIELD_NAME])) 109 | df = self.df.drop(labels=[NORMALIZED_FILTER_FEATURE], axis=1) 110 | 111 | return results_path, df, id_pred_map, outfile 112 | -------------------------------------------------------------------------------- /HSM/app.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import pandas as pd 4 | from argparse import ArgumentParser 5 | from utils import config, qualtrics, validate, db, db_utils 6 | from model import predict, train 7 | 8 | 9 | def get_survey_data(session, excel_filename=None): 10 | ''' 11 | Implements the Qualtrics class to get most recent qualtrics survey data from their API. 
12 | 13 | Parameters: 14 | session: a sqlalchemy session object 15 | excel_filename: The filename of the Excel file that has the input 16 | 17 | Returns: 18 | df (pandas DataFrame): a dataframe of the survey data 19 | ''' 20 | if excel_filename: 21 | print(f'******** Getting data from Excel Spreadsheet at location {excel_filename} ********') 22 | input_path = os.path.join(config.INPUT_DIR, excel_filename) 23 | df = pd.read_excel(input_path) 24 | 25 | else: # QUALTRICS 26 | print(f'******** Reading data from Qualtrics ********') 27 | last_response_id = db_utils.fetch_last_RespondentID(session) 28 | qa = qualtrics.QualtricsApi(last_response_id) 29 | qa.download_responses() 30 | df = qa.get_data() 31 | 32 | return df 33 | 34 | 35 | def make_predictions(df): 36 | ''' 37 | Given a dataframe of survey data, parse the comments, feed them to model, and make predictions 38 | 39 | Parameters: 40 | df (pandas DataFrame): a dataframe of the survey data, as returned by get_survey_data() 41 | 42 | Returns: 43 | results_path (str): abs path to the results, ClassificationResults.xlsx 44 | df (pandas DataFrame): a dataframe of the survey data, with new columns for the predictions and decision 45 | boundary 46 | id_pred_map (dict): a dict mapping Qualtrics ResponseIDs to the SPAM predition (i.e. 0 (ham) or 1 (spam)) 47 | ''' 48 | 49 | mp = predict.MakePredictions(df, survey_type='sw') 50 | results_path, df, id_pred_map, outfile = mp.predict() 51 | 52 | return results_path, df, id_pred_map, outfile 53 | 54 | 55 | def user_prompt(outfile): 56 | print("Done making predictions. You can find the results in {}".format(outfile)) 57 | print('-'*80) 58 | print("Take a moment to review the predictions.") 59 | print("Change those that you disagree with.") 60 | print("When you're done, save and exit the spreadsheet. Then return here.") 61 | time.sleep(10) 62 | user_input = '' 63 | while user_input != 'y': 64 | user_input = str(input("If you've finished reviewing the predictions, enter 'y': ")) 65 | print("Inserting data into database. It may take a while. Hold on...") 66 | 67 | 68 | def get_validations(results_path): 69 | ''' 70 | After the user has had time to review the predictions and make/save corrections, read in the file and get 71 | validations 72 | 73 | Parameters: 74 | results_path (str): abs path to the results, ClassificationResults.xlsx 75 | 76 | Returns: 77 | validated_id_pred_map (dict): a dict mapping Qualtrics ResponseIDs to the user-validated SPAM predictions 78 | ''' 79 | 80 | v = validate.Validate(results_path) 81 | validated_id_pred_map = v.get_validations() 82 | 83 | return validated_id_pred_map 84 | 85 | 86 | def insert_data(df, validated_id_pred_map, id_pred_map, survey_name, model_description, session): 87 | ''' 88 | Void function to insert data into postgres database, as configured in config.py 89 | 90 | Parameters: 91 | df (pandas DataFrame): a dataframe of the survey data, as returned by get_predictions() 92 | validated_id_pred_map (dict): a dict mapping Qualtrics ResponseIDs to the user-validated SPAM predictions 93 | id_pred_map (dict): a dict mapping Qualtrics ResponseIDs to the SPAM prediction (i.e. 0 (ham) or 1 (spam)) 94 | survey_name (str): name of the survey we're dealing with 95 | model_description (str): file name for the model (e.g. 
model_sw.pkl) 96 | session: a sqlalchemy session object 97 | ''' 98 | 99 | df['prediction'] = df['ResponseID'].map(id_pred_map) 100 | df['validated prediction'] = df['ResponseID'].map(validated_id_pred_map) 101 | db_utils.insert_data(df, session) 102 | 103 | 104 | def retrain_model(session): 105 | ''' 106 | Retrain the model using all of the data in the database. Calls train to train the model and save 107 | it to the designated place. 108 | 109 | Parameters: 110 | session: a database session that will be passed in to access the database data 111 | ''' 112 | 113 | df_comment_spam = db_utils.get_data(session, filter_feature='Comments Concatenated', validation='SPAM') 114 | 115 | train.main(df_comment_spam) 116 | 117 | 118 | def main(survey_name="Site-Wide Survey English", model_description="model_sw.pkl", excel_filename=None): 119 | ''' 120 | Create db if it doesn't exist; fetch survey data from Qualtrics; make predictions; provide the user 121 | with a chance to validate the predictions in a spreadsheet; and insert data into db. 122 | ''' 123 | 124 | db_utils.create_postgres_db() 125 | db.dal.connect() 126 | session = db.dal.Session() 127 | df = get_survey_data(session, excel_filename) 128 | results_path, df, id_pred_map, outfile = make_predictions(df) 129 | user_prompt(outfile) 130 | validated_id_pred_map = get_validations(results_path) 131 | 132 | insert_data(df, validated_id_pred_map, id_pred_map, survey_name, model_description, session) 133 | session.commit() 134 | 135 | retrain_model(session) 136 | 137 | print("DONE!") 138 | 139 | 140 | if __name__ == '__main__': 141 | 142 | program_desc = '''This application will get survey data from Qualtrics and make predictions on the data. 143 | It will then retrain the model based on the validated data.''' 144 | 145 | parser = ArgumentParser(description=program_desc) 146 | parser.add_argument("-s", "--survey_name", dest="survey_name", 147 | help="specify survey name to use", default="Site-Wide Survey English") 148 | parser.add_argument("-m", "--model", 149 | default="model_sw.pkl", 150 | help="specify model file name") 151 | parser.add_argument("-i", "--input", default=None, 152 | help="specify input Excel file name; otherwise the Qualtrics API is used to get data. The file is " 153 | "expected to be saved in the 10x-MLaaS/HSM/model/inputs folder") 154 | 155 | args = parser.parse_args() 156 | 157 | main(survey_name=args.survey_name, model_description=args.model, excel_filename=args.input) 158 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![MlaaS](https://i.postimg.cc/vZDzYQb2/mlaas2.jpg) 2 | # 10x Machine Learning as a Service 3 | [Formerly known as Qualitative Data Management] 4 | 5 | Making it easier to identify what information the public is looking for. 6 | 7 | We are currently in the **third phase of [10x][tenx]: Development**. We estimate this phase will end in May 2020. 8 | 9 | ## Overview 10 | 11 | > USA.gov and Gobierno.USA.gov provide a vital service: connecting members of the public with official information about government services. In an effort to better serve the public, both sites collect (and act on) feedback from users. They collect this feedback through various channels: call centers, the chat feature, sitewide surveys, and page-level surveys. (Our research focused almost entirely on page-level surveys.) 
For context, page-level surveys appear on “nearly every content page, directory record page, and state details page” — in other words, pages with the content that users need to answer their questions. 12 | 13 | ### The problem statement 14 | 15 | > As a government employee, how can I more quickly and effectively analyze comments provided by site visitors to identify timely issues, improve the usability of the site and information provided, and further direct visitors to important services and information they need? 16 | 17 | ### Our challenge 18 | 19 | > Help the USAgov team better serve their users by (a) introducing process improvements and simple automation in analyzing open-ended comments submitted through their website and (b) introducing experimental sentiment-analysis tools and other qualitative data analysis techniques to more robustly analyze these data with less human intervention. 20 | 21 | As expected, the scope of our project has shifted to offering these machine learning tools to the entire Office of Customer Experience. During Phase II, we prototyped and delivered a machine learning tool to aid the USAgov team, but we believe this tool (or similar SaaS) could be leveraged to reduce the burden on other teams in the Office of Customer Experience, as well as outside GSA. 22 | 23 | During Phase III, we narrowed the scope from development of an expansive machine learning service to building an MVP that will use open text data to (1) provide data insights, and (2) decrease the time needed to manually classify and identify themes. We would like to introduce you to _MeL_, which uses machine learning to filter, classify, and provide user sentiment, so that you have greater insights into your text data in less time. 24 | 25 | We continue to work with the Office of Customer Experience on the development of this MVP and are looking to work with other federal agencies and datasets to explore different use cases for MeL. 26 | 27 | You can follow _MeL_'s development journey [here](https://github.com/18F/10x-MeL/). 28 | 29 | 30 | ## Who are we? 31 | 32 | Team members: 33 | 34 | - Tiffany Andrews, Innovation Specialist, [18F](eighteenf) 35 | - Adam Gerber, Flexion Data Scientist and Machine Learning Engineer 36 | - Will Cahoe, Program Analyst, [10x](tenx) 37 | 38 | 39 | Advisers: 40 | 41 | - David Kaufmann, USAgov Analytics, [Office of Products and Platforms](opp) 42 | - Marybeth Murphy, USAgov Analytics, [Office of Products and Platforms](opp) 43 | - Scott McAllister, Data Scientist, [Office of Products and Platforms](opp) 44 | 45 | Former team members: 46 | - Amy Mok, Innovation Specialist, [18F](eighteenf) 47 | - Amos Stone, Innovation Specialist, [Login](login) 48 | - Colin Craig, Innovation Specialist, [18F](eighteenf) 49 | - Chris Goranson, research, [18F](eighteenf) 50 | - Kate Saul, research, [18F](eighteenf) 51 | 52 | 53 | ## Progress 54 | 55 | We are tracking the work for this Phase on our [Kanban board](https://github.com/18F/10x-MLaaS/projects/2). 56 | 57 | Any issues or ideas that we want to keep track of for later are being noted in 58 | the [GitHub issues](https://github.com/18F/10x-MLaaS/issues). 59 | 60 | We post weekly progress updates in [updates](updates). 61 | 62 | 63 | ## Investigation 64 | 65 | 1. **USA.gov’s data-management process is entirely manual.** Although the team uses HHS’s Voice of the Customer tool to capture survey data, all of the review and analysis are manual. 66 | 1.
**This manual process takes time and creates significant challenges.** Manual review and analysis take a considerable amount of time — time that could be spent creating more effective content and replying to urgent user inquiries. 67 | 1. **Workflow improvements would complement automation.** In addition to automating data processing, identifying a product owner, documenting the workflow, and finding other ways to streamline the process would increase efficiency. 68 | 1. **Automation improvements will speed up the workflow improvements and reduce errors.** Steps that are largely duplicative between analysis periods can be automated, thereby allowing the USA.gov team to spend more time gleaning insights from the valuable data. 69 | 1. **The USA.gov team enthusiastically seeks process improvements.** Everyone we spoke to emphasized the need for process improvements and an openness toward change. 70 | 1. **Novel approaches to qualitative data enhancements can be applied without getting in the way.** Once workflow and automation enhancements are complete, the analyses can be further explored using methods and tools that work well for analyzing qualitative data (natural language processing, sentiment analysis). 71 | 1. **We recommend moving forward with this project.** Based on the potential for improving USA.gov’s service offerings, the applicability of automation tactics to other federal agencies and other qualitative data held by the government, the impact improvements will have on the lives of American citizens, and the team’s openness to change, we recommend that the next phase of this project be funded. 72 | 73 | The full Phase I investigation report is available [here](https://docs.google.com/document/d/1InUpl7v3wa0v05JYCB8-atoDene9-Gzbz-ELY7OPVKY/). 74 | 75 | 76 | ## Contributing 77 | 78 | See [CONTRIBUTING](CONTRIBUTING.md) for additional information. 79 | 80 | Join us in 81 | [#10x-mlaas](https://gsa-tts.slack.com/messages/C9QNC7STG) or [ask 82 | us a question](https://github.com/18F/10x-MLaaS/issues/new). 83 | 84 | 85 | ## Public domain 86 | 87 | This project is in the worldwide [public domain](LICENSE.md). As stated in [CONTRIBUTING](CONTRIBUTING.md): 88 | 89 | > This project is in the public domain within the United States, and copyright and related rights in the work worldwide are waived through the [CC0 1.0 Universal public domain dedication](https://creativecommons.org/publicdomain/zero/1.0/). 90 | > 91 | > All contributions to this project will be released under the CC0 dedication. By submitting a pull request, you are agreeing to comply with this waiver of copyright interest. 92 | 93 | [tts]: https://www.gsa.gov/about-us/organization/federal-acquisition-service/technology-transformation-services 94 | [tenx]: https://10x.gsa.gov 95 | [eighteenf]: https://18f.gsa.gov 96 | [opp]: https://www.gsa.gov/about-us/organization/federal-acquisition-service/technology-transformation-services/office-of-products-and-programs 97 | 98 | -------------------------------------------------------------------------------- /HSM/README.MD: -------------------------------------------------------------------------------- 1 | # HSM 2 | `app.py` provides a command line interface for the Ham-Spam Machine (HSM). Currently, it can fetch and classify responses to the **site-wide version** and **page-level version** of the surveys, and possibly other surveys. It uses a local instance of a PostgreSQL database to store the data for model retraining purposes.
It also allows the user to validate comment predictions in Excel while the script is sleeping. 3 | 4 | ## Requirements 5 | - [Docker](https://www.docker.com/) 6 | - [Python](https://www.python.org/) (3.6) 7 | - [pipenv](https://pipenv.kennethreitz.org/en/latest/) 8 | 9 | ## Limitations 10 | - Currently we are in the testing phase. We have only tested with about 18K rows of data. If the dataset is larger than that, there is a chance the model cannot be trained because of an issue that is unresolved at this point. See [issue description here](https://github.com/18F/10x-MLaaS/issues/100). 11 | 12 | ## Getting Started 13 | 14 | ### Step 1: Clone the Repository 15 | Navigate to where you'd like to clone the repo. Then clone it: 16 | ```bash 17 | git clone https://github.com/18F/10x-MLaaS.git 18 | ``` 19 | 20 | Now `cd` into the repository you've just cloned: 21 | ```bash 22 | cd 10x-MLaaS # or the folder name you used above 23 | ``` 24 | 25 | ### Step 2: Configure the application 26 | 27 | #### Step a: Configure Environment Variables 28 | An `.env` file is needed to specify settings for accessing the Qualtrics API, setting up the Docker containers, and configuring the API. 29 | 30 | First, copy the template in `sample.env`: 31 | 32 | ```bash 33 | cp sample.env .env 34 | ``` 35 | 36 | Modify the environment variables in `.env` to set up for your specific dataset. See details in `.env`. If you want to store data on the cloud instead of using a local database and filesystem, see the instructions on [Cloud.gov Configuration](#cloudgov-configuration). This will allow you to have a database and filesystem on the cloud for sharing. 37 | 38 | Install Pipenv if it is not installed: 39 | ```bash 40 | pip install pipenv 41 | ``` 42 | 43 | Create a virtual environment with Pipenv to load the environment variables locally so the Docker Compose script can build and bring up your containers: 44 | ```bash 45 | pipenv shell 46 | ``` 47 | 48 | If you ever make a change to the `.env` file, you will need to restart your virtual environment and rebuild your containers. 49 | Assuming you already started `pipenv shell` and you just updated your `.env`, then you can use the following commands: 50 | ```bash 51 | exit # This will exit out of the virtual environment 52 | pipenv shell 53 | ``` 54 | 55 | After that is done, you will see `(10x-MLaaS)` (or the name of the base folder, if you have changed it) in front of your command line prompt. 56 | 57 | #### Step b: Configure Data Columns 58 | Specific settings in `10x-MLaaS/HSM/utils/config.py` tell the tool how to read the input dataset for prediction. Those labeled `[ACTION]` will need to be updated for your specific dataset. These settings determine how the classification results spreadsheet is created, which columns are used for the filter feature, the prediction, and the row identifier, and which columns are needed to process the data.
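The actual `config.py` in the repository is the source of truth, but as a rough, hypothetical sketch of what the `[ACTION]` items look like (the setting names are taken from how they are used in `train.py` and the testing notes below; the values shown are illustrative only):

```python
# Illustrative sketch only -- not the actual utils/config.py. Check the real file
# for the authoritative setting names and values for your dataset.
import os

# Directory the input spreadsheets are read from (used by app.py via config.INPUT_DIR)
INPUT_DIR = os.path.join('model', 'inputs')

# [ACTION] Columns to carry through into the classification results spreadsheet
FIELDS = ['ResponseID', 'Comments Concatenated', 'SPAM']

# [ACTION] Raw comment columns that get combined into the filter feature
FILTER_FEATURE_FIELDS = ['Q5_Comment', 'Q7_Comment']   # hypothetical column names
FILTER_FEATURE = 'Comments Concatenated'               # combined column the model reads
NORMALIZED_FILTER_FEATURE = 'Normalized Comments'      # cleaned/lemmatized version

# [ACTION] Prediction column and row identifier
PREDICTION_FIELD_NAME = 'SPAM'                         # 0 = ham, 1 = spam
ENTRY_ID = 'ResponseID'                                # Qualtrics response identifier
```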
59 | 60 | ### Step 3: Install Docker 61 | NOTE: You can skip this step if Docker is already installed on your system. 62 | 63 | https://docs.docker.com/compose/install/ 64 | 65 | 66 | ## Using the Tool 67 | #### Launching HSM and the Qualtrics Updater 68 | 69 | At the top level of the repository, there is a folder named `launcher`. 70 | Within that folder, there are two files that end in `.command`: 71 | ``` 72 | start_hsm.command 73 | ``` 74 | and 75 | ``` 76 | update_qualtrics.command 77 | ``` 78 | 79 | You can double-click these files to launch them: 80 | 1. Double-click `start_hsm.command` to launch the HSM tool, which should be running before you proceed to the next step. 81 | 2. Double-click `update_qualtrics.command` to launch the Qualtrics Updater, which pulls down survey data from 82 | Qualtrics. If you intend to load data other than Qualtrics survey data, you will currently have to use the command line 83 | to load that data (for example, loading data from an Excel spreadsheet - please see below). 84 | 85 | 86 | ### Input dataset is an Excel Spreadsheet 87 | If the input is an Excel file (.xlsx), the Excel input file must be saved in `10x-MLaaS/HSM/model/inputs`. 88 | You only need to supply the filename in the command below. Replace `<filename>` with the actual filename. 89 | 90 | In another terminal, run the application within `${CONTAINER_NAME_WEB}` (`${CONTAINER_NAME_WEB}` is an 91 | environment variable you have specified in your `.env` file): 92 | ```bash 93 | pipenv shell 94 | docker exec --user hsm --workdir /home/hsm -it ${CONTAINER_NAME_WEB} /bin/bash -c "python ~/HSM/app.py -i <filename>" 95 | ``` 96 | 97 | For example, if the file path is `~/workspace/10x-MLaaS/HSM/model/inputs/survey.xlsx`, the filename would be `survey.xlsx`. 98 | The command would be as follows: 99 | ```bash 100 | docker exec --user hsm --workdir /home/hsm -it ${CONTAINER_NAME_WEB} /bin/bash -c "python ~/HSM/app.py -i survey.xlsx" 101 | ``` 102 | 103 | ## Using the CLI 104 | Now that your environment is set up, you can use `app.py` as a CLI tool. Here's what that script does: 105 | - Downloads data from the Qualtrics API. 106 | - If it's your first time running this, it'll download all responses to date. Otherwise it'll check the database for the last response and then only fetch new responses. 107 | - Feeds concatenated survey comments to a pre-trained `sklearn` classifier to predict spam (1) or ham (0). 108 | - You can change which fields are returned as part of the output spreadsheet in `utils/config.py`. 109 | - Keep in mind that the `SPAM` and `Comments Concatenated` fields should be included for training purposes later. 110 | - Sleeps to give you time to review the predictions in `HSM/model/results/ClassificationResults.xlsx` 111 | - When reviewing the results, the prediction is in the `SPAM` column (0 = ham and 1 = spam). 112 | - Make your changes in place, overwriting the prediction if you disagree. 113 | - Save and exit the file once you're done. Do not alter the file name. 114 | - Return to your terminal and enter `y` to tell the script to wake up and continue. 115 | 116 | - Inserts the survey data along with model predictions and your validation into the database. 117 | 118 | 119 | ## Cloud.gov Configuration 120 | These are work-in-progress instructions, as they have only been quickly tested. You will need to swap out `docker-compose.yml` to use the `docker-compose-cloud.yml` file first. Save the current `docker-compose.yml` in a safe place so that you can swap it back in when you want to use the database within Docker again. The following commands include moving `docker-compose.yml` to `docker-compose-local.yml`: 121 | ```bash 122 | mv docker-compose.yml docker-compose-local.yml 123 | mv docker-compose-cloud.yml docker-compose.yml 124 | ``` 125 | 126 | ### Cloud.gov Database 127 | This tool uses a database to store the full dataset for training the model and to keep all available data so the same data does not have to be downloaded from Qualtrics again.
In order to use a database on Cloud.gov, you will need an account on cloud.gov, and you will create an application with a PostgreSQL database service instance. This service instance is then bound to the application you create. Once that is set up, you will need to log into cloud.gov in your terminal: 128 | ```bash 129 | cf login -a api.fr.cloud.gov --sso 130 | ``` 131 | 132 | You will need to target the organization and space where your application lives. Then you will need to install [CF-Service-Connect](https://github.com/18F/cf-service-connect) if you haven't. This will allow you to SSH tunnel into the Cloud.gov environment so you have access to your database. Once installed, connect to the database by running the following command, replacing the values in the `< >`: 133 | ```bash 134 | cf connect-to-service -no-client <app-name> <service-instance-name> 135 | ``` 136 | 137 | You will get output that looks like the following; pull the corresponding information out as environment variables: 138 | ```bash 139 | Finding the service instance details... 140 | Setting up SSH tunnel... 141 | SSH tunnel created. 142 | Skipping call to client CLI. Connection information: 143 | 144 | Host: localhost 145 | Port: <port> 146 | Username: <username> 147 | Password: <password> 148 | Name: <database-name> 149 | 150 | Leave this terminal open while you want to use the SSH tunnel. Press Control-C to stop. 151 | ``` 152 | 153 | 154 | Take the information in the `< >` above and modify the `.env` file accordingly. You will have to keep this terminal window open the entire time you run the tool. 155 | 156 | If at any time you get errors about database access, you may have to exit and rerun the `connect-to-service` command. 157 | 158 | ### Cloud.gov S3 Storage 159 | This part has not been tested at all. The storage would hold the machine learning model, and the setup is the same as for the Cloud.gov database: you will need to create a new S3 service instance and bind it to an application. Here are the [detailed instructions on how to interact with S3 outside of Cloud.gov](https://cloud.gov/docs/services/s3/#interacting-with-your-s3-bucket-from-outside-cloud-gov). Code changes are necessary to get this implemented. 160 | 161 | 162 | ## Load Data 163 | There are times we need to load data into the database; `load_data.py` helps with this task. This version assumes that the `data` and `support_data` tables either are not defined or contain no data. If they do exist, they will need to be deleted before running `load_data.py`; otherwise there can be duplicated data and an incorrect representation of the actual data. 164 | 165 | To use `load_data.py`, specify the path to the spreadsheet file that contains all the data. The data must include two fields that represent the `filter_feature` and the `validation` (a quick sanity check for these columns is sketched after the list below). 166 | - `filter_feature` - The filter feature that predictions are run on. In this specific case, it is the `'Comments Concatenated'` field. 167 | - `validation` - The validation value, which started as a prediction that a subject matter expert verified and corrected for any mistakes. In this specific case, it is the `'Validation'` field.
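Before loading, it can help to confirm that the spreadsheet actually contains these two columns. Below is a minimal, hypothetical sketch of such a check (it is not part of this repo; it assumes `pandas` is available, as it is inside the web container, and uses the column names described above):

```python
# check_load_data_input.py -- hypothetical pre-flight check, not part of this repo
import sys

import pandas as pd

# Column names as described above; adjust if your dataset uses different ones
REQUIRED_COLUMNS = ['Comments Concatenated', 'Validation']


def check_input(path):
    """Fail fast if the spreadsheet is missing the columns load_data.py expects."""
    df = pd.read_excel(path)
    missing = [col for col in REQUIRED_COLUMNS if col not in df.columns]
    if missing:
        sys.exit(f"Missing required column(s): {missing}")
    print(f"OK: {len(df)} rows, required columns present.")


if __name__ == '__main__':
    check_input(sys.argv[1])
```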
168 | 169 | To run `load_data.py`, type the following commands in the terminal (this assumes you have `docker-compose up` running and your command line prompt has activated 170 | the virtual environment with `pipenv shell`): 171 | ```bash 172 | docker exec --user hsm --workdir /home/hsm -it ${CONTAINER_NAME_WEB} /bin/bash -c "cd HSM;python load_data.py <path-to-spreadsheet>" 173 | ``` 174 | 175 | ## Testing 176 | The current state of the code does not include a lot of automated testing. During development, to ensure the tool is still working as intended, a manual testing process is necessary to spot-check different items. If you are a general user and not the person who maintains the tool, you will need to work with the maintainer if any issues come up. 177 | 178 | ### End-to-end testing 179 | Assuming that you have already set up the tool to run prediction/training, these are the steps to follow for end-to-end testing: 180 | 181 | #### Pulling the data 182 | - Follow [Step 3](#step-3-install-docker) under [Getting Started](#getting-started) to build and run the tool. 183 | - Once the tool has pulled the data, make sure it didn't fail prematurely. 184 | 185 | #### Prediction 186 | - Once it has performed prediction, you will be prompted to check the predictions in the spreadsheet. 187 | - Open the spreadsheet and check the field that you specified in `config.py` as `PREDICTION_FIELD_NAME`. Verify the data looks okay and correct it accordingly. 188 | - Make sure all the data columns you specified in `config.py` as `FIELDS` are included. 189 | - There should be columns named as you specified in `config.py` for `FILTER_FEATURE`, `NORMALIZED_FILTER_FEATURE`, `PREDICTION_FIELD_NAME`, and `ENTRY_ID`. 190 | - The column with the name specified in `config.py` as `FILTER_FEATURE` should contain essentially the combination of all the columns in `FILTER_FEATURE_FIELDS`. The values are not exactly the same because the `FILTER_FEATURE` has been processed to pull out some unnecessary words for training purposes, but do a few spot checks on the data to see that it captures most of the essential words. 191 | 192 | #### Training 193 | - Once you enter `y` to confirm the predictions, it will start inserting data into the database; note any errors that come up. 194 | - Once it has inserted all the data, it will start the retraining process. It should run smoothly, print a `Classification Report`, and end with `DONE!`. If at any point it hangs, there may be an issue. 195 | 196 | 197 | ## TODO 198 | - log performance of the models (based on the ground truth established by the validation) 199 | - include a training data table in the database. Move training data there and include support to insert validated samples there.
200 | - retrain the classifier if a certain threshold of newly validated samples is met 201 | - extend to the page-level survey 202 | - include unit and integration tests 203 | -------------------------------------------------------------------------------- /HSM/model/train.py: -------------------------------------------------------------------------------- 1 | import contractions 2 | import numpy as np 3 | import re 4 | import os 5 | import pandas as pd 6 | import nltk 7 | from nltk.stem.wordnet import WordNetLemmatizer 8 | from nltk.tokenize import word_tokenize 9 | from nltk.corpus import wordnet as wn 10 | from scipy import stats 11 | from bs4 import BeautifulSoup 12 | from sklearn import metrics 13 | from sklearn.feature_selection import SelectPercentile 14 | from sklearn.model_selection import train_test_split, RandomizedSearchCV 15 | from sklearn.linear_model import SGDClassifier 16 | from sklearn.feature_extraction.text import TfidfVectorizer 17 | # in order to use SMOTE, you've got to import Pipeline from imblearn 18 | from imblearn.pipeline import Pipeline 19 | from imblearn.over_sampling import SMOTE 20 | import dill as pickle 21 | import warnings 22 | from utils.config import ( 23 | FILTER_FEATURE, 24 | NORMALIZED_FILTER_FEATURE, 25 | PREDICTION_FIELD_NAME, 26 | ) 27 | 28 | warnings.filterwarnings('ignore') 29 | nltk.download('stopwords') 30 | nltk.download('punkt') 31 | nltk.download('averaged_perceptron_tagger') 32 | nltk.download('wordnet') 33 | 34 | 35 | class log_uniform(): 36 | """ 37 | Provides an instance of the log-uniform distribution with an .rvs() method. 38 | Meant to be used with RandomizedSearchCV, particularly for hyperparams like 39 | alpha, C, gamma, etc. 40 | 41 | Attributes: 42 | a (int or float): the exponent of the beginning of the range and 43 | b (int or float): the exponent of the end of range. 44 | base (int or float): the base of the logarithm. 10 by default. 45 | """ 46 | 47 | def __init__(self, a=-1, b=0, base=10): 48 | self.loc = a 49 | self.scale = b - a 50 | self.base = base 51 | 52 | def rvs(self, size=1, random_state=None): 53 | uniform = stats.uniform(loc=self.loc, scale=self.scale) 54 | return np.power(self.base, 55 | uniform.rvs(size=size, 56 | random_state=random_state)) 57 | 58 | 59 | class TrainClassifier(): 60 | """ 61 | Description: 62 | This class will train a model depending for the site-wide survey. 63 | Attributes: 64 | metric (str): the classifier scoring metric to use. Choose from: 65 | accuracy, roc_auc, avg_precision, fbeta, or recall. Note that for fbeta, 66 | beta = 2. 67 | train_df (DataFrame): Training dataframe with 2 columns FILTER_FEATURE, PREDICTION_FIELD_NAME) 68 | Default is None 69 | """ 70 | 71 | def __init__(self, metric='avg_precision', train_df=None): 72 | self.metric = metric 73 | self.train_df = train_df 74 | 75 | @staticmethod 76 | def clean(doc): 77 | """ 78 | Prepares text for NLP by stripping html tags; replacing urls with 'url'; 79 | and replacing email addresses with 'email'. It also expands contractions 80 | and lowercases everything. Finally, it only keeps words that are at least 81 | three characters long, do not contain a number, and are no more than 82 | 17 chars long. 83 | 84 | Arguments: 85 | doc (str): A single document within the corpus. 86 | 87 | Returns: 88 | normalized (str): The normalized string. 89 | """ 90 | 91 | def strip_html_tags(text): 92 | """ 93 | Strips html tags from a string. 
94 | """ 95 | 96 | soup = BeautifulSoup(text, "html.parser") 97 | stripped_text = soup.get_text() 98 | return stripped_text 99 | 100 | def strip_urls(text): 101 | """ 102 | Replaces urls in a string with 'url'. 103 | """ 104 | # @TODO: This needs more comments and explanations 105 | pattern = r""" 106 | (?i)\b # There is 0 boundary 107 | ((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/) # This is the base address 108 | (?:[^\s()<>]+| 109 | \(([^\s()<>]+| 110 | (\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+| 111 | (\([^\s()<>]+\)))*\)| 112 | [^\s`!()\[\]{};:'".,<>?«»“”‘’]))""" 113 | 114 | url_re = re.compile(pattern, re.VERBOSE) 115 | text = url_re.sub('url', text) 116 | return text 117 | 118 | def strip_emails(text): 119 | """ 120 | Replaces email addresses in a string with 'email'. 121 | """ 122 | 123 | email_re = re.compile(r'\S+@\S+') 124 | text = email_re.sub('email', text) 125 | return text 126 | 127 | def strip_nonsense(text): 128 | """ 129 | Returns words from a string that are at least 3 characters long, do not contain a number, and 130 | are no more than 17 chars long. 131 | """ 132 | 133 | no_nonsense = re.findall(r'\b[a-z][a-z][a-z]+\b', text) 134 | text = ' '.join(w for w in no_nonsense if w != 'nan' and len(w) <= 17) 135 | return text 136 | 137 | def expand_contractions(text, contraction_mapping=contractions.contractions_dict): 138 | """ 139 | Expands contractions within a string. For example, can't becomes cannot. 140 | """ 141 | 142 | contractions_pattern = re.compile('({})'.format('|'.join(contraction_mapping.keys())), 143 | flags=re.IGNORECASE | re.DOTALL) 144 | 145 | def expand_match(contraction): 146 | match = contraction.group(0) 147 | first_char = match[0] 148 | if contraction_mapping.get(match): 149 | expanded_contraction = contraction_mapping.get(match) 150 | else: 151 | expanded_contraction = contraction_mapping.get(match.lower()) 152 | if expanded_contraction: 153 | expanded_contraction = first_char+expanded_contraction[1:] 154 | return expanded_contraction 155 | else: 156 | pass 157 | 158 | expanded_text = contractions_pattern.sub(expand_match, text) 159 | expanded_text = re.sub("'", "", expanded_text) 160 | return expanded_text 161 | 162 | doc = doc.lower() 163 | contraction_free = expand_contractions(doc) 164 | tag_free = strip_html_tags(contraction_free) 165 | url_free = strip_urls(tag_free) 166 | email_free = strip_emails(url_free) 167 | normalized = strip_nonsense(email_free) 168 | return normalized 169 | 170 | @staticmethod 171 | def get_lemmas(document): 172 | """ 173 | Lemmatizes the string of a single document after normalizing it with the 174 | clean function. 175 | 176 | Arguments: 177 | document (str): A single document within the corpus. 178 | 179 | Returns: 180 | lemmas_str (str): A space-delimited string of lemmas. This can be 181 | passed into a word vectorizer, such as tf-idf. 
182 | """ 183 | 184 | def get_wordnet_pos(treebank_tag): 185 | """ 186 | Converts the part of speech tag returned by nltk.pos_tag() to a value 187 | that can be passed to the `pos` kwarg of wordnet_lemmatizer.lemmatize() 188 | """ 189 | 190 | if treebank_tag.startswith('J'): 191 | return wn.ADJ 192 | elif treebank_tag.startswith('V'): 193 | return wn.VERB 194 | elif treebank_tag.startswith('N'): 195 | return wn.NOUN 196 | elif treebank_tag.startswith('R'): 197 | return wn.ADV 198 | else: 199 | return wn.NOUN 200 | 201 | # stopword_set = set(stopwords.words('english')) 202 | # using the clean function defined above here 203 | text = word_tokenize(TrainClassifier.clean(document)) 204 | word_pos = nltk.pos_tag(text) 205 | wordnet_lemmatizer = WordNetLemmatizer() 206 | lemmas = [] 207 | for word, pos in word_pos: 208 | pos = get_wordnet_pos(pos) 209 | lemma = wordnet_lemmatizer.lemmatize(word, pos=pos) 210 | if 'research' in lemma: 211 | lemmas.append('research') 212 | elif 'dataset' in lemma: 213 | lemmas.append('dataset') 214 | else: 215 | lemmas.append(lemma) 216 | # lemmas_list = [lemma for lemma in lemmas if lemma not in stopword_set] 217 | lemmas_str = " ".join(lemma for lemma in lemmas) 218 | return lemmas_str 219 | 220 | def prepare_train(self): 221 | if self.train_df is None: 222 | labeled_data_path = os.path.join('model', 223 | 'training_data', 224 | 'training-sw.xlsx') 225 | 226 | train_df = pd.read_excel(labeled_data_path) 227 | else: 228 | train_df = self.train_df 229 | print("\tNormalizing the text...") 230 | # normalize the comments, preparing for tf-idf 231 | train_df[NORMALIZED_FILTER_FEATURE] = train_df[FILTER_FEATURE].astype(str).apply( 232 | TrainClassifier.get_lemmas) 233 | print("\tDone normalizing the text.") 234 | print("_"*80) 235 | return train_df 236 | 237 | def randomized_grid_search(self, 238 | train_df, 239 | clf=SGDClassifier(), 240 | n_iter_search=10, # 10 for testing purposes 241 | pickle_best=True): 242 | """ 243 | Given labeled training data (`df`) for a binary classification task, 244 | performs a randomized grid search `n_iter_search` times using `clf` as the 245 | classifier and the `score` as a scoring metric. 246 | 247 | Attributes: 248 | df (pandas DataFrame): The training data. Currently, you must specify 249 | within the function the label and feature column 250 | names. 251 | clf (instance of an sklearn classifier): SGDClassifier() by default 252 | n_iter_search: number of parameter settings that are sampled. Trades 253 | off runtime vs quality of the solution. 254 | pickle_best (bool): whether or not to pickle the best estimator 255 | returned by the grid search. 
Default is True 256 | """ 257 | 258 | score = self.metric 259 | scoring = {'accuracy': metrics.make_scorer(metrics.accuracy_score), 260 | 'roc_auc': metrics.make_scorer(metrics.roc_auc_score), 261 | 'avg_precision': metrics.make_scorer(metrics.average_precision_score), 262 | 'fbeta': metrics.make_scorer(metrics.fbeta_score, beta=1.5), 263 | 'recall': metrics.make_scorer(metrics.recall_score)} 264 | # clf_name = clf.__class__.__name__ 265 | X = train_df[NORMALIZED_FILTER_FEATURE] 266 | # y = train_df['Spam'] 267 | y = train_df[PREDICTION_FIELD_NAME] 268 | X_train, X_test, y_train, y_test = train_test_split(X, y, 269 | test_size=0.25, 270 | random_state=123) 271 | pipe = Pipeline([ 272 | ('vectorizer', TfidfVectorizer()), 273 | ('upsample', SMOTE()), 274 | ('select', SelectPercentile()), 275 | ('clf', clf)]) 276 | param_dist = { 277 | "vectorizer__ngram_range": [(1, 1), (1, 2), (1, 3)], 278 | "vectorizer__min_df": stats.randint(1, 3), 279 | "vectorizer__max_df": stats.uniform(.7, .3), 280 | "vectorizer__sublinear_tf": [True, False], 281 | "upsample": [None, 282 | SMOTE(ratio='minority', kind='svm'), 283 | SMOTE(ratio='minority', kind='regular'), 284 | SMOTE(ratio='minority', kind='borderline1'), 285 | SMOTE(ratio='minority', kind='borderline2')], 286 | "select": [None, 287 | SelectPercentile(percentile=10), 288 | SelectPercentile(percentile=20), 289 | SelectPercentile(percentile=50), 290 | SelectPercentile(percentile=75)], 291 | "clf__alpha": log_uniform(-5, 2), 292 | "clf__penalty": ['l2', 'l1', 'elasticnet'], 293 | "clf__loss": ['hinge', 'log', 'modified_huber', 'squared_hinge', 'perceptron'], 294 | } 295 | 296 | random_search = RandomizedSearchCV(pipe, param_distributions=param_dist, 297 | scoring=scoring, refit=score, 298 | n_iter=n_iter_search, cv=5, n_jobs=1, 299 | verbose=1) 300 | random_search.fit(X_train, y_train) 301 | y_pred = random_search.predict(X_test) 302 | # get the col number of the positive class (i.e. 
spam) 303 | positive_class_col = list(random_search.classes_).index(1) 304 | try: 305 | y_score = random_search.predict_proba(X_test)[:, positive_class_col] 306 | except AttributeError: 307 | y_score = random_search.decision_function(X_test) 308 | average_precision = metrics.average_precision_score(y_test, y_score) 309 | acc = metrics.accuracy_score(y_test, y_pred) 310 | roc_auc = metrics.roc_auc_score(y_test, y_pred) 311 | precisions, recalls, _ = metrics.precision_recall_curve(y_test, y_score) 312 | auc = metrics.auc(recalls, precisions) 313 | fbeta = metrics.fbeta_score(y_test, y_pred, beta=1.5) 314 | recall = metrics.recall_score(y_test, y_pred) 315 | print("\tRecall on test data: {0:.2f}".format(recall)) 316 | print("\tAccuracy on test data: {0:.2f}".format(acc)) 317 | print("\tROC-AUC on test data: {0:.2f}".format(roc_auc)) 318 | print("\tFbeta on test data: {0:.2f}".format(fbeta)) 319 | print("\tAverage Precision on test data: {0:.2f}".format(average_precision)) 320 | print("\tPrecision-Recall AUC on test data: {0:.2f}".format(auc)) 321 | print("-"*80) 322 | print("Classification Report:") 323 | class_names = ['ham', 'spam'] 324 | print(metrics.classification_report(y_test, 325 | y_pred, 326 | target_names=class_names)) 327 | best_estimator = random_search.best_estimator_ 328 | best_score = random_search.best_score_ 329 | result_values = [y_pred, y_score, precisions, recall, average_precision, 330 | acc, roc_auc, auc, fbeta, recalls, best_score, best_estimator, y_test] 331 | result_keys = ['y_pred', 'y_score', 'precisions', 'recall', 'average_precision', 'acc', 332 | 'roc_auc', 'auc', 'fbeta', 'recalls', 'best_score', 'best_estimator', 'y_test'] 333 | results = {k: v for k, v in zip(result_keys, result_values)} 334 | if pickle_best: 335 | pickle_dir = os.path.join(os.getcwd(), 'HSM', 'model', 'best_estimators') 336 | if not os.path.exists(pickle_dir): 337 | os.makedirs(pickle_dir) 338 | pickle_path = os.path.join(pickle_dir, 'model_sw.pkl') 339 | with open(pickle_path, 'wb') as f: 340 | pickle.dump(random_search.best_estimator_, f) 341 | return results 342 | 343 | 344 | def main(train_df=None): 345 | tc = TrainClassifier(train_df=train_df) 346 | train_df = tc.prepare_train() 347 | # Right now randomized_grid_search returns results, but not being used here 348 | tc.randomized_grid_search(train_df) 349 | 350 | 351 | if __name__ == '__main__': 352 | main() 353 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "8a94bd917abf65ef4c42729aedd3647fd9c6e434676dcfd45fb5ba1de7e1fc01" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.6" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "argon2-cffi": { 20 | "hashes": [ 21 | "sha256:05a8ac07c7026542377e38389638a8a1e9b78f1cd8439cd7493b39f08dd75fbf", 22 | "sha256:0bf066bc049332489bb2d75f69216416329d9dc65deee127152caeb16e5ce7d5", 23 | "sha256:18dee20e25e4be86680b178b35ccfc5d495ebd5792cd00781548d50880fee5c5", 24 | "sha256:36320372133a003374ef4275fbfce78b7ab581440dfca9f9471be3dd9a522428", 25 | "sha256:392c3c2ef91d12da510cfb6f9bae52512a4552573a9e27600bdb800e05905d2b", 26 | "sha256:3aa804c0e52f208973845e8b10c70d8957c9e5a666f702793256242e9167c4e0", 27 | "sha256:57358570592c46c420300ec94f2ff3b32cbccd10d38bdc12dc6979c4a8484fbc", 28 | 
"sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203", 29 | "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003", 30 | "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78", 31 | "sha256:7d455c802727710e9dfa69b74ccaab04568386ca17b0ad36350b622cd34606fe", 32 | "sha256:8282b84ceb46b5b75c3a882b28856b8cd7e647ac71995e71b6705ec06fc232c3", 33 | "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32", 34 | "sha256:9bee3212ba4f560af397b6d7146848c32a800652301843df06b9e8f68f0f7361", 35 | "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2", 36 | "sha256:b160416adc0f012fb1f12588a5e6954889510f82f698e23ed4f4fa57f12a0647", 37 | "sha256:b94042e5dcaa5d08cf104a54bfae614be502c6f44c9c89ad1535b2ebdaacbd4c", 38 | "sha256:ba7209b608945b889457f949cc04c8e762bed4fe3fec88ae9a6b7765ae82e496", 39 | "sha256:cc0e028b209a5483b6846053d5fd7165f460a1f14774d79e632e75e7ae64b82b", 40 | "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d", 41 | "sha256:da7f0445b71db6d3a72462e04f36544b0de871289b0bc8a7cc87c0f5ec7079fa", 42 | "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be" 43 | ], 44 | "version": "==20.1.0" 45 | }, 46 | "async-generator": { 47 | "hashes": [ 48 | "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b", 49 | "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144" 50 | ], 51 | "version": "==1.10" 52 | }, 53 | "attrs": { 54 | "hashes": [ 55 | "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", 56 | "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" 57 | ], 58 | "version": "==21.2.0" 59 | }, 60 | "backcall": { 61 | "hashes": [ 62 | "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e", 63 | "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255" 64 | ], 65 | "version": "==0.2.0" 66 | }, 67 | "beautifulsoup4": { 68 | "hashes": [ 69 | "sha256:194ec62a25438adcb3fdb06378b26559eda1ea8a747367d34c33cef9c7f48d57", 70 | "sha256:90f8e61121d6ae58362ce3bed8cd997efb00c914eae0ff3d363c32f9a9822d10", 71 | "sha256:f0abd31228055d698bb392a826528ea08ebb9959e6bea17c606fd9c9009db938" 72 | ], 73 | "index": "pypi", 74 | "version": "==4.6.3" 75 | }, 76 | "bleach": { 77 | "hashes": [ 78 | "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125", 79 | "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433" 80 | ], 81 | "version": "==3.3.0" 82 | }, 83 | "certifi": { 84 | "hashes": [ 85 | "sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c", 86 | "sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a" 87 | ], 88 | "index": "pypi", 89 | "version": "==2018.10.15" 90 | }, 91 | "cfenv": { 92 | "hashes": [ 93 | "sha256:7815bffcc4a3db350f92517157fafc577c11b5a7ff172dc5632f1042b93073e8", 94 | "sha256:c7a91a4c82431acfc35db664c194d5e6cc7f4df3dcb692d0f836a6ceb0156167" 95 | ], 96 | "index": "pypi", 97 | "version": "==0.5.3" 98 | }, 99 | "cffi": { 100 | "hashes": [ 101 | "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813", 102 | "sha256:04c468b622ed31d408fea2346bec5bbffba2cc44226302a0de1ade9f5ea3d373", 103 | "sha256:06d7cd1abac2ffd92e65c0609661866709b4b2d82dd15f611e602b9b188b0b69", 104 | "sha256:06db6321b7a68b2bd6df96d08a5adadc1fa0e8f419226e25b2a5fbf6ccc7350f", 105 | "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06", 106 | 
"sha256:0f861a89e0043afec2a51fd177a567005847973be86f709bbb044d7f42fc4e05", 107 | "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea", 108 | "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee", 109 | "sha256:1bf1ac1984eaa7675ca8d5745a8cb87ef7abecb5592178406e55858d411eadc0", 110 | "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396", 111 | "sha256:24a570cd11895b60829e941f2613a4f79df1a27344cbbb82164ef2e0116f09c7", 112 | "sha256:24ec4ff2c5c0c8f9c6b87d5bb53555bf267e1e6f70e52e5a9740d32861d36b6f", 113 | "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73", 114 | "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315", 115 | "sha256:293e7ea41280cb28c6fcaaa0b1aa1f533b8ce060b9e701d78511e1e6c4a1de76", 116 | "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1", 117 | "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49", 118 | "sha256:3c3f39fa737542161d8b0d680df2ec249334cd70a8f420f71c9304bd83c3cbed", 119 | "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892", 120 | "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482", 121 | "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058", 122 | "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5", 123 | "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53", 124 | "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045", 125 | "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3", 126 | "sha256:681d07b0d1e3c462dd15585ef5e33cb021321588bebd910124ef4f4fb71aef55", 127 | "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5", 128 | "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e", 129 | "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c", 130 | "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369", 131 | "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827", 132 | "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053", 133 | "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa", 134 | "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4", 135 | "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322", 136 | "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132", 137 | "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62", 138 | "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa", 139 | "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0", 140 | "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396", 141 | "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e", 142 | "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991", 143 | "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6", 144 | "sha256:cc5a8e069b9ebfa22e26d0e6b97d6f9781302fe7f4f2b8776c3e1daea35f1adc", 145 | "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1", 146 | "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406", 147 | "sha256:df5052c5d867c1ea0b311fb7c3cd28b19df469c056f7fdcfe88c7473aa63e333", 148 | "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d", 149 | 
"sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c" 150 | ], 151 | "version": "==1.14.5" 152 | }, 153 | "chardet": { 154 | "hashes": [ 155 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", 156 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" 157 | ], 158 | "index": "pypi", 159 | "version": "==3.0.4" 160 | }, 161 | "click": { 162 | "hashes": [ 163 | "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a", 164 | "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6" 165 | ], 166 | "version": "==8.0.1" 167 | }, 168 | "contractions": { 169 | "hashes": [ 170 | "sha256:7eb1665238be07e29555d03e44f122eda5973e04c2e80044a454383aa16112af", 171 | "sha256:ba0165ed47c0cbaa81736d631391c3c1ce9c9ca33a83853ffe8f122b7bd0e033" 172 | ], 173 | "index": "pypi", 174 | "version": "==0.0.17" 175 | }, 176 | "cycler": { 177 | "hashes": [ 178 | "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d", 179 | "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8" 180 | ], 181 | "version": "==0.10.0" 182 | }, 183 | "dataclasses": { 184 | "hashes": [ 185 | "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf", 186 | "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97" 187 | ], 188 | "markers": "python_version < '3.7'", 189 | "version": "==0.8" 190 | }, 191 | "decorator": { 192 | "hashes": [ 193 | "sha256:6e5c199c16f7a9f0e3a61a4a54b3d27e7dad0dbdde92b944426cb20914376323", 194 | "sha256:72ecfba4320a893c53f9706bebb2d55c270c1e51a28789361aa93e4a21319ed5" 195 | ], 196 | "version": "==5.0.9" 197 | }, 198 | "defusedxml": { 199 | "hashes": [ 200 | "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", 201 | "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61" 202 | ], 203 | "version": "==0.7.1" 204 | }, 205 | "dill": { 206 | "hashes": [ 207 | "sha256:624dc244b94371bb2d6e7f40084228a2edfff02373fe20e018bef1ee92fdd5b3" 208 | ], 209 | "index": "pypi", 210 | "version": "==0.2.8.2" 211 | }, 212 | "entrypoints": { 213 | "hashes": [ 214 | "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", 215 | "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" 216 | ], 217 | "version": "==0.3" 218 | }, 219 | "et-xmlfile": { 220 | "hashes": [ 221 | "sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b" 222 | ], 223 | "index": "pypi", 224 | "version": "==1.0.1" 225 | }, 226 | "flask": { 227 | "hashes": [ 228 | "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", 229 | "sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6" 230 | ], 231 | "index": "pypi", 232 | "version": "==1.1.1" 233 | }, 234 | "flask-httpauth": { 235 | "hashes": [ 236 | "sha256:0149953720489407e51ec24bc2f86273597b7973d71cd51f9443bd0e2a89bd72", 237 | "sha256:6ef8b761332e780f9ff74d5f9056c2616f52babc1998b01d9f361a1e439e61b9" 238 | ], 239 | "index": "pypi", 240 | "version": "==3.3.0" 241 | }, 242 | "funcy": { 243 | "hashes": [ 244 | "sha256:1d3fc5d42cf7564a6b2be04042d0df7a50c77903cf760a34786d0c9ebd659b25", 245 | "sha256:2775409b7dc9106283f1224d97e6df5f2c02e7291c8caed72764f5a115dffb50" 246 | ], 247 | "version": "==1.16" 248 | }, 249 | "furl": { 250 | "hashes": [ 251 | "sha256:a2c6adb472fc5faba2e18b6c28b83464b80201f168fd10b81997895a7cb5d5a6", 252 | "sha256:f7dba33eafbee7dbc83963534b25e72f816cced48ac53191ee60bfcc62933918" 253 | ], 254 | "version": "==2.1.2" 255 | }, 
256 | "future": { 257 | "hashes": [ 258 | "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d" 259 | ], 260 | "version": "==0.18.2" 261 | }, 262 | "gunicorn": { 263 | "hashes": [ 264 | "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626", 265 | "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c" 266 | ], 267 | "index": "pypi", 268 | "version": "==20.0.4" 269 | }, 270 | "idna": { 271 | "hashes": [ 272 | "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", 273 | "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" 274 | ], 275 | "index": "pypi", 276 | "version": "==2.7" 277 | }, 278 | "imbalanced-learn": { 279 | "hashes": [ 280 | "sha256:d4d59b96780044ba97c6263e69cf145b9564241c402177f3d650b6c0c63fab93", 281 | "sha256:e49550da4c98771afd0aa49db7710ab0d85e766754bad9fb767567c367d2b2e4" 282 | ], 283 | "index": "pypi", 284 | "version": "==0.3.3" 285 | }, 286 | "importlib-metadata": { 287 | "hashes": [ 288 | "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00", 289 | "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139" 290 | ], 291 | "markers": "python_version < '3.8'", 292 | "version": "==4.5.0" 293 | }, 294 | "iniconfig": { 295 | "hashes": [ 296 | "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", 297 | "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" 298 | ], 299 | "version": "==1.1.1" 300 | }, 301 | "ipykernel": { 302 | "hashes": [ 303 | "sha256:29eee66548ee7c2edb7941de60c0ccf0a7a8dd957341db0a49c5e8e6a0fcb712", 304 | "sha256:e976751336b51082a89fc2099fb7f96ef20f535837c398df6eab1283c2070884" 305 | ], 306 | "version": "==5.5.5" 307 | }, 308 | "ipython": { 309 | "hashes": [ 310 | "sha256:2dbcc8c27ca7d3cfe4fcdff7f45b27f9a8d3edfa70ff8024a71c7a8eb5f09d64", 311 | "sha256:9f4fcb31d3b2c533333893b9172264e4821c1ac91839500f31bd43f2c59b3ccf" 312 | ], 313 | "markers": "python_version >= '3.3'", 314 | "version": "==7.16.1" 315 | }, 316 | "ipython-genutils": { 317 | "hashes": [ 318 | "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8", 319 | "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8" 320 | ], 321 | "version": "==0.2.0" 322 | }, 323 | "ipywidgets": { 324 | "hashes": [ 325 | "sha256:9f1a43e620530f9e570e4a493677d25f08310118d315b00e25a18f12913c41f0", 326 | "sha256:e6513cfdaf5878de30f32d57f6dc2474da395a2a2991b94d487406c0ab7f55ca" 327 | ], 328 | "version": "==7.6.3" 329 | }, 330 | "itsdangerous": { 331 | "hashes": [ 332 | "sha256:5174094b9637652bdb841a3029700391451bd092ba3db90600dea710ba28e97c", 333 | "sha256:9e724d68fc22902a1435351f84c3fb8623f303fffcc566a4cb952df8c572cff0" 334 | ], 335 | "version": "==2.0.1" 336 | }, 337 | "jdcal": { 338 | "hashes": [ 339 | "sha256:948fb8d079e63b4be7a69dd5f0cd618a0a57e80753de8248fd786a8a20658a07", 340 | "sha256:ea0a5067c5f0f50ad4c7bdc80abad3d976604f6fb026b0b3a17a9d84bb9046c9" 341 | ], 342 | "index": "pypi", 343 | "version": "==1.4" 344 | }, 345 | "jedi": { 346 | "hashes": [ 347 | "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93", 348 | "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707" 349 | ], 350 | "version": "==0.18.0" 351 | }, 352 | "jinja2": { 353 | "hashes": [ 354 | "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4", 355 | "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4" 356 | ], 357 | "version": "==3.0.1" 358 | }, 359 | "joblib": { 360 
| "hashes": [ 361 | "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7", 362 | "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5" 363 | ], 364 | "version": "==1.0.1" 365 | }, 366 | "jsonschema": { 367 | "hashes": [ 368 | "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163", 369 | "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a" 370 | ], 371 | "version": "==3.2.0" 372 | }, 373 | "jupyter": { 374 | "hashes": [ 375 | "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7", 376 | "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78", 377 | "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f" 378 | ], 379 | "index": "pypi", 380 | "version": "==1.0.0" 381 | }, 382 | "jupyter-client": { 383 | "hashes": [ 384 | "sha256:9715152067e3f7ea3b56f341c9a0f9715c8c7cc316ee0eb13c3c84f5ca0065f5", 385 | "sha256:e2ab61d79fbf8b56734a4c2499f19830fbd7f6fefb3e87868ef0545cb3c17eb9" 386 | ], 387 | "version": "==6.2.0" 388 | }, 389 | "jupyter-console": { 390 | "hashes": [ 391 | "sha256:242248e1685039cd8bff2c2ecb7ce6c1546eb50ee3b08519729e6e881aec19c7", 392 | "sha256:7799c4ea951e0e96ba8260575423cb323ea5a03fcf5503560fa3e15748869e27" 393 | ], 394 | "version": "==6.4.0" 395 | }, 396 | "jupyter-contrib-core": { 397 | "hashes": [ 398 | "sha256:1ec81e275a8f5858d56b0c4c6cd85335aa8e915001b8657fe51c620c3cdde50f", 399 | "sha256:e65bc0e932ff31801003cef160a4665f2812efe26a53801925a634735e9a5794" 400 | ], 401 | "version": "==0.3.3" 402 | }, 403 | "jupyter-contrib-nbextensions": { 404 | "hashes": [ 405 | "sha256:2c071f0aa208c569666f656bdc0f66906ca493cf9f06f46db6350db11030ff40", 406 | "sha256:eecd28ecc2fc410226c0a3d4932ed2fac4860ccf8d9e9b1b29548835a35b22ab" 407 | ], 408 | "index": "pypi", 409 | "version": "==0.5.1" 410 | }, 411 | "jupyter-core": { 412 | "hashes": [ 413 | "sha256:79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4", 414 | "sha256:8c6c0cac5c1b563622ad49321d5ec47017bd18b94facb381c6973a0486395f8e" 415 | ], 416 | "version": "==4.7.1" 417 | }, 418 | "jupyter-highlight-selected-word": { 419 | "hashes": [ 420 | "sha256:9545dfa9cb057eebe3a5795604dcd3a5294ea18637e553f61a0b67c1b5903c58", 421 | "sha256:9fa740424859a807950ca08d2bfd28a35154cd32dd6d50ac4e0950022adc0e7b" 422 | ], 423 | "version": "==0.2.0" 424 | }, 425 | "jupyter-latex-envs": { 426 | "hashes": [ 427 | "sha256:070a31eb2dc488bba983915879a7c2939247bf5c3b669b398bdb36a9b5343872" 428 | ], 429 | "version": "==1.4.6" 430 | }, 431 | "jupyter-nbextensions-configurator": { 432 | "hashes": [ 433 | "sha256:e5e86b5d9d898e1ffb30ebb08e4ad8696999f798fef3ff3262d7b999076e4e83" 434 | ], 435 | "version": "==0.4.1" 436 | }, 437 | "jupyterlab-pygments": { 438 | "hashes": [ 439 | "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008", 440 | "sha256:cfcda0873626150932f438eccf0f8bf22bfa92345b814890ab360d666b254146" 441 | ], 442 | "version": "==0.1.2" 443 | }, 444 | "jupyterlab-widgets": { 445 | "hashes": [ 446 | "sha256:5c1a29a84d3069208cb506b10609175b249b6486d6b1cbae8fcde2a11584fb78", 447 | "sha256:caeaf3e6103180e654e7d8d2b81b7d645e59e432487c1d35a41d6d3ee56b3fef" 448 | ], 449 | "markers": "python_version >= '3.6'", 450 | "version": "==1.0.0" 451 | }, 452 | "kiwisolver": { 453 | "hashes": [ 454 | "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d", 455 | "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31", 456 | 
"sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9", 457 | "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0", 458 | "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72", 459 | "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3", 460 | "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6", 461 | "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e", 462 | "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000", 463 | "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3", 464 | "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18", 465 | "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21", 466 | "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621", 467 | "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b", 468 | "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc", 469 | "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131", 470 | "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882", 471 | "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454", 472 | "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248", 473 | "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de", 474 | "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598", 475 | "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54", 476 | "sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278", 477 | "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6", 478 | "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81", 479 | "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030", 480 | "sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8", 481 | "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689", 482 | "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4", 483 | "sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0", 484 | "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05", 485 | "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9" 486 | ], 487 | "version": "==1.3.1" 488 | }, 489 | "lxml": { 490 | "hashes": [ 491 | "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d", 492 | "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3", 493 | "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2", 494 | "sha256:1b38116b6e628118dea5b2186ee6820ab138dbb1e24a13e478490c7db2f326ae", 495 | "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f", 496 | "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927", 497 | "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3", 498 | "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7", 499 | "sha256:3082c518be8e97324390614dacd041bb1358c882d77108ca1957ba47738d9d59", 500 | "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f", 501 | "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade", 502 | "sha256:36108c73739985979bf302006527cf8a20515ce444ba916281d1c43938b8bb96", 503 | "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468", 
504 | "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b", 505 | "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4", 506 | "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354", 507 | "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83", 508 | "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04", 509 | "sha256:5c8c163396cc0df3fd151b927e74f6e4acd67160d6c33304e805b84293351d16", 510 | "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791", 511 | "sha256:6f12e1427285008fd32a6025e38e977d44d6382cf28e7201ed10d6c1698d2a9a", 512 | "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51", 513 | "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1", 514 | "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a", 515 | "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f", 516 | "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee", 517 | "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec", 518 | "sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969", 519 | "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28", 520 | "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a", 521 | "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa", 522 | "sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106", 523 | "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d", 524 | "sha256:c47ff7e0a36d4efac9fd692cfa33fbd0636674c102e9e8d9b26e1b93a94e7617", 525 | "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4", 526 | "sha256:cdaf11d2bd275bf391b5308f86731e5194a21af45fbaaaf1d9e8147b9160ea92", 527 | "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0", 528 | "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4", 529 | "sha256:d916d31fd85b2f78c76400d625076d9124de3e4bda8b016d25a050cc7d603f24", 530 | "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2", 531 | "sha256:e1cbd3f19a61e27e011e02f9600837b921ac661f0c40560eefb366e4e4fb275e", 532 | "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0", 533 | "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654", 534 | "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2", 535 | "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23", 536 | "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586" 537 | ], 538 | "version": "==4.6.3" 539 | }, 540 | "markupsafe": { 541 | "hashes": [ 542 | "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298", 543 | "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64", 544 | "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b", 545 | "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567", 546 | "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff", 547 | "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74", 548 | "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35", 549 | "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26", 550 | "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7", 551 | 
"sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75", 552 | "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f", 553 | "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135", 554 | "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8", 555 | "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a", 556 | "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914", 557 | "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18", 558 | "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8", 559 | "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2", 560 | "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d", 561 | "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b", 562 | "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f", 563 | "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb", 564 | "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833", 565 | "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415", 566 | "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902", 567 | "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9", 568 | "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d", 569 | "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066", 570 | "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f", 571 | "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5", 572 | "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94", 573 | "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509", 574 | "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51", 575 | "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872" 576 | ], 577 | "version": "==2.0.1" 578 | }, 579 | "matplotlib": { 580 | "hashes": [ 581 | "sha256:08ccc8922eb4792b91c652d3e6d46b1c99073f1284d1b6705155643e8046463a", 582 | "sha256:161dcd807c0c3232f4dcd4a12a382d52004a498174cbfafd40646106c5bcdcc8", 583 | "sha256:1f9e885bfa1b148d16f82a6672d043ecf11197f6c71ae222d0546db706e52eb2", 584 | "sha256:2d6ab54015a7c0d727c33e36f85f5c5e4172059efdd067f7527f6e5d16ad01aa", 585 | "sha256:5d2e408a2813abf664bd79431107543ecb449136912eb55bb312317edecf597e", 586 | "sha256:61c8b740a008218eb604de518eb411c4953db0cb725dd0b32adf8a81771cab9e", 587 | "sha256:80f10af8378fccc136da40ea6aa4a920767476cdfb3241acb93ef4f0465dbf57", 588 | "sha256:819d4860315468b482f38f1afe45a5437f60f03eaede495d5ff89f2eeac89500", 589 | "sha256:8cc0e44905c2c8fda5637cad6f311eb9517017515a034247ab93d0cf99f8bb7a", 590 | "sha256:8e8e2c2fe3d873108735c6ee9884e6f36f467df4a143136209cff303b183bada", 591 | "sha256:98c2ffeab8b79a4e3a0af5dd9939f92980eb6e3fec10f7f313df5f35a84dacab", 592 | "sha256:d59bb0e82002ac49f4152963f8a1079e66794a4f454457fd2f0dcc7bf0797d30", 593 | "sha256:ee59b7bb9eb75932fe3787e54e61c99b628155b0cedc907864f24723ba55b309" 594 | ], 595 | "index": "pypi", 596 | "version": "==3.1.2" 597 | }, 598 | "mistune": { 599 | "hashes": [ 600 | "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e", 601 | "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4" 602 | ], 603 | "version": "==0.8.4" 604 | }, 605 | "nbclient": { 606 | "hashes": [ 607 | 
"sha256:db17271330c68c8c88d46d72349e24c147bb6f34ec82d8481a8f025c4d26589c", 608 | "sha256:e79437364a2376892b3f46bedbf9b444e5396cfb1bc366a472c37b48e9551500" 609 | ], 610 | "version": "==0.5.3" 611 | }, 612 | "nbconvert": { 613 | "hashes": [ 614 | "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d", 615 | "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002" 616 | ], 617 | "version": "==6.0.7" 618 | }, 619 | "nbformat": { 620 | "hashes": [ 621 | "sha256:b516788ad70771c6250977c1374fcca6edebe6126fd2adb5a69aa5c2356fd1c8", 622 | "sha256:eb8447edd7127d043361bc17f2f5a807626bc8e878c7709a1c647abda28a9171" 623 | ], 624 | "version": "==5.1.3" 625 | }, 626 | "nest-asyncio": { 627 | "hashes": [ 628 | "sha256:76d6e972265063fe92a90b9cc4fb82616e07d586b346ed9d2c89a4187acea39c", 629 | "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa" 630 | ], 631 | "version": "==1.5.1" 632 | }, 633 | "nltk": { 634 | "hashes": [ 635 | "sha256:bed45551259aa2101381bbdd5df37d44ca2669c5c3dad72439fa459b29137d94" 636 | ], 637 | "index": "pypi", 638 | "version": "==3.4.5" 639 | }, 640 | "notebook": { 641 | "hashes": [ 642 | "sha256:9c4625e2a2aa49d6eae4ce20cbc3d8976db19267e32d2a304880e0c10bf8aef9", 643 | "sha256:f7f0a71a999c7967d9418272ae4c3378a220bd28330fbfb49860e46cf8a5838a" 644 | ], 645 | "version": "==6.4.0" 646 | }, 647 | "numexpr": { 648 | "hashes": [ 649 | "sha256:05b97b19e864a5d1a0b106933b1637233a2444fd375685bead264a818f847ef2", 650 | "sha256:0732c9989bff8568ee78fa461f3698166d4ac79363860be22ff49eae1dcd15e7", 651 | "sha256:23718ac5f2ebae995f5899509624781b375da568f2b645b5d1fd6dbb17f41a56", 652 | "sha256:24cdb8c0e93f31387a4c2ddd09a687874c006e6139fd68bcf77b96e51d17cb01", 653 | "sha256:2e14b44a79030fbe25f16393162a4d21ced14056fac49ff73856f661a78db731", 654 | "sha256:3daa55515ee3cb40bf5ab8263c0c13fff8d484d64d107a9c414e8ca151dc08a6", 655 | "sha256:43616529f9b7d1afc83386f943dc66c4da5e052f00217ba7e3ad8dd1b5f3a825", 656 | "sha256:4527a0a7b04f858a73c348c9c4ce8441b7a54965db74a32ba808c51d9d53b7cd", 657 | "sha256:51277a530a353e0f94665b44615249d7e7075f0c73f78d4743da632fc44bc648", 658 | "sha256:5223a519f48754dd350723d9fbcadbcd0476881bc954a281a09a6538ecabfc27", 659 | "sha256:5d6dbf050a9b8ebff0b7706ebeaf1cd57d64ef4dfe61aef3790851b481daf6b5", 660 | "sha256:5f4122bd58aa4e4891814c2f72bd47b1cdb202c9d863ea96c5394dffb72a16e2", 661 | "sha256:602df9b5c500d0a887dc96b4cfd16fb60ae7ef39ccd6f013f4df2ee11ae70553", 662 | "sha256:618259287b8b81a352a7d088ad03fe3b393a842ccb45f0b3cfc6a712d41b7595", 663 | "sha256:74df157ab4577bfc83c14f4e39d14781b06ade5406d3efef049f90c88d8c28ea", 664 | "sha256:785065819ce98e3d3dd853794244e0de190d7ba36ab42c8fd79e0e9cd40de7af", 665 | "sha256:7ab40e2b438f4ea2ea8234c63639cdf5072cdb29d0ac521307854efe0281a567", 666 | "sha256:833a363c86266424349467b53f4060f77aaa7ec03c1e6f38c54e69c65ceebf30", 667 | "sha256:8b76bcca930cbf0db0fe98b6a51d6286dff77d525dad670cb7750e29a138d434", 668 | "sha256:8fc23a49f4266c24a23310c0cb92ff54c4b4f535635f90372b3a2d5cb1f83329", 669 | "sha256:90ea6d5813e1906bb203ef220a600b30d83e75aea2607a7e7037cceae9e93346", 670 | "sha256:97753d17d1ea39e082b1907b99b6cb63cac7d1dfa512d2ff5079eb7bfab1ea88", 671 | "sha256:99472731bc1111f5d73285dd2a4c228b5bfb176f785a567872e0fbfec6584f2b", 672 | "sha256:a3f1cec8657bd3920869a2ea27f98d68ac3000334f366d844a9670ae671fe4bd", 673 | "sha256:a8e0e48d72391543b68d0471fac2e31c614efdce4036e2a0a8a182fde1edb0e0", 674 | "sha256:aae4ce158da53ebc47df053de90fed9d0d51fa0df8cc481abc8a901ea4f0cec7", 675 | 
"sha256:b0a9124a66a61b05ea84b832358d6aa5561c30e69b4dcaea819b296f4f025f89", 676 | "sha256:c2605e5665b0d7362e0d2b92683387c12e15c7440daf702a7637f7502a967810", 677 | "sha256:c9218aeb76717768f617362b72a87e9219da95ba7cdec0732ccecc4a4719124c", 678 | "sha256:c978c49bd9dded6a4ba6b3501e3a34e3aba9312cbb7d800bed7ac6fcd2d5949d", 679 | "sha256:d14ae09318ad86579e35aacf1596c83d5db1139cd68615967ee23605e11f5d82", 680 | "sha256:d423441593a952ac56d1f774068b81fb22f514fb68873c066578345a6af74c0d", 681 | "sha256:dc707486b1f3dda18a39bc4d06a0a09d3c0ea47bd6b99fdb98adb26d1277253f", 682 | "sha256:dfdca3d1f4c83fa8fd3ee7573110efd13e838543896641b89367622ec6a67eb4", 683 | "sha256:e000570a6a704c594832ff4fc45f18864b721b7b444a185b365dbb03d3fe3abb", 684 | "sha256:e985026e64350dd59fd91a09bc364edf706d58b12e01362ddfa63829878bd434", 685 | "sha256:eeeb6325df6cf3f3ab7d9dbabf3bc03ac88b7e2f2aed21419c31e23c3048dce1", 686 | "sha256:f9df0a74d39616fd011071c5850418f244bac414f24ed55c00dcf3c5385e8374" 687 | ], 688 | "version": "==2.7.3" 689 | }, 690 | "numpy": { 691 | "hashes": [ 692 | "sha256:0df89ca13c25eaa1621a3f09af4c8ba20da849692dcae184cb55e80952c453fb", 693 | "sha256:154c35f195fd3e1fad2569930ca51907057ae35e03938f89a8aedae91dd1b7c7", 694 | "sha256:18e84323cdb8de3325e741a7a8dd4a82db74fde363dce32b625324c7b32aa6d7", 695 | "sha256:1e8956c37fc138d65ded2d96ab3949bd49038cc6e8a4494b1515b0ba88c91565", 696 | "sha256:23557bdbca3ccbde3abaa12a6e82299bc92d2b9139011f8c16ca1bb8c75d1e95", 697 | "sha256:24fd645a5e5d224aa6e39d93e4a722fafa9160154f296fd5ef9580191c755053", 698 | "sha256:36e36b6868e4440760d4b9b44587ea1dc1f06532858d10abba98e851e154ca70", 699 | "sha256:3d734559db35aa3697dadcea492a423118c5c55d176da2f3be9c98d4803fc2a7", 700 | "sha256:416a2070acf3a2b5d586f9a6507bb97e33574df5bd7508ea970bbf4fc563fa52", 701 | "sha256:4a22dc3f5221a644dfe4a63bf990052cc674ef12a157b1056969079985c92816", 702 | "sha256:4d8d3e5aa6087490912c14a3c10fbdd380b40b421c13920ff468163bc50e016f", 703 | "sha256:4f41fd159fba1245e1958a99d349df49c616b133636e0cf668f169bce2aeac2d", 704 | "sha256:561ef098c50f91fbac2cc9305b68c915e9eb915a74d9038ecf8af274d748f76f", 705 | "sha256:56994e14b386b5c0a9b875a76d22d707b315fa037affc7819cda08b6d0489756", 706 | "sha256:73a1f2a529604c50c262179fcca59c87a05ff4614fe8a15c186934d84d09d9a5", 707 | "sha256:7da99445fd890206bfcc7419f79871ba8e73d9d9e6b82fe09980bc5bb4efc35f", 708 | "sha256:99d59e0bcadac4aa3280616591fb7bcd560e2218f5e31d5223a2e12a1425d495", 709 | "sha256:a4cc09489843c70b22e8373ca3dfa52b3fab778b57cf81462f1203b0852e95e3", 710 | "sha256:a61dc29cfca9831a03442a21d4b5fd77e3067beca4b5f81f1a89a04a71cf93fa", 711 | "sha256:b1853df739b32fa913cc59ad9137caa9cc3d97ff871e2bbd89c2a2a1d4a69451", 712 | "sha256:b1f44c335532c0581b77491b7715a871d0dd72e97487ac0f57337ccf3ab3469b", 713 | "sha256:b261e0cb0d6faa8fd6863af26d30351fd2ffdb15b82e51e81e96b9e9e2e7ba16", 714 | "sha256:c857ae5dba375ea26a6228f98c195fec0898a0fd91bcf0e8a0cae6d9faf3eca7", 715 | "sha256:cf5bb4a7d53a71bb6a0144d31df784a973b36d8687d615ef6a7e9b1809917a9b", 716 | "sha256:db9814ff0457b46f2e1d494c1efa4111ca089e08c8b983635ebffb9c1573361f", 717 | "sha256:df04f4bad8a359daa2ff74f8108ea051670cafbca533bb2636c58b16e962989e", 718 | "sha256:ecf81720934a0e18526177e645cbd6a8a21bb0ddc887ff9738de07a1df5c6b61", 719 | "sha256:edfa6fba9157e0e3be0f40168eb142511012683ac3dc82420bee4a3f3981b30e" 720 | ], 721 | "index": "pypi", 722 | "version": "==1.15.4" 723 | }, 724 | "openpyxl": { 725 | "hashes": [ 726 | "sha256:022c0f3fa1e873cc0ba20651c54dd5e6276fc4ff150b4060723add4fc448645e" 727 | ], 728 | "index": "pypi", 729 | "version": 
"==2.5.9" 730 | }, 731 | "orderedmultidict": { 732 | "hashes": [ 733 | "sha256:04070bbb5e87291cc9bfa51df413677faf2141c73c61d2a5f7b26bea3cd882ad", 734 | "sha256:43c839a17ee3cdd62234c47deca1a8508a3f2ca1d0678a3bf791c87cf84adbf3" 735 | ], 736 | "version": "==1.0.1" 737 | }, 738 | "packaging": { 739 | "hashes": [ 740 | "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", 741 | "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" 742 | ], 743 | "version": "==20.9" 744 | }, 745 | "pandas": { 746 | "hashes": [ 747 | "sha256:11975fad9edbdb55f1a560d96f91830e83e29bed6ad5ebf506abda09818eaf60", 748 | "sha256:12e13d127ca1b585dd6f6840d3fe3fa6e46c36a6afe2dbc5cb0b57032c902e31", 749 | "sha256:1c87fcb201e1e06f66e23a61a5fea9eeebfe7204a66d99df24600e3f05168051", 750 | "sha256:242e9900de758e137304ad4b5663c2eff0d798c2c3b891250bd0bd97144579da", 751 | "sha256:26c903d0ae1542890cb9abadb4adcb18f356b14c2df46e4ff657ae640e3ac9e7", 752 | "sha256:2e1e88f9d3e5f107b65b59cd29f141995597b035d17cc5537e58142038942e1a", 753 | "sha256:31b7a48b344c14691a8e92765d4023f88902ba3e96e2e4d0364d3453cdfd50db", 754 | "sha256:4fd07a932b4352f8a8973761ab4e84f965bf81cc750fb38e04f01088ab901cb8", 755 | "sha256:5b24ca47acf69222e82530e89111dd9d14f9b970ab2cd3a1c2c78f0c4fbba4f4", 756 | "sha256:647b3b916cc8f6aeba240c8171be3ab799c3c1b2ea179a3be0bd2712c4237553", 757 | "sha256:66b060946046ca27c0e03e9bec9bba3e0b918bafff84c425ca2cc2e157ce121e", 758 | "sha256:6efa9fa6e1434141df8872d0fa4226fc301b17aacf37429193f9d70b426ea28f", 759 | "sha256:be4715c9d8367e51dbe6bc6d05e205b1ae234f0dc5465931014aa1c4af44c1ba", 760 | "sha256:bea90da782d8e945fccfc958585210d23de374fa9294a9481ed2abcef637ebfc", 761 | "sha256:d318d77ab96f66a59e792a481e2701fba879e1a453aefeebdb17444fe204d1ed", 762 | "sha256:d785fc08d6f4207437e900ffead930a61e634c5e4f980ba6d3dc03c9581748c7", 763 | "sha256:de9559287c4fe8da56e8c3878d2374abc19d1ba2b807bfa7553e912a8e5ba87c", 764 | "sha256:f4f98b190bb918ac0bc0e3dd2ab74ff3573da9f43106f6dba6385406912ec00f", 765 | "sha256:f71f1a7e2d03758f6e957896ed696254e2bc83110ddbc6942018f1a232dd9dad", 766 | "sha256:fb944c8f0b0ab5c1f7846c686bc4cdf8cde7224655c12edcd59d5212cd57bec0" 767 | ], 768 | "index": "pypi", 769 | "version": "==0.23.4" 770 | }, 771 | "pandocfilters": { 772 | "hashes": [ 773 | "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb" 774 | ], 775 | "version": "==1.4.3" 776 | }, 777 | "parso": { 778 | "hashes": [ 779 | "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398", 780 | "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22" 781 | ], 782 | "version": "==0.8.2" 783 | }, 784 | "pexpect": { 785 | "hashes": [ 786 | "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937", 787 | "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c" 788 | ], 789 | "markers": "sys_platform != 'win32'", 790 | "version": "==4.8.0" 791 | }, 792 | "pickleshare": { 793 | "hashes": [ 794 | "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca", 795 | "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56" 796 | ], 797 | "version": "==0.7.5" 798 | }, 799 | "pillow": { 800 | "hashes": [ 801 | "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5", 802 | "sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4", 803 | "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9", 804 | "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a", 805 | 
"sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9", 806 | "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727", 807 | "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120", 808 | "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c", 809 | "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2", 810 | "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797", 811 | "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b", 812 | "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f", 813 | "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef", 814 | "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232", 815 | "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb", 816 | "sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9", 817 | "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812", 818 | "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178", 819 | "sha256:8b56553c0345ad6dcb2e9b433ae47d67f95fc23fe28a0bde15a120f25257e291", 820 | "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b", 821 | "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5", 822 | "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b", 823 | "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1", 824 | "sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713", 825 | "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4", 826 | "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484", 827 | "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c", 828 | "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9", 829 | "sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388", 830 | "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d", 831 | "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602", 832 | "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9", 833 | "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e", 834 | "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2" 835 | ], 836 | "index": "pypi", 837 | "version": "==8.2.0" 838 | }, 839 | "pluggy": { 840 | "hashes": [ 841 | "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", 842 | "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" 843 | ], 844 | "version": "==0.13.1" 845 | }, 846 | "prometheus-client": { 847 | "hashes": [ 848 | "sha256:3a8baade6cb80bcfe43297e33e7623f3118d660d41387593758e2fb1ea173a86", 849 | "sha256:b014bc76815eb1399da8ce5fc84b7717a3e63652b0c0f8804092c9363acab1b2" 850 | ], 851 | "version": "==0.11.0" 852 | }, 853 | "prompt-toolkit": { 854 | "hashes": [ 855 | "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04", 856 | "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc" 857 | ], 858 | "version": "==3.0.18" 859 | }, 860 | "psycopg2-binary": { 861 | "hashes": [ 862 | "sha256:040234f8a4a8dfd692662a8308d78f63f31a97e1c42d2480e5e6810c48966a29", 863 | "sha256:086f7e89ec85a6704db51f68f0dcae432eff9300809723a6e8782c41c2f48e03", 864 | "sha256:18ca813fdb17bc1db73fe61b196b05dd1ca2165b884dd5ec5568877cabf9b039", 865 | 
"sha256:19dc39616850342a2a6db70559af55b22955f86667b5f652f40c0e99253d9881", 866 | "sha256:2166e770cb98f02ed5ee2b0b569d40db26788e0bf2ec3ae1a0d864ea6f1d8309", 867 | "sha256:3a2522b1d9178575acee4adf8fd9f979f9c0449b00b4164bb63c3475ea6528ed", 868 | "sha256:3aa773580f85a28ffdf6f862e59cb5a3cc7ef6885121f2de3fca8d6ada4dbf3b", 869 | "sha256:3b5deaa3ee7180585a296af33e14c9b18c218d148e735c7accf78130765a47e3", 870 | "sha256:407af6d7e46593415f216c7f56ba087a9a42bd6dc2ecb86028760aa45b802bd7", 871 | "sha256:4c3c09fb674401f630626310bcaf6cd6285daf0d5e4c26d6e55ca26a2734e39b", 872 | "sha256:4c6717962247445b4f9e21c962ea61d2e884fc17df5ddf5e35863b016f8a1f03", 873 | "sha256:50446fae5681fc99f87e505d4e77c9407e683ab60c555ec302f9ac9bffa61103", 874 | "sha256:5057669b6a66aa9ca118a2a860159f0ee3acf837eda937bdd2a64f3431361a2d", 875 | "sha256:5dd90c5438b4f935c9d01fcbad3620253da89d19c1f5fca9158646407ed7df35", 876 | "sha256:659c815b5b8e2a55193ede2795c1e2349b8011497310bb936da7d4745652823b", 877 | "sha256:69b13fdf12878b10dc6003acc8d0abf3ad93e79813fd5f3812497c1c9fb9be49", 878 | "sha256:7a1cb80e35e1ccea3e11a48afe65d38744a0e0bde88795cc56a4d05b6e4f9d70", 879 | "sha256:7e6e3c52e6732c219c07bd97fff6c088f8df4dae3b79752ee3a817e6f32e177e", 880 | "sha256:7f42a8490c4fe854325504ce7a6e4796b207960dabb2cbafe3c3959cb00d1d7e", 881 | "sha256:84156313f258eafff716b2961644a4483a9be44a5d43551d554844d15d4d224e", 882 | "sha256:8578d6b8192e4c805e85f187bc530d0f52ba86c39172e61cd51f68fddd648103", 883 | "sha256:890167d5091279a27e2505ff0e1fb273f8c48c41d35c5b92adbf4af80e6b2ed6", 884 | "sha256:98e10634792ac0e9e7a92a76b4991b44c2325d3e7798270a808407355e7bb0a1", 885 | "sha256:9aadff9032e967865f9778485571e93908d27dab21d0fdfdec0ca779bb6f8ad9", 886 | "sha256:9f24f383a298a0c0f9b3113b982e21751a8ecde6615494a3f1470eb4a9d70e9e", 887 | "sha256:a73021b44813b5c84eda4a3af5826dd72356a900bac9bd9dd1f0f81ee1c22c2f", 888 | "sha256:afd96845e12638d2c44d213d4810a08f4dc4a563f9a98204b7428e567014b1cd", 889 | "sha256:b73ddf033d8cd4cc9dfed6324b1ad2a89ba52c410ef6877998422fcb9c23e3a8", 890 | "sha256:b8f490f5fad1767a1331df1259763b3bad7d7af12a75b950c2843ba319b2415f", 891 | "sha256:dbc5cd56fff1a6152ca59445178652756f4e509f672e49ccdf3d79c1043113a4", 892 | "sha256:eac8a3499754790187bb00574ab980df13e754777d346f85e0ff6df929bcd964", 893 | "sha256:eaed1c65f461a959284649e37b5051224f4db6ebdc84e40b5e65f2986f101a08" 894 | ], 895 | "index": "pypi", 896 | "version": "==2.8.4" 897 | }, 898 | "ptyprocess": { 899 | "hashes": [ 900 | "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", 901 | "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220" 902 | ], 903 | "markers": "os_name != 'nt'", 904 | "version": "==0.7.0" 905 | }, 906 | "py": { 907 | "hashes": [ 908 | "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", 909 | "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" 910 | ], 911 | "version": "==1.10.0" 912 | }, 913 | "pycparser": { 914 | "hashes": [ 915 | "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0", 916 | "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705" 917 | ], 918 | "version": "==2.20" 919 | }, 920 | "pygments": { 921 | "hashes": [ 922 | "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f", 923 | "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e" 924 | ], 925 | "version": "==2.9.0" 926 | }, 927 | "pyldavis": { 928 | "hashes": [ 929 | "sha256:022041229a8fa88eee354bb19896f3e87293a82f90be98ad0df1fdd494a0fa22" 930 | ], 931 | 
"index": "pypi", 932 | "version": "==2.1.2" 933 | }, 934 | "pyparsing": { 935 | "hashes": [ 936 | "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", 937 | "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" 938 | ], 939 | "version": "==2.4.7" 940 | }, 941 | "pyrsistent": { 942 | "hashes": [ 943 | "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e" 944 | ], 945 | "version": "==0.17.3" 946 | }, 947 | "pytest": { 948 | "hashes": [ 949 | "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b", 950 | "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890" 951 | ], 952 | "version": "==6.2.4" 953 | }, 954 | "python-dateutil": { 955 | "hashes": [ 956 | "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", 957 | "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02" 958 | ], 959 | "index": "pypi", 960 | "version": "==2.7.5" 961 | }, 962 | "pytz": { 963 | "hashes": [ 964 | "sha256:31cb35c89bd7d333cd32c5f278fca91b523b0834369e757f4c5641ea252236ca", 965 | "sha256:8e0f8568c118d3077b46be7d654cc8167fa916092e28320cde048e54bfc9f1e6" 966 | ], 967 | "index": "pypi", 968 | "version": "==2018.7" 969 | }, 970 | "pyyaml": { 971 | "hashes": [ 972 | "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", 973 | "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", 974 | "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", 975 | "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", 976 | "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", 977 | "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", 978 | "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", 979 | "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", 980 | "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", 981 | "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", 982 | "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", 983 | "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", 984 | "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", 985 | "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", 986 | "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", 987 | "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", 988 | "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", 989 | "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", 990 | "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", 991 | "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", 992 | "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", 993 | "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", 994 | "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", 995 | "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", 996 | "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", 997 | "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", 998 | "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", 999 | "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", 1000 | 
"sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" 1001 | ], 1002 | "version": "==5.4.1" 1003 | }, 1004 | "pyzmq": { 1005 | "hashes": [ 1006 | "sha256:089b974ec04d663b8685ac90e86bfe0e4da9d911ff3cf52cb765ff22408b102d", 1007 | "sha256:0ea7f4237991b0f745a4432c63e888450840bf8cb6c48b93fb7d62864f455529", 1008 | "sha256:0f0f27eaab9ba7b92d73d71c51d1a04464a1da6097a252d007922103253d2313", 1009 | "sha256:12ffcf33db6ba7c0e5aaf901e65517f5e2b719367b80bcbfad692f546a297c7a", 1010 | "sha256:1389b615917d4196962a9b469e947ba862a8ec6f5094a47da5e7a8d404bc07a4", 1011 | "sha256:18dd2ca4540c476558099891c129e6f94109971d110b549db2a9775c817cedbd", 1012 | "sha256:24fb5bb641f0b2aa25fc3832f4b6fc62430f14a7d328229fe994b2bcdc07c93a", 1013 | "sha256:285514956c08c7830da9d94e01f5414661a987831bd9f95e4d89cc8aaae8da10", 1014 | "sha256:41049cff5265e9cd75606aa2c90a76b9c80b98d8fe70ee08cf4af3cedb113358", 1015 | "sha256:461ed80d741692d9457ab820b1cc057ba9c37c394e67b647b639f623c8b321f6", 1016 | "sha256:4b8fb1b3174b56fd020e4b10232b1764e52cf7f3babcfb460c5253bdc48adad0", 1017 | "sha256:4c4fe69c7dc0d13d4ae180ad650bb900854367f3349d3c16f0569f6c6447f698", 1018 | "sha256:4e9b9a2f6944acdaf57316436c1acdcb30b8df76726bcf570ad9342bc5001654", 1019 | "sha256:6355f81947e1fe6e7bb9e123aeb3067264391d3ebe8402709f824ef8673fa6f3", 1020 | "sha256:68be16107f41563b9f67d93dff1c9f5587e0f76aa8fd91dc04c83d813bcdab1f", 1021 | "sha256:68e2c4505992ab5b89f976f89a9135742b18d60068f761bef994a6805f1cae0c", 1022 | "sha256:7040d6dd85ea65703904d023d7f57fab793d7ffee9ba9e14f3b897f34ff2415d", 1023 | "sha256:734ea6565c71fc2d03d5b8c7d0d7519c96bb5567e0396da1b563c24a4ac66f0c", 1024 | "sha256:9ee48413a2d3cd867fd836737b4c89c24cea1150a37f4856d82d20293fa7519f", 1025 | "sha256:a1c77796f395804d6002ff56a6a8168c1f98579896897ad7e35665a9b4a9eec5", 1026 | "sha256:b2f707b52e09098a7770503e39294ca6e22ae5138ffa1dd36248b6436d23d78e", 1027 | "sha256:bf80b2cec42d96117248b99d3c86e263a00469c840a778e6cb52d916f4fdf82c", 1028 | "sha256:c4674004ed64685a38bee222cd75afa769424ec603f9329f0dd4777138337f48", 1029 | "sha256:c6a81c9e6754465d09a87e3acd74d9bb1f0039b2d785c6899622f0afdb41d760", 1030 | "sha256:c6d0c32532a0519997e1ded767e184ebb8543bdb351f8eff8570bd461e874efc", 1031 | "sha256:c8fff75af4c7af92dce9f81fa2a83ed009c3e1f33ee8b5222db2ef80b94e242e", 1032 | "sha256:cb9f9fe1305ef69b65794655fd89b2209b11bff3e837de981820a8aa051ef914", 1033 | "sha256:d3ecfee2ee8d91ab2e08d2d8e89302c729b244e302bbc39c5b5dde42306ff003", 1034 | "sha256:d5e5be93e1714a59a535bbbc086b9e4fd2448c7547c5288548f6fd86353cad9e", 1035 | "sha256:de5806be66c9108e4dcdaced084e8ceae14100aa559e2d57b4f0cceb98c462de", 1036 | "sha256:f49755684a963731479ff3035d45a8185545b4c9f662d368bd349c419839886d", 1037 | "sha256:fc712a90401bcbf3fa25747f189d6dcfccbecc32712701cad25c6355589dac57" 1038 | ], 1039 | "version": "==22.1.0" 1040 | }, 1041 | "qtconsole": { 1042 | "hashes": [ 1043 | "sha256:12c734494901658787339dea9bbd82f3dc0d5e394071377a1c77b4a0954d7d8b", 1044 | "sha256:3a2adecc43ff201a08972fb2179df22e7b3a08d71b9ed680f46ad1bfd4fb9132" 1045 | ], 1046 | "version": "==5.1.0" 1047 | }, 1048 | "qtpy": { 1049 | "hashes": [ 1050 | "sha256:2db72c44b55d0fe1407be8fba35c838ad0d6d3bb81f23007886dc1fc0f459c8d", 1051 | "sha256:fa0b8363b363e89b2a6f49eddc162a04c0699ae95e109a6be3bb145a913190ea" 1052 | ], 1053 | "version": "==1.9.0" 1054 | }, 1055 | "requests": { 1056 | "hashes": [ 1057 | "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", 1058 | "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" 1059 | ], 1060 | 
"index": "pypi", 1061 | "version": "==2.20.0" 1062 | }, 1063 | "scikit-learn": { 1064 | "hashes": [ 1065 | "sha256:13136c6e4f6b808569f7f59299d439b2cd718f85d72ea14b5b6077d44ebc7d17", 1066 | "sha256:370919e3148253fd6552496c33a1e3d78290a336fc8d1b9349d9e9770fae6ec0", 1067 | "sha256:3775cca4ce3f94508bb7c8a6b113044b78c16b0a30a5c169ddeb6b9fe57a8a72", 1068 | "sha256:42f3c5bd893ed73bf47ccccf04dfb98fae743f397d688bb58c2238c0e6ec15d2", 1069 | "sha256:56cfa19c31edf62e6414da0a337efee37a4af488b135640e67238786b9be6ab3", 1070 | "sha256:5c9ff456d67ef9094e5ea272fff2be05d399a47fc30c6c8ed653b94bdf787bd1", 1071 | "sha256:5ca0ad32ee04abe0d4ba02c8d89d501b4e5e0304bdf4d45c2e9875a735b323a0", 1072 | "sha256:5db9e68a384ce80a17fc449d4d5d9b45025fe17cf468429599bf404eccb51049", 1073 | "sha256:72c194c5092e921d6107a8de8a5adae58c35bbc54e030ba624b6f02fd823bb21", 1074 | "sha256:871669cdb5b3481650fe3adff46eb97c455e30ecdc307eaf382ef90d4e2570ab", 1075 | "sha256:873245b03361710f47c5410a050dc56ee8ae97b9f8dcc6e3a81521ca2b64ad10", 1076 | "sha256:8b17fc29554c5c98d88142f895516a5bec2b6b61daa815e1193a64c868ad53d2", 1077 | "sha256:95b155ef6bf829ddfba6026f100ba8e4218b7171ecab97b2163bc9e8d206848f", 1078 | "sha256:a21cf8217e31a9e8e32c559246e05e6909981816152406945ae2e3e244dfcc1f", 1079 | "sha256:ba3fd442ae1a46830789b3578867daaf2c8409dcca6bf192e30e85beeabbfc2f", 1080 | "sha256:ce78bf4d10bd7e28807c36c6d2ab25a9934aaf80906ad987622a5e45627d91a2", 1081 | "sha256:d384e6f9a055b7a43492f9d27779adb717eb5dcf78b0603b01d0f070a608d241", 1082 | "sha256:d4da369614e55540c7e830ccdd17ab4fe5412ff8e803a4906d3ece393e2e3a63", 1083 | "sha256:ddc1eb10138ae93c136cc4b5945d3977f302b5d693592a4731b2805a7d7f2a74", 1084 | "sha256:e54a3dd1fe1f8124de90b93c48d120e6da2ea8df29b6895325df01ddc1bd8e26", 1085 | "sha256:ee8c3b1898c728b6e5b5659c233f547700a1fea13ce876b6fe7d3434c70cc0e0", 1086 | "sha256:f528c4b2bba652cf116f5cccf36f4db95a7f9cbfcd1ee549c4e8d0f8628783b5", 1087 | "sha256:f9abae483f4d52acd6f660addb1b67e35dc5748655250af479de2ea6aefc6df0" 1088 | ], 1089 | "index": "pypi", 1090 | "version": "==0.19.1" 1091 | }, 1092 | "scipy": { 1093 | "hashes": [ 1094 | "sha256:0611ee97296265af4a21164a5323f8c1b4e8e15c582d3dfa7610825900136bb7", 1095 | "sha256:08237eda23fd8e4e54838258b124f1cd141379a5f281b0a234ca99b38918c07a", 1096 | "sha256:0e645dbfc03f279e1946cf07c9c754c2a1859cb4a41c5f70b25f6b3a586b6dbd", 1097 | "sha256:0e9bb7efe5f051ea7212555b290e784b82f21ffd0f655405ac4f87e288b730b3", 1098 | "sha256:108c16640849e5827e7d51023efb3bd79244098c3f21e4897a1007720cb7ce37", 1099 | "sha256:340ef70f5b0f4e2b4b43c8c8061165911bc6b2ad16f8de85d9774545e2c47463", 1100 | "sha256:3ad73dfc6f82e494195144bd3a129c7241e761179b7cb5c07b9a0ede99c686f3", 1101 | "sha256:3b243c77a822cd034dad53058d7c2abf80062aa6f4a32e9799c95d6391558631", 1102 | "sha256:404a00314e85eca9d46b80929571b938e97a143b4f2ddc2b2b3c91a4c4ead9c5", 1103 | "sha256:423b3ff76957d29d1cce1bc0d62ebaf9a3fdfaf62344e3fdec14619bb7b5ad3a", 1104 | "sha256:42d9149a2fff7affdd352d157fa5717033767857c11bd55aa4a519a44343dfef", 1105 | "sha256:625f25a6b7d795e8830cb70439453c9f163e6870e710ec99eba5722775b318f3", 1106 | "sha256:698c6409da58686f2df3d6f815491fd5b4c2de6817a45379517c92366eea208f", 1107 | "sha256:729f8f8363d32cebcb946de278324ab43d28096f36593be6281ca1ee86ce6559", 1108 | "sha256:8190770146a4c8ed5d330d5b5ad1c76251c63349d25c96b3094875b930c44692", 1109 | "sha256:878352408424dffaa695ffedf2f9f92844e116686923ed9aa8626fc30d32cfd1", 1110 | "sha256:8b984f0821577d889f3c7ca8445564175fb4ac7c7f9659b7c60bef95b2b70e76", 1111 | 
"sha256:8f841bbc21d3dad2111a94c490fb0a591b8612ffea86b8e5571746ae76a3deac", 1112 | "sha256:c22b27371b3866c92796e5d7907e914f0e58a36d3222c5d436ddd3f0e354227a", 1113 | "sha256:d0cdd5658b49a722783b8b4f61a6f1f9c75042d0e29a30ccb6cacc9b25f6d9e2", 1114 | "sha256:d40dc7f494b06dcee0d303e51a00451b2da6119acbeaccf8369f2d29e28917ac", 1115 | "sha256:d8491d4784aceb1f100ddb8e31239c54e4afab8d607928a9f7ef2469ec35ae01", 1116 | "sha256:dfc5080c38dde3f43d8fbb9c0539a7839683475226cf83e4b24363b227dfe552", 1117 | "sha256:e24e22c8d98d3c704bb3410bce9b69e122a8de487ad3dbfe9985d154e5c03a40", 1118 | "sha256:e7a01e53163818d56eabddcafdc2090e9daba178aad05516b20c6591c4811020", 1119 | "sha256:ee677635393414930541a096fc8e61634304bb0153e4e02b75685b11eba14cae", 1120 | "sha256:f0521af1b722265d824d6ad055acfe9bd3341765735c44b5a4d0069e189a0f40", 1121 | "sha256:f25c281f12c0da726c6ed00535ca5d1622ec755c30a3f8eafef26cf43fede694" 1122 | ], 1123 | "index": "pypi", 1124 | "version": "==1.1.0" 1125 | }, 1126 | "seaborn": { 1127 | "hashes": [ 1128 | "sha256:42e627b24e849c2d3bbfd059e00005f6afbc4a76e4895baf44ae23fe8a4b09a5", 1129 | "sha256:76c83f794ca320fb6b23a7c6192d5e185a5fcf4758966a0c0a54baee46d41e2f" 1130 | ], 1131 | "index": "pypi", 1132 | "version": "==0.9.0" 1133 | }, 1134 | "send2trash": { 1135 | "hashes": [ 1136 | "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2", 1137 | "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b" 1138 | ], 1139 | "version": "==1.5.0" 1140 | }, 1141 | "six": { 1142 | "hashes": [ 1143 | "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", 1144 | "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" 1145 | ], 1146 | "index": "pypi", 1147 | "version": "==1.11.0" 1148 | }, 1149 | "sklearn": { 1150 | "hashes": [ 1151 | "sha256:e23001573aa194b834122d2b9562459bf5ae494a2d59ca6b8aa22c85a44c0e31" 1152 | ], 1153 | "index": "pypi", 1154 | "version": "==0.0" 1155 | }, 1156 | "sqlalchemy": { 1157 | "hashes": [ 1158 | "sha256:bfb8f464a5000b567ac1d350b9090cf081180ec1ab4aa87e7bca12dab25320ec" 1159 | ], 1160 | "index": "pypi", 1161 | "version": "==1.3.12" 1162 | }, 1163 | "sqlalchemy-utils": { 1164 | "hashes": [ 1165 | "sha256:45ab41c90bfb8dd676e83179be3088b3f2d64b613e3b590187163dd941c22d4c" 1166 | ], 1167 | "index": "pypi", 1168 | "version": "==0.33.6" 1169 | }, 1170 | "terminado": { 1171 | "hashes": [ 1172 | "sha256:048ce7b271ad1f94c48130844af1de163e54913b919f8c268c89b36a6d468d7c", 1173 | "sha256:46fd07c9dc7db7321922270d544a1f18eaa7a02fd6cd4438314f27a687cabbea" 1174 | ], 1175 | "version": "==0.10.0" 1176 | }, 1177 | "testpath": { 1178 | "hashes": [ 1179 | "sha256:1acf7a0bcd3004ae8357409fc33751e16d37ccc650921da1094a86581ad1e417", 1180 | "sha256:8044f9a0bab6567fc644a3593164e872543bb44225b0e24846e2c89237937589" 1181 | ], 1182 | "version": "==0.5.0" 1183 | }, 1184 | "toml": { 1185 | "hashes": [ 1186 | "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", 1187 | "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" 1188 | ], 1189 | "version": "==0.10.2" 1190 | }, 1191 | "tornado": { 1192 | "hashes": [ 1193 | "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb", 1194 | "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c", 1195 | "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288", 1196 | "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95", 1197 | "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558", 
1198 | "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe", 1199 | "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791", 1200 | "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d", 1201 | "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326", 1202 | "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b", 1203 | "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4", 1204 | "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c", 1205 | "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910", 1206 | "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5", 1207 | "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c", 1208 | "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0", 1209 | "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675", 1210 | "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd", 1211 | "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f", 1212 | "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c", 1213 | "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea", 1214 | "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6", 1215 | "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05", 1216 | "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd", 1217 | "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575", 1218 | "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a", 1219 | "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37", 1220 | "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795", 1221 | "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f", 1222 | "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32", 1223 | "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c", 1224 | "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01", 1225 | "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4", 1226 | "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2", 1227 | "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921", 1228 | "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085", 1229 | "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df", 1230 | "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102", 1231 | "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5", 1232 | "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68", 1233 | "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5" 1234 | ], 1235 | "version": "==6.1" 1236 | }, 1237 | "traitlets": { 1238 | "hashes": [ 1239 | "sha256:70b4c6a1d9019d7b4f6846832288f86998aa3b9207c6821f3578a6a6a467fe44", 1240 | "sha256:d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7" 1241 | ], 1242 | "version": "==4.3.3" 1243 | }, 1244 | "typing-extensions": { 1245 | "hashes": [ 1246 | "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", 1247 | "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", 1248 | "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" 1249 | ], 
1250 | "markers": "python_version < '3.8'", 1251 | "version": "==3.10.0.0" 1252 | }, 1253 | "urllib3": { 1254 | "hashes": [ 1255 | "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", 1256 | "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" 1257 | ], 1258 | "index": "pypi", 1259 | "version": "==1.24.3" 1260 | }, 1261 | "wcwidth": { 1262 | "hashes": [ 1263 | "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", 1264 | "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" 1265 | ], 1266 | "version": "==0.2.5" 1267 | }, 1268 | "webencodings": { 1269 | "hashes": [ 1270 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", 1271 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" 1272 | ], 1273 | "version": "==0.5.1" 1274 | }, 1275 | "werkzeug": { 1276 | "hashes": [ 1277 | "sha256:1de1db30d010ff1af14a009224ec49ab2329ad2cde454c8a708130642d579c42", 1278 | "sha256:6c1ec500dcdba0baa27600f6a22f6333d8b662d22027ff9f6202e3367413caa8" 1279 | ], 1280 | "version": "==2.0.1" 1281 | }, 1282 | "wheel": { 1283 | "hashes": [ 1284 | "sha256:78b5b185f0e5763c26ca1e324373aadd49182ca90e825f7853f4b2509215dc0e", 1285 | "sha256:e11eefd162658ea59a60a0f6c7d493a7190ea4b9a85e335b33489d9f17e0245e" 1286 | ], 1287 | "version": "==0.36.2" 1288 | }, 1289 | "widgetsnbextension": { 1290 | "hashes": [ 1291 | "sha256:079f87d87270bce047512400efd70238820751a11d2d8cb137a5a5bdbaf255c7", 1292 | "sha256:bd314f8ceb488571a5ffea6cc5b9fc6cba0adaf88a9d2386b93a489751938bcd" 1293 | ], 1294 | "version": "==3.5.1" 1295 | }, 1296 | "wordcloud": { 1297 | "hashes": [ 1298 | "sha256:0baf47567bd426bf65963d53a1aaa69af35c2e096dc0ad9073efd5833cccd20a", 1299 | "sha256:358f4ead931bc8297de3dbd3a26ce8d1e3fe27c1027cce091c1b7037e4ba4904", 1300 | "sha256:3971ca6042745169e9645b3bbce64b790f8c211ad7c7d265049992506e033212", 1301 | "sha256:4335deb87b7cd9f8a6ce12de0257d15f14f98874f326e7a839f27b2c8ac792ca", 1302 | "sha256:473b660baee64578dad272a18253b59245a337f5dfa3a186e32cf20b0eee4110", 1303 | "sha256:52d0772e385e38144be2bdb58a0d7817f2c80db0640e1efad699cff8ea86533d", 1304 | "sha256:60c9178ea11d6537f19dad7eb5387f2516737796827710c9409ab9602d9493c7", 1305 | "sha256:61156874a21fffb46cdfb3518bbc9865fbfe9973ecc36eff20e86792687e439b", 1306 | "sha256:950882b89298c318e5f7cf10027f00b4e09402e18f719cb656aea5209a57e5a9", 1307 | "sha256:a8d829e19431709c1310a505687fc7c0f869c48259f4a55b5bf387642ed6da46", 1308 | "sha256:ae6c0030a7fd09bd35713592ba005da9457f7d38f46dc807484c5e0a379d813c", 1309 | "sha256:b0256ca213eb52e5261307e64faaf242742ada1322bb9d5090ecdaa9b44540ee", 1310 | "sha256:b99157f068826697d93d2e5e61b1acff35591d5e534818368ccd56945b9a5f29", 1311 | "sha256:c59387b35af772626d4a87b986eb8ab29d3d7ffca6f94da95f4c3a0961407df3", 1312 | "sha256:d4b970d4d30bc9baec9e8b2d7e69fb9771576bb09d6b6f6ce6f22403ca58d6de", 1313 | "sha256:e9ae81e8dbb5953f8cf94083b990c760b179b4000dae2babd14827d61230fc69", 1314 | "sha256:fc3db0cc71e4d5666f732c5b4b3c04a0d58242579cb6c6e5146ffd2890cc5d57" 1315 | ], 1316 | "index": "pypi", 1317 | "version": "==1.6.0" 1318 | }, 1319 | "xlrd": { 1320 | "hashes": [ 1321 | "sha256:83a1d2f1091078fb3f65876753b5302c5cfb6a41de64b9587b74cefa75157148", 1322 | "sha256:8a21885513e6d915fe33a8ee5fdfa675433b61405ba13e2a69e62ee36828d7e2" 1323 | ], 1324 | "index": "pypi", 1325 | "version": "==1.1.0" 1326 | }, 1327 | "xlsxwriter": { 1328 | "hashes": [ 1329 | "sha256:027fa3d22ccfb5da5d77c29ed740aece286a9a6cc101b564f2f7ca11eb1d490b", 1330 | 
"sha256:5d480cee5babf3865227d5c81269d96be8e87914fc96403ca6fa1b1e4f64c080" 1331 | ], 1332 | "index": "pypi", 1333 | "version": "==1.2.6" 1334 | }, 1335 | "zipp": { 1336 | "hashes": [ 1337 | "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", 1338 | "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" 1339 | ], 1340 | "version": "==3.4.1" 1341 | } 1342 | }, 1343 | "develop": { 1344 | "bandit": { 1345 | "hashes": [ 1346 | "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952", 1347 | "sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065" 1348 | ], 1349 | "index": "pypi", 1350 | "version": "==1.6.2" 1351 | }, 1352 | "beautifulsoup4": { 1353 | "hashes": [ 1354 | "sha256:194ec62a25438adcb3fdb06378b26559eda1ea8a747367d34c33cef9c7f48d57", 1355 | "sha256:90f8e61121d6ae58362ce3bed8cd997efb00c914eae0ff3d363c32f9a9822d10", 1356 | "sha256:f0abd31228055d698bb392a826528ea08ebb9959e6bea17c606fd9c9009db938" 1357 | ], 1358 | "index": "pypi", 1359 | "version": "==4.6.3" 1360 | }, 1361 | "certifi": { 1362 | "hashes": [ 1363 | "sha256:339dc09518b07e2fa7eda5450740925974815557727d6bd35d319c1524a04a4c", 1364 | "sha256:6d58c986d22b038c8c0df30d639f23a3e6d172a05c3583e766f4c0b785c0986a" 1365 | ], 1366 | "index": "pypi", 1367 | "version": "==2018.10.15" 1368 | }, 1369 | "chardet": { 1370 | "hashes": [ 1371 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", 1372 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" 1373 | ], 1374 | "index": "pypi", 1375 | "version": "==3.0.4" 1376 | }, 1377 | "contractions": { 1378 | "hashes": [ 1379 | "sha256:7eb1665238be07e29555d03e44f122eda5973e04c2e80044a454383aa16112af", 1380 | "sha256:ba0165ed47c0cbaa81736d631391c3c1ce9c9ca33a83853ffe8f122b7bd0e033" 1381 | ], 1382 | "index": "pypi", 1383 | "version": "==0.0.17" 1384 | }, 1385 | "dill": { 1386 | "hashes": [ 1387 | "sha256:624dc244b94371bb2d6e7f40084228a2edfff02373fe20e018bef1ee92fdd5b3" 1388 | ], 1389 | "index": "pypi", 1390 | "version": "==0.2.8.2" 1391 | }, 1392 | "entrypoints": { 1393 | "hashes": [ 1394 | "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", 1395 | "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" 1396 | ], 1397 | "version": "==0.3" 1398 | }, 1399 | "et-xmlfile": { 1400 | "hashes": [ 1401 | "sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b" 1402 | ], 1403 | "index": "pypi", 1404 | "version": "==1.0.1" 1405 | }, 1406 | "flake8": { 1407 | "hashes": [ 1408 | "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", 1409 | "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" 1410 | ], 1411 | "index": "pypi", 1412 | "version": "==3.7.9" 1413 | }, 1414 | "gitdb": { 1415 | "hashes": [ 1416 | "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0", 1417 | "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005" 1418 | ], 1419 | "version": "==4.0.7" 1420 | }, 1421 | "gitpython": { 1422 | "hashes": [ 1423 | "sha256:29fe82050709760081f588dd50ce83504feddbebdc4da6956d02351552b1c135", 1424 | "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e" 1425 | ], 1426 | "version": "==3.1.17" 1427 | }, 1428 | "idna": { 1429 | "hashes": [ 1430 | "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e", 1431 | "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16" 1432 | ], 1433 | "index": "pypi", 1434 | "version": 
"==2.7" 1435 | }, 1436 | "imbalanced-learn": { 1437 | "hashes": [ 1438 | "sha256:d4d59b96780044ba97c6263e69cf145b9564241c402177f3d650b6c0c63fab93", 1439 | "sha256:e49550da4c98771afd0aa49db7710ab0d85e766754bad9fb767567c367d2b2e4" 1440 | ], 1441 | "index": "pypi", 1442 | "version": "==0.3.3" 1443 | }, 1444 | "importlib-metadata": { 1445 | "hashes": [ 1446 | "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00", 1447 | "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139" 1448 | ], 1449 | "markers": "python_version < '3.8'", 1450 | "version": "==4.5.0" 1451 | }, 1452 | "jdcal": { 1453 | "hashes": [ 1454 | "sha256:948fb8d079e63b4be7a69dd5f0cd618a0a57e80753de8248fd786a8a20658a07", 1455 | "sha256:ea0a5067c5f0f50ad4c7bdc80abad3d976604f6fb026b0b3a17a9d84bb9046c9" 1456 | ], 1457 | "index": "pypi", 1458 | "version": "==1.4" 1459 | }, 1460 | "mccabe": { 1461 | "hashes": [ 1462 | "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", 1463 | "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" 1464 | ], 1465 | "version": "==0.6.1" 1466 | }, 1467 | "nltk": { 1468 | "hashes": [ 1469 | "sha256:bed45551259aa2101381bbdd5df37d44ca2669c5c3dad72439fa459b29137d94" 1470 | ], 1471 | "index": "pypi", 1472 | "version": "==3.4.5" 1473 | }, 1474 | "numpy": { 1475 | "hashes": [ 1476 | "sha256:0df89ca13c25eaa1621a3f09af4c8ba20da849692dcae184cb55e80952c453fb", 1477 | "sha256:154c35f195fd3e1fad2569930ca51907057ae35e03938f89a8aedae91dd1b7c7", 1478 | "sha256:18e84323cdb8de3325e741a7a8dd4a82db74fde363dce32b625324c7b32aa6d7", 1479 | "sha256:1e8956c37fc138d65ded2d96ab3949bd49038cc6e8a4494b1515b0ba88c91565", 1480 | "sha256:23557bdbca3ccbde3abaa12a6e82299bc92d2b9139011f8c16ca1bb8c75d1e95", 1481 | "sha256:24fd645a5e5d224aa6e39d93e4a722fafa9160154f296fd5ef9580191c755053", 1482 | "sha256:36e36b6868e4440760d4b9b44587ea1dc1f06532858d10abba98e851e154ca70", 1483 | "sha256:3d734559db35aa3697dadcea492a423118c5c55d176da2f3be9c98d4803fc2a7", 1484 | "sha256:416a2070acf3a2b5d586f9a6507bb97e33574df5bd7508ea970bbf4fc563fa52", 1485 | "sha256:4a22dc3f5221a644dfe4a63bf990052cc674ef12a157b1056969079985c92816", 1486 | "sha256:4d8d3e5aa6087490912c14a3c10fbdd380b40b421c13920ff468163bc50e016f", 1487 | "sha256:4f41fd159fba1245e1958a99d349df49c616b133636e0cf668f169bce2aeac2d", 1488 | "sha256:561ef098c50f91fbac2cc9305b68c915e9eb915a74d9038ecf8af274d748f76f", 1489 | "sha256:56994e14b386b5c0a9b875a76d22d707b315fa037affc7819cda08b6d0489756", 1490 | "sha256:73a1f2a529604c50c262179fcca59c87a05ff4614fe8a15c186934d84d09d9a5", 1491 | "sha256:7da99445fd890206bfcc7419f79871ba8e73d9d9e6b82fe09980bc5bb4efc35f", 1492 | "sha256:99d59e0bcadac4aa3280616591fb7bcd560e2218f5e31d5223a2e12a1425d495", 1493 | "sha256:a4cc09489843c70b22e8373ca3dfa52b3fab778b57cf81462f1203b0852e95e3", 1494 | "sha256:a61dc29cfca9831a03442a21d4b5fd77e3067beca4b5f81f1a89a04a71cf93fa", 1495 | "sha256:b1853df739b32fa913cc59ad9137caa9cc3d97ff871e2bbd89c2a2a1d4a69451", 1496 | "sha256:b1f44c335532c0581b77491b7715a871d0dd72e97487ac0f57337ccf3ab3469b", 1497 | "sha256:b261e0cb0d6faa8fd6863af26d30351fd2ffdb15b82e51e81e96b9e9e2e7ba16", 1498 | "sha256:c857ae5dba375ea26a6228f98c195fec0898a0fd91bcf0e8a0cae6d9faf3eca7", 1499 | "sha256:cf5bb4a7d53a71bb6a0144d31df784a973b36d8687d615ef6a7e9b1809917a9b", 1500 | "sha256:db9814ff0457b46f2e1d494c1efa4111ca089e08c8b983635ebffb9c1573361f", 1501 | "sha256:df04f4bad8a359daa2ff74f8108ea051670cafbca533bb2636c58b16e962989e", 1502 | 
"sha256:ecf81720934a0e18526177e645cbd6a8a21bb0ddc887ff9738de07a1df5c6b61", 1503 | "sha256:edfa6fba9157e0e3be0f40168eb142511012683ac3dc82420bee4a3f3981b30e" 1504 | ], 1505 | "index": "pypi", 1506 | "version": "==1.15.4" 1507 | }, 1508 | "openpyxl": { 1509 | "hashes": [ 1510 | "sha256:022c0f3fa1e873cc0ba20651c54dd5e6276fc4ff150b4060723add4fc448645e" 1511 | ], 1512 | "index": "pypi", 1513 | "version": "==2.5.9" 1514 | }, 1515 | "pandas": { 1516 | "hashes": [ 1517 | "sha256:11975fad9edbdb55f1a560d96f91830e83e29bed6ad5ebf506abda09818eaf60", 1518 | "sha256:12e13d127ca1b585dd6f6840d3fe3fa6e46c36a6afe2dbc5cb0b57032c902e31", 1519 | "sha256:1c87fcb201e1e06f66e23a61a5fea9eeebfe7204a66d99df24600e3f05168051", 1520 | "sha256:242e9900de758e137304ad4b5663c2eff0d798c2c3b891250bd0bd97144579da", 1521 | "sha256:26c903d0ae1542890cb9abadb4adcb18f356b14c2df46e4ff657ae640e3ac9e7", 1522 | "sha256:2e1e88f9d3e5f107b65b59cd29f141995597b035d17cc5537e58142038942e1a", 1523 | "sha256:31b7a48b344c14691a8e92765d4023f88902ba3e96e2e4d0364d3453cdfd50db", 1524 | "sha256:4fd07a932b4352f8a8973761ab4e84f965bf81cc750fb38e04f01088ab901cb8", 1525 | "sha256:5b24ca47acf69222e82530e89111dd9d14f9b970ab2cd3a1c2c78f0c4fbba4f4", 1526 | "sha256:647b3b916cc8f6aeba240c8171be3ab799c3c1b2ea179a3be0bd2712c4237553", 1527 | "sha256:66b060946046ca27c0e03e9bec9bba3e0b918bafff84c425ca2cc2e157ce121e", 1528 | "sha256:6efa9fa6e1434141df8872d0fa4226fc301b17aacf37429193f9d70b426ea28f", 1529 | "sha256:be4715c9d8367e51dbe6bc6d05e205b1ae234f0dc5465931014aa1c4af44c1ba", 1530 | "sha256:bea90da782d8e945fccfc958585210d23de374fa9294a9481ed2abcef637ebfc", 1531 | "sha256:d318d77ab96f66a59e792a481e2701fba879e1a453aefeebdb17444fe204d1ed", 1532 | "sha256:d785fc08d6f4207437e900ffead930a61e634c5e4f980ba6d3dc03c9581748c7", 1533 | "sha256:de9559287c4fe8da56e8c3878d2374abc19d1ba2b807bfa7553e912a8e5ba87c", 1534 | "sha256:f4f98b190bb918ac0bc0e3dd2ab74ff3573da9f43106f6dba6385406912ec00f", 1535 | "sha256:f71f1a7e2d03758f6e957896ed696254e2bc83110ddbc6942018f1a232dd9dad", 1536 | "sha256:fb944c8f0b0ab5c1f7846c686bc4cdf8cde7224655c12edcd59d5212cd57bec0" 1537 | ], 1538 | "index": "pypi", 1539 | "version": "==0.23.4" 1540 | }, 1541 | "parameterized": { 1542 | "hashes": [ 1543 | "sha256:6a94dbea30c6abde99fd4c2f2042c1bf7f980e48908bf92ead62394f93cf57ed", 1544 | "sha256:ea0326ba5bbbe7c427329a27b75003410df07d1173ca254976f8f5a64922c322" 1545 | ], 1546 | "index": "pypi", 1547 | "version": "==0.7.1" 1548 | }, 1549 | "pbr": { 1550 | "hashes": [ 1551 | "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd", 1552 | "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4" 1553 | ], 1554 | "version": "==5.6.0" 1555 | }, 1556 | "pycodestyle": { 1557 | "hashes": [ 1558 | "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", 1559 | "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" 1560 | ], 1561 | "version": "==2.5.0" 1562 | }, 1563 | "pyflakes": { 1564 | "hashes": [ 1565 | "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", 1566 | "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2" 1567 | ], 1568 | "version": "==2.1.1" 1569 | }, 1570 | "python-dateutil": { 1571 | "hashes": [ 1572 | "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", 1573 | "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02" 1574 | ], 1575 | "index": "pypi", 1576 | "version": "==2.7.5" 1577 | }, 1578 | "pytz": { 1579 | "hashes": [ 1580 | 
"sha256:31cb35c89bd7d333cd32c5f278fca91b523b0834369e757f4c5641ea252236ca", 1581 | "sha256:8e0f8568c118d3077b46be7d654cc8167fa916092e28320cde048e54bfc9f1e6" 1582 | ], 1583 | "index": "pypi", 1584 | "version": "==2018.7" 1585 | }, 1586 | "pyyaml": { 1587 | "hashes": [ 1588 | "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", 1589 | "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", 1590 | "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", 1591 | "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", 1592 | "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", 1593 | "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", 1594 | "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", 1595 | "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", 1596 | "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", 1597 | "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", 1598 | "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", 1599 | "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", 1600 | "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", 1601 | "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", 1602 | "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", 1603 | "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", 1604 | "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", 1605 | "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", 1606 | "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", 1607 | "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", 1608 | "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", 1609 | "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", 1610 | "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", 1611 | "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", 1612 | "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", 1613 | "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", 1614 | "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", 1615 | "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", 1616 | "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" 1617 | ], 1618 | "version": "==5.4.1" 1619 | }, 1620 | "requests": { 1621 | "hashes": [ 1622 | "sha256:99dcfdaaeb17caf6e526f32b6a7b780461512ab3f1d992187801694cba42770c", 1623 | "sha256:a84b8c9ab6239b578f22d1c21d51b696dcfe004032bb80ea832398d6909d7279" 1624 | ], 1625 | "index": "pypi", 1626 | "version": "==2.20.0" 1627 | }, 1628 | "scikit-learn": { 1629 | "hashes": [ 1630 | "sha256:13136c6e4f6b808569f7f59299d439b2cd718f85d72ea14b5b6077d44ebc7d17", 1631 | "sha256:370919e3148253fd6552496c33a1e3d78290a336fc8d1b9349d9e9770fae6ec0", 1632 | "sha256:3775cca4ce3f94508bb7c8a6b113044b78c16b0a30a5c169ddeb6b9fe57a8a72", 1633 | "sha256:42f3c5bd893ed73bf47ccccf04dfb98fae743f397d688bb58c2238c0e6ec15d2", 1634 | "sha256:56cfa19c31edf62e6414da0a337efee37a4af488b135640e67238786b9be6ab3", 1635 | "sha256:5c9ff456d67ef9094e5ea272fff2be05d399a47fc30c6c8ed653b94bdf787bd1", 1636 | 
"sha256:5ca0ad32ee04abe0d4ba02c8d89d501b4e5e0304bdf4d45c2e9875a735b323a0", 1637 | "sha256:5db9e68a384ce80a17fc449d4d5d9b45025fe17cf468429599bf404eccb51049", 1638 | "sha256:72c194c5092e921d6107a8de8a5adae58c35bbc54e030ba624b6f02fd823bb21", 1639 | "sha256:871669cdb5b3481650fe3adff46eb97c455e30ecdc307eaf382ef90d4e2570ab", 1640 | "sha256:873245b03361710f47c5410a050dc56ee8ae97b9f8dcc6e3a81521ca2b64ad10", 1641 | "sha256:8b17fc29554c5c98d88142f895516a5bec2b6b61daa815e1193a64c868ad53d2", 1642 | "sha256:95b155ef6bf829ddfba6026f100ba8e4218b7171ecab97b2163bc9e8d206848f", 1643 | "sha256:a21cf8217e31a9e8e32c559246e05e6909981816152406945ae2e3e244dfcc1f", 1644 | "sha256:ba3fd442ae1a46830789b3578867daaf2c8409dcca6bf192e30e85beeabbfc2f", 1645 | "sha256:ce78bf4d10bd7e28807c36c6d2ab25a9934aaf80906ad987622a5e45627d91a2", 1646 | "sha256:d384e6f9a055b7a43492f9d27779adb717eb5dcf78b0603b01d0f070a608d241", 1647 | "sha256:d4da369614e55540c7e830ccdd17ab4fe5412ff8e803a4906d3ece393e2e3a63", 1648 | "sha256:ddc1eb10138ae93c136cc4b5945d3977f302b5d693592a4731b2805a7d7f2a74", 1649 | "sha256:e54a3dd1fe1f8124de90b93c48d120e6da2ea8df29b6895325df01ddc1bd8e26", 1650 | "sha256:ee8c3b1898c728b6e5b5659c233f547700a1fea13ce876b6fe7d3434c70cc0e0", 1651 | "sha256:f528c4b2bba652cf116f5cccf36f4db95a7f9cbfcd1ee549c4e8d0f8628783b5", 1652 | "sha256:f9abae483f4d52acd6f660addb1b67e35dc5748655250af479de2ea6aefc6df0" 1653 | ], 1654 | "index": "pypi", 1655 | "version": "==0.19.1" 1656 | }, 1657 | "scipy": { 1658 | "hashes": [ 1659 | "sha256:0611ee97296265af4a21164a5323f8c1b4e8e15c582d3dfa7610825900136bb7", 1660 | "sha256:08237eda23fd8e4e54838258b124f1cd141379a5f281b0a234ca99b38918c07a", 1661 | "sha256:0e645dbfc03f279e1946cf07c9c754c2a1859cb4a41c5f70b25f6b3a586b6dbd", 1662 | "sha256:0e9bb7efe5f051ea7212555b290e784b82f21ffd0f655405ac4f87e288b730b3", 1663 | "sha256:108c16640849e5827e7d51023efb3bd79244098c3f21e4897a1007720cb7ce37", 1664 | "sha256:340ef70f5b0f4e2b4b43c8c8061165911bc6b2ad16f8de85d9774545e2c47463", 1665 | "sha256:3ad73dfc6f82e494195144bd3a129c7241e761179b7cb5c07b9a0ede99c686f3", 1666 | "sha256:3b243c77a822cd034dad53058d7c2abf80062aa6f4a32e9799c95d6391558631", 1667 | "sha256:404a00314e85eca9d46b80929571b938e97a143b4f2ddc2b2b3c91a4c4ead9c5", 1668 | "sha256:423b3ff76957d29d1cce1bc0d62ebaf9a3fdfaf62344e3fdec14619bb7b5ad3a", 1669 | "sha256:42d9149a2fff7affdd352d157fa5717033767857c11bd55aa4a519a44343dfef", 1670 | "sha256:625f25a6b7d795e8830cb70439453c9f163e6870e710ec99eba5722775b318f3", 1671 | "sha256:698c6409da58686f2df3d6f815491fd5b4c2de6817a45379517c92366eea208f", 1672 | "sha256:729f8f8363d32cebcb946de278324ab43d28096f36593be6281ca1ee86ce6559", 1673 | "sha256:8190770146a4c8ed5d330d5b5ad1c76251c63349d25c96b3094875b930c44692", 1674 | "sha256:878352408424dffaa695ffedf2f9f92844e116686923ed9aa8626fc30d32cfd1", 1675 | "sha256:8b984f0821577d889f3c7ca8445564175fb4ac7c7f9659b7c60bef95b2b70e76", 1676 | "sha256:8f841bbc21d3dad2111a94c490fb0a591b8612ffea86b8e5571746ae76a3deac", 1677 | "sha256:c22b27371b3866c92796e5d7907e914f0e58a36d3222c5d436ddd3f0e354227a", 1678 | "sha256:d0cdd5658b49a722783b8b4f61a6f1f9c75042d0e29a30ccb6cacc9b25f6d9e2", 1679 | "sha256:d40dc7f494b06dcee0d303e51a00451b2da6119acbeaccf8369f2d29e28917ac", 1680 | "sha256:d8491d4784aceb1f100ddb8e31239c54e4afab8d607928a9f7ef2469ec35ae01", 1681 | "sha256:dfc5080c38dde3f43d8fbb9c0539a7839683475226cf83e4b24363b227dfe552", 1682 | "sha256:e24e22c8d98d3c704bb3410bce9b69e122a8de487ad3dbfe9985d154e5c03a40", 1683 | "sha256:e7a01e53163818d56eabddcafdc2090e9daba178aad05516b20c6591c4811020", 1684 | 
"sha256:ee677635393414930541a096fc8e61634304bb0153e4e02b75685b11eba14cae", 1685 | "sha256:f0521af1b722265d824d6ad055acfe9bd3341765735c44b5a4d0069e189a0f40", 1686 | "sha256:f25c281f12c0da726c6ed00535ca5d1622ec755c30a3f8eafef26cf43fede694" 1687 | ], 1688 | "index": "pypi", 1689 | "version": "==1.1.0" 1690 | }, 1691 | "six": { 1692 | "hashes": [ 1693 | "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9", 1694 | "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb" 1695 | ], 1696 | "index": "pypi", 1697 | "version": "==1.11.0" 1698 | }, 1699 | "smmap": { 1700 | "hashes": [ 1701 | "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182", 1702 | "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2" 1703 | ], 1704 | "version": "==4.0.0" 1705 | }, 1706 | "sqlalchemy": { 1707 | "hashes": [ 1708 | "sha256:bfb8f464a5000b567ac1d350b9090cf081180ec1ab4aa87e7bca12dab25320ec" 1709 | ], 1710 | "index": "pypi", 1711 | "version": "==1.3.12" 1712 | }, 1713 | "sqlalchemy-utils": { 1714 | "hashes": [ 1715 | "sha256:45ab41c90bfb8dd676e83179be3088b3f2d64b613e3b590187163dd941c22d4c" 1716 | ], 1717 | "index": "pypi", 1718 | "version": "==0.33.6" 1719 | }, 1720 | "stevedore": { 1721 | "hashes": [ 1722 | "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee", 1723 | "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a" 1724 | ], 1725 | "version": "==3.3.0" 1726 | }, 1727 | "typing-extensions": { 1728 | "hashes": [ 1729 | "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497", 1730 | "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342", 1731 | "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84" 1732 | ], 1733 | "markers": "python_version < '3.8'", 1734 | "version": "==3.10.0.0" 1735 | }, 1736 | "urllib3": { 1737 | "hashes": [ 1738 | "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", 1739 | "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" 1740 | ], 1741 | "index": "pypi", 1742 | "version": "==1.24.3" 1743 | }, 1744 | "xlrd": { 1745 | "hashes": [ 1746 | "sha256:83a1d2f1091078fb3f65876753b5302c5cfb6a41de64b9587b74cefa75157148", 1747 | "sha256:8a21885513e6d915fe33a8ee5fdfa675433b61405ba13e2a69e62ee36828d7e2" 1748 | ], 1749 | "index": "pypi", 1750 | "version": "==1.1.0" 1751 | }, 1752 | "zipp": { 1753 | "hashes": [ 1754 | "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76", 1755 | "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098" 1756 | ], 1757 | "version": "==3.4.1" 1758 | } 1759 | } 1760 | } 1761 | --------------------------------------------------------------------------------