├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── poetry.lock ├── pyproject.toml ├── requirements.txt └── scripts ├── create-session-speakers.py ├── generate_session_tweet_csv.py ├── import_access_codes.py ├── import_access_codes_test.csv ├── import_discount_codes.py ├── reschedule.py └── send_emails_via_sendgrid.py /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.toptal.com/developers/gitignore/api/python 3 | # Edit at https://www.toptal.com/developers/gitignore?templates=python 4 | 5 | ### Python ### 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | pip-wheel-metadata/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | pytestdebug.log 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | doc/_build/ 80 | 81 | # PyBuilder 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 102 | __pypackages__/ 103 | 104 | # Celery stuff 105 | celerybeat-schedule 106 | celerybeat.pid 107 | 108 | # SageMath parsed files 109 | *.sage.py 110 | 111 | # Environments 112 | .env 113 | .venv 114 | env/ 115 | venv/ 116 | ENV/ 117 | env.bak/ 118 | venv.bak/ 119 | pythonenv* 120 | 121 | # Spyder project settings 122 | .spyderproject 123 | .spyproject 124 | 125 | # Rope project settings 126 | .ropeproject 127 | 128 | # mkdocs documentation 129 | /site 130 | 131 | # mypy 132 | .mypy_cache/ 133 | .dmypy.json 134 | dmypy.json 135 | 136 | # Pyre type checker 137 | .pyre/ 138 | 139 | # pytype static type analyzer 140 | .pytype/ 141 | 142 | # profiling data 143 | .prof 144 | 145 | # End of https://www.toptal.com/developers/gitignore/api/python 146 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: '.venv' 2 | repos: 3 | - repo: https://github.com/asottile/pyupgrade 4 | rev: v2.9.0 5 | hooks: 6 | - id: pyupgrade 7 | args: [--py38-plus] 8 | - repo: https://github.com/hadialqattan/pycln 9 | rev: 0.0.1-beta.2 10 | hooks: 11 | - id: pycln 12 | args: [--config=pyproject.toml] 13 | - repo: https://github.com/pycqa/isort 14 | rev: 5.7.0 15 | hooks: 16 | - id: isort 17 | - repo: https://github.com/psf/black 18 | rev: 20.8b1 19 | hooks: 20 | - id: black 21 | language_version: python3.8 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # open-event-scripts 2 | 3 | Useful scripts for managing events using the Open Event API 4 | 5 | ## System Requirements 6 | Before installing open-event-scripts, make sure that your system meets the following requirements: 7 | 8 | - Python 3.8 or newer 9 | - the pip package manager 10 | 11 | ## Installation Steps 12 | Follow these steps to install open-event-scripts: 13 | 14 | 1.
Clone the open-event-scripts repository: 15 | 16 | ``` git clone https://github.com/fossasia/open-event-scripts.git ``` 17 | 18 | 2. Change into the open-event-scripts directory: 19 | 20 | ``` cd open-event-scripts ``` 21 | 22 | 3. Install the required Python packages using pip: 23 | 24 | ``` pip install -r requirements.txt ``` 25 | 26 | Or install the dependencies using Poetry: 27 | 28 | ``` poetry install ``` 29 | 30 | Then activate the Poetry virtual environment: 31 | ``` poetry shell ``` 32 | 33 | 34 | ## Usage 35 | Use this script to change the event schedule: 36 | 37 | ``` 38 | python scripts/reschedule.py 39 | ``` 40 | 41 | Use this script to generate a CSV of session tweets: 42 | 43 | ``` 44 | python scripts/generate_session_tweet_csv.py 45 | ``` 46 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "appdirs" 5 | version = "1.4.4" 6 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 7 | optional = false 8 | python-versions = "*" 9 | files = [ 10 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, 11 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, 12 | ] 13 | 14 | [[package]] 15 | name = "black" 16 | version = "20.8b1" 17 | description = "The uncompromising code formatter." 18 | optional = false 19 | python-versions = ">=3.6" 20 | files = [ 21 | {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, 22 | ] 23 | 24 | [package.dependencies] 25 | appdirs = "*" 26 | click = ">=7.1.2" 27 | mypy-extensions = ">=0.4.3" 28 | pathspec = ">=0.6,<1" 29 | regex = ">=2020.1.8" 30 | toml = ">=0.10.1" 31 | typed-ast = ">=1.4.0" 32 | typing-extensions = ">=3.7.4" 33 | 34 | [package.extras] 35 | colorama = ["colorama (>=0.4.3)"] 36 | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] 37 | 38 | [[package]] 39 | name = "certifi" 40 | version = "2020.12.5" 41 | description = "Python package for providing Mozilla's CA Bundle." 42 | optional = false 43 | python-versions = "*" 44 | files = [ 45 | {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, 46 | {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, 47 | ] 48 | 49 | [[package]] 50 | name = "cfgv" 51 | version = "3.2.0" 52 | description = "Validate configuration and produce human readable error messages."
53 | optional = false 54 | python-versions = ">=3.6.1" 55 | files = [ 56 | {file = "cfgv-3.2.0-py2.py3-none-any.whl", hash = "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d"}, 57 | {file = "cfgv-3.2.0.tar.gz", hash = "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1"}, 58 | ] 59 | 60 | [[package]] 61 | name = "chardet" 62 | version = "4.0.0" 63 | description = "Universal encoding detector for Python 2 and 3" 64 | optional = false 65 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 66 | files = [ 67 | {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, 68 | {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, 69 | ] 70 | 71 | [[package]] 72 | name = "click" 73 | version = "7.1.2" 74 | description = "Composable command line interface toolkit" 75 | optional = false 76 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 77 | files = [ 78 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, 79 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, 80 | ] 81 | 82 | [[package]] 83 | name = "distlib" 84 | version = "0.3.1" 85 | description = "Distribution utilities" 86 | optional = false 87 | python-versions = "*" 88 | files = [ 89 | {file = "distlib-0.3.1-py2.py3-none-any.whl", hash = "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"}, 90 | {file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"}, 91 | ] 92 | 93 | [[package]] 94 | name = "et-xmlfile" 95 | version = "1.1.0" 96 | description = "An implementation of lxml.xmlfile for the standard library" 97 | optional = false 98 | python-versions = ">=3.6" 99 | files = [ 100 | {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, 101 | {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, 102 | ] 103 | 104 | [[package]] 105 | name = "filelock" 106 | version = "3.0.12" 107 | description = "A platform independent file lock." 
108 | optional = false 109 | python-versions = "*" 110 | files = [ 111 | {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, 112 | {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, 113 | ] 114 | 115 | [[package]] 116 | name = "identify" 117 | version = "2.0.0" 118 | description = "File identification library for Python" 119 | optional = false 120 | python-versions = ">=3.6.1" 121 | files = [ 122 | {file = "identify-2.0.0-py2.py3-none-any.whl", hash = "sha256:9cdd81e5d2b6e76c3006d5226316dd947bd6324fbeebb881bec489202fa09d3a"}, 123 | {file = "identify-2.0.0.tar.gz", hash = "sha256:b99aa309329c4fea679463eb35d169f3fbe13e66e9dd6162ad1856cbeb03dcbd"}, 124 | ] 125 | 126 | [package.extras] 127 | license = ["editdistance"] 128 | 129 | [[package]] 130 | name = "idna" 131 | version = "2.10" 132 | description = "Internationalized Domain Names in Applications (IDNA)" 133 | optional = false 134 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 135 | files = [ 136 | {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, 137 | {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, 138 | ] 139 | 140 | [[package]] 141 | name = "mypy-extensions" 142 | version = "0.4.3" 143 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 144 | optional = false 145 | python-versions = "*" 146 | files = [ 147 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 148 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 149 | ] 150 | 151 | [[package]] 152 | name = "nodeenv" 153 | version = "1.5.0" 154 | description = "Node.js virtual environment builder" 155 | optional = false 156 | python-versions = "*" 157 | files = [ 158 | {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"}, 159 | {file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"}, 160 | ] 161 | 162 | [[package]] 163 | name = "numpy" 164 | version = "1.24.4" 165 | description = "Fundamental package for array computing in Python" 166 | optional = false 167 | python-versions = ">=3.8" 168 | files = [ 169 | {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, 170 | {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, 171 | {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, 172 | {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, 173 | {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, 174 | {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, 175 | {file = 
"numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, 176 | {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, 177 | {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, 178 | {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, 179 | {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, 180 | {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, 181 | {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, 182 | {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, 183 | {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, 184 | {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, 185 | {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, 186 | {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, 187 | {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, 188 | {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, 189 | {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, 190 | {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, 191 | {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, 192 | {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, 193 | {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, 194 | {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, 195 | {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, 196 | {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, 197 | ] 198 | 199 | [[package]] 200 | name = "numpy" 201 | version = "1.25.1" 202 | description = "Fundamental package for array computing in Python" 203 | optional = false 204 | python-versions = ">=3.9" 205 | files = [ 206 | {file = 
"numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, 207 | {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, 208 | {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, 209 | {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, 210 | {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, 211 | {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, 212 | {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, 213 | {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, 214 | {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, 215 | {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, 216 | {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, 217 | {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, 218 | {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, 219 | {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, 220 | {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, 221 | {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, 222 | {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, 223 | {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, 224 | {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, 225 | {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, 226 | {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, 227 | {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, 228 | {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, 229 | {file = 
"numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, 230 | {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, 231 | ] 232 | 233 | [[package]] 234 | name = "openpyxl" 235 | version = "3.1.2" 236 | description = "A Python library to read/write Excel 2010 xlsx/xlsm files" 237 | optional = false 238 | python-versions = ">=3.6" 239 | files = [ 240 | {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, 241 | {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, 242 | ] 243 | 244 | [package.dependencies] 245 | et-xmlfile = "*" 246 | 247 | [[package]] 248 | name = "pandas" 249 | version = "2.0.3" 250 | description = "Powerful data structures for data analysis, time series, and statistics" 251 | optional = false 252 | python-versions = ">=3.8" 253 | files = [ 254 | {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, 255 | {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, 256 | {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, 257 | {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, 258 | {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, 259 | {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, 260 | {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, 261 | {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, 262 | {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, 263 | {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, 264 | {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, 265 | {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, 266 | {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, 267 | {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, 268 | {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, 269 | {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, 270 | {file = "pandas-2.0.3-cp38-cp38-win32.whl", 
hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, 271 | {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, 272 | {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, 273 | {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, 274 | {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, 275 | {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, 276 | {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, 277 | {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, 278 | {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, 279 | ] 280 | 281 | [package.dependencies] 282 | numpy = [ 283 | {version = ">=1.20.3", markers = "python_version < \"3.10\""}, 284 | {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, 285 | {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, 286 | ] 287 | python-dateutil = ">=2.8.2" 288 | pytz = ">=2020.1" 289 | tzdata = ">=2022.1" 290 | 291 | [package.extras] 292 | all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] 293 | aws = ["s3fs (>=2021.08.0)"] 294 | clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] 295 | compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] 296 | computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] 297 | excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] 298 | feather = ["pyarrow (>=7.0.0)"] 299 | fss = ["fsspec (>=2021.07.0)"] 300 | gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] 301 | hdf5 = ["tables (>=3.6.1)"] 302 | html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] 303 | mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] 304 | output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] 305 | parquet = ["pyarrow (>=7.0.0)"] 306 | performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] 307 | plot = ["matplotlib (>=3.6.1)"] 308 | postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] 309 | spss = ["pyreadstat (>=1.1.2)"] 310 | sql-other = ["SQLAlchemy (>=1.4.16)"] 311 | test = ["hypothesis (>=6.34.2)", "pytest 
(>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] 312 | xml = ["lxml (>=4.6.3)"] 313 | 314 | [[package]] 315 | name = "pathspec" 316 | version = "0.8.1" 317 | description = "Utility library for gitignore style pattern matching of file paths." 318 | optional = false 319 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 320 | files = [ 321 | {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, 322 | {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, 323 | ] 324 | 325 | [[package]] 326 | name = "pre-commit" 327 | version = "2.10.1" 328 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 329 | optional = false 330 | python-versions = ">=3.6.1" 331 | files = [ 332 | {file = "pre_commit-2.10.1-py2.py3-none-any.whl", hash = "sha256:16212d1fde2bed88159287da88ff03796863854b04dc9f838a55979325a3d20e"}, 333 | {file = "pre_commit-2.10.1.tar.gz", hash = "sha256:399baf78f13f4de82a29b649afd74bef2c4e28eb4f021661fc7f29246e8c7a3a"}, 334 | ] 335 | 336 | [package.dependencies] 337 | cfgv = ">=2.0.0" 338 | identify = ">=1.0.0" 339 | nodeenv = ">=0.11.1" 340 | pyyaml = ">=5.1" 341 | toml = "*" 342 | virtualenv = ">=20.0.8" 343 | 344 | [[package]] 345 | name = "python-dateutil" 346 | version = "2.8.2" 347 | description = "Extensions to the standard Python datetime module" 348 | optional = false 349 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 350 | files = [ 351 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 352 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 353 | ] 354 | 355 | [package.dependencies] 356 | six = ">=1.5" 357 | 358 | [[package]] 359 | name = "python-http-client" 360 | version = "3.3.7" 361 | description = "HTTP REST client, simplified for Python" 362 | optional = false 363 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 364 | files = [ 365 | {file = "python_http_client-3.3.7-py3-none-any.whl", hash = "sha256:ad371d2bbedc6ea15c26179c6222a78bc9308d272435ddf1d5c84f068f249a36"}, 366 | {file = "python_http_client-3.3.7.tar.gz", hash = "sha256:bf841ee45262747e00dec7ee9971dfb8c7d83083f5713596488d67739170cea0"}, 367 | ] 368 | 369 | [[package]] 370 | name = "pytz" 371 | version = "2021.1" 372 | description = "World timezone definitions, modern and historical" 373 | optional = false 374 | python-versions = "*" 375 | files = [ 376 | {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, 377 | {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, 378 | ] 379 | 380 | [[package]] 381 | name = "pyyaml" 382 | version = "5.4.1" 383 | description = "YAML parser and emitter for Python" 384 | optional = false 385 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 386 | files = [ 387 | {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, 388 | {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, 389 | {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = 
"sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, 390 | {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, 391 | {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, 392 | {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, 393 | {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, 394 | {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, 395 | {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, 396 | {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, 397 | {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, 398 | {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, 399 | {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, 400 | {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, 401 | {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, 402 | {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, 403 | {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, 404 | {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, 405 | {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, 406 | {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, 407 | {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, 408 | {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, 409 | {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, 410 | {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, 411 | {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, 412 | {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, 413 | {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, 414 | {file = 
"PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, 415 | {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, 416 | ] 417 | 418 | [[package]] 419 | name = "regex" 420 | version = "2020.11.13" 421 | description = "Alternative regular expression module, to replace re." 422 | optional = false 423 | python-versions = "*" 424 | files = [ 425 | {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, 426 | {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, 427 | {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, 428 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, 429 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, 430 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, 431 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, 432 | {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, 433 | {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, 434 | {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, 435 | {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, 436 | {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, 437 | {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, 438 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, 439 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, 440 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, 441 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, 442 | {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, 443 | {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, 444 | {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, 445 | {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, 446 | {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, 447 | {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, 448 | {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, 449 | {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, 450 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, 451 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, 452 | {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, 453 | {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, 454 | {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, 455 | {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, 456 | {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, 457 | {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, 458 | {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, 459 | {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, 460 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, 461 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, 462 | {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, 463 | {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, 464 | {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, 465 | {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, 466 | ] 467 | 468 | [[package]] 469 | name = "requests" 470 | version = "2.25.1" 471 | description = "Python HTTP for Humans." 
472 | optional = false 473 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 474 | files = [ 475 | {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, 476 | {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, 477 | ] 478 | 479 | [package.dependencies] 480 | certifi = ">=2017.4.17" 481 | chardet = ">=3.0.2,<5" 482 | idna = ">=2.5,<3" 483 | urllib3 = ">=1.21.1,<1.27" 484 | 485 | [package.extras] 486 | security = ["cryptography (>=1.3.4)", "pyOpenSSL (>=0.14)"] 487 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] 488 | 489 | [[package]] 490 | name = "sendgrid" 491 | version = "6.10.0" 492 | description = "Twilio SendGrid library for Python" 493 | optional = false 494 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 495 | files = [ 496 | {file = "sendgrid-6.10.0-py3-none-any.whl", hash = "sha256:522b30fc98306496208c5d8bdd5642cd6a2fd65cad487475f57f9098ce880604"}, 497 | {file = "sendgrid-6.10.0.tar.gz", hash = "sha256:9b15050c6f8826ee576f76a786efb15d956639f485478cbddd79ed69e8350ab8"}, 498 | ] 499 | 500 | [package.dependencies] 501 | python-http-client = ">=3.2.1" 502 | starkbank-ecdsa = ">=2.0.1" 503 | 504 | [[package]] 505 | name = "six" 506 | version = "1.15.0" 507 | description = "Python 2 and 3 compatibility utilities" 508 | optional = false 509 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 510 | files = [ 511 | {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, 512 | {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, 513 | ] 514 | 515 | [[package]] 516 | name = "starkbank-ecdsa" 517 | version = "2.2.0" 518 | description = "A lightweight and fast pure python ECDSA library" 519 | optional = false 520 | python-versions = "*" 521 | files = [ 522 | {file = "starkbank-ecdsa-2.2.0.tar.gz", hash = "sha256:9399c3371b899d4a235b68a1ed7919d202fbf024bd2c863ae8ebdad343c2a63a"}, 523 | ] 524 | 525 | [[package]] 526 | name = "toml" 527 | version = "0.10.2" 528 | description = "Python Library for Tom's Obvious, Minimal Language" 529 | optional = false 530 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 531 | files = [ 532 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 533 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 534 | ] 535 | 536 | [[package]] 537 | name = "typed-ast" 538 | version = "1.4.2" 539 | description = "a fork of Python 2 and 3 ast modules with type comment support" 540 | optional = false 541 | python-versions = "*" 542 | files = [ 543 | {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, 544 | {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, 545 | {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, 546 | {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, 547 | {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = 
"sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, 548 | {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, 549 | {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, 550 | {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, 551 | {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, 552 | {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, 553 | {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, 554 | {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, 555 | {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, 556 | {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, 557 | {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, 558 | {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, 559 | {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, 560 | {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, 561 | {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, 562 | {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, 563 | {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, 564 | {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, 565 | {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, 566 | {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, 567 | {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, 568 | {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, 569 | {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, 570 | {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, 571 | {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, 572 | {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, 573 | ] 574 | 575 | [[package]] 576 | name = "typing-extensions" 577 | version = "3.7.4.3" 578 | description = "Backported and Experimental Type Hints for Python 3.5+" 579 | optional = false 580 | python-versions = "*" 581 | files = [ 582 | {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, 583 | {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, 584 | {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, 585 | ] 586 | 587 | [[package]] 588 | name = "tzdata" 589 | version = "2023.3" 590 | description = "Provider of IANA time zone data" 591 | optional = false 592 | python-versions = ">=2" 593 | files = [ 594 | {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, 595 | {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, 596 | ] 597 | 598 | [[package]] 599 | name = "urllib3" 600 | version = "1.26.3" 601 | description = "HTTP library with thread-safe connection pooling, file post, and more." 602 | optional = false 603 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" 604 | files = [ 605 | {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, 606 | {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, 607 | ] 608 | 609 | [package.extras] 610 | brotli = ["brotlipy (>=0.6.0)"] 611 | secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] 612 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 613 | 614 | [[package]] 615 | name = "virtualenv" 616 | version = "20.4.2" 617 | description = "Virtual Python Environment builder" 618 | optional = false 619 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" 620 | files = [ 621 | {file = "virtualenv-20.4.2-py2.py3-none-any.whl", hash = "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3"}, 622 | {file = "virtualenv-20.4.2.tar.gz", hash = "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d"}, 623 | ] 624 | 625 | [package.dependencies] 626 | appdirs = ">=1.4.3,<2" 627 | distlib = ">=0.3.1,<1" 628 | filelock = ">=3.0.0,<4" 629 | six = ">=1.9.0,<2" 630 | 631 | [package.extras] 632 | docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] 633 | testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "xonsh (>=0.9.16)"] 634 | 635 | [metadata] 636 | lock-version = "2.0" 637 | python-versions = "^3.8" 638 | content-hash = "6fec58163765d7428478c12bb6c66ebbad24d3937aa841f17d089c1acbabae35" 639 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | 
[tool.poetry] 2 | name = "open-event-scripts" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Areeb Jamal "] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.8" 9 | requests = "^2.25.1" 10 | pytz = "^2021.1" 11 | python-dateutil = "^2.8.1" 12 | sendgrid = "^6.10.0" 13 | pandas = "^2.0.3" 14 | openpyxl = "^3.1.2" 15 | 16 | [tool.poetry.dev-dependencies] 17 | pre-commit = "^2.10.1" 18 | black = {version = "^20.8b1", allow-prereleases = true} 19 | 20 | [build-system] 21 | requires = ["poetry-core>=1.0.0"] 22 | build-backend = "poetry.core.masonry.api" 23 | 24 | [tool.black] 25 | line-length = 90 26 | target-version = ['py38'] 27 | skip-string-normalization = true 28 | exclude = ''' 29 | 30 | ( 31 | /( 32 | \.eggs # exclude a few common directories in the 33 | | \.git # root of the project 34 | | \.hg 35 | | \.mypy_cache 36 | | \.tox 37 | | \.venv 38 | | _build 39 | | buck-out 40 | | build 41 | | dist 42 | )/ 43 | ) 44 | ''' 45 | 46 | [tool.isort] 47 | profile = "black" 48 | line_length = 90 49 | skip = '.venv/' 50 | multi_line_output = 3 51 | include_trailing_comma = true 52 | force_grid_wrap = 0 53 | use_parentheses = true 54 | 55 | [tool.pycln] 56 | path = "scripts/" 57 | exclude = "(.venv/)" 58 | all = true 59 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2020.12.5; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" 2 | chardet==4.0.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" 3 | idna==2.10; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" 4 | python-dateutil==2.8.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.3.0") 5 | pytz==2021.1 6 | requests==2.25.1; (python_version >= "2.7" and python_full_version < "3.0.0") or (python_full_version >= "3.5.0") 7 | six==1.15.0; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.3.0" 8 | urllib3==1.26.3; python_version >= "2.7" and python_full_version < "3.0.0" or python_full_version >= "3.5.0" and python_version < "4" 9 | -------------------------------------------------------------------------------- /scripts/create-session-speakers.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from datetime import datetime 3 | import readline # noqa 4 | import sys 5 | from getpass import getpass 6 | from dataclasses import dataclass 7 | 8 | import requests 9 | import csv 10 | import re 11 | import sys 12 | import json 13 | 14 | event_id = '5' 15 | fn = 'Wikimania Sessions and Speakers.xlsx' 16 | 17 | session_type_times = { 18 | "Entertainment session": 30, 19 | "Lecture": 20, 20 | "Lecture 15 minute": 15, 21 | "Lecture 30 minute": 30, 22 | "Lecture 60 minute": 60, 23 | "Lightning talk": 10, 24 | "Lightning talk 5 minute": 5, 25 | "Meetup": 60, 26 | "Meetup 120 minute": 120, 27 | "Open discussion 20 minute": 20, 28 | "Panel": 60, 29 | "Panel 30 minute": 30, 30 | "Panel 40 minute": 40, 31 | "Panel 45 minute": 45, 32 | "Poster session": 2, 33 | "Roundtable / open discussion": 60, 34 | "Roundtable discussion 30 minute": 30, 35 | "Roundtable discussion 40 minute": 40, 36 | "Roundtable discussion 45 minute": 45, 37 | "Roundtable discussion 90 minute": 90, 38 | "Summit": 135, 39 | "Wikiwomen Lecture": 12, 40 | "Workshop": 60, 41 | 
"Workshop 120 minute": 120, 42 | "Workshop 30 minute": 30, 43 | "Workshop 40 minute": 40, 44 | "Workshop 45 minute": 45, 45 | "Workshop 90 minute": 90, 46 | "Workshop All day 390 minutes": 390, 47 | "Workshop Half Day 165 minutes": 165, 48 | } 49 | 50 | speakers = pd.read_excel(fn, index_col='ID', sheet_name="Speakers", ) 51 | sessions = pd.read_excel(fn, index_col='ID', sheet_name="Sessions") 52 | 53 | # print(speakers) 54 | 55 | # print(sessions) 56 | 57 | # collect tracks 58 | tracks = sessions['Track'].unique() 59 | session_types = sessions['Session type'].unique() 60 | # print(f"Tracks: {tracks}") 61 | 62 | 63 | # event_identifier = '41dbcda9' 64 | api_url = 'http://localhost:8080/v1' 65 | # event_url = api_url + '/events/' + event_identifier 66 | 67 | token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpYXQiOjE2OTAxODQxNzgsIm5iZiI6MTY5MDE4NDE3OCwianRpIjoiODA3MDNiYjctM2NhNC00NmY4LWJjMDAtZWU5ODRiMTQ2MjdkIiwiZXhwIjoxNjkwMjcwNTc4LCJpZGVudGl0eSI6MSwiZnJlc2giOnRydWUsInR5cGUiOiJhY2Nlc3MiLCJjc3JmIjoiMTRjOWMxNDYtNDA0ZS00ZjZiLTk0NTUtYWI5MTU3YzMyNjNhIn0.wVErUWDRsTJolNmIb_4rC4Uynf0ZlQcU-XQIfXGWdUk" 68 | 69 | 70 | # event = requests.get(event_url, timeout=10).json() 71 | 72 | 73 | create_track_url = api_url + '/tracks' 74 | create_session_type_url = api_url + '/session-types' 75 | create_speaker_url = api_url + '/speakers' 76 | create_session_url = api_url + '/sessions' 77 | 78 | speaker_id_to_eventyay_id = dict() 79 | track_to_eventyay_id = dict() 80 | session_type_to_eventyay_id = dict() 81 | session_id_to_eventyay_id = dict() 82 | 83 | session_type_to_session_eventyay_ids = dict() 84 | speaker_id_to_session_eventyay_ids = dict() 85 | 86 | # for dry running a fake response class 87 | NEXT_NR = 1 88 | 89 | @dataclass 90 | class FakeResponse: 91 | status_code: int = 201 92 | def json(self): 93 | global NEXT_NR 94 | n = NEXT_NR 95 | NEXT_NR += 1 96 | return({"data": {"id": n}}) 97 | 98 | # try to get a token 99 | def get_token(): 100 | username = 'open_event_test_User@fossasia.org' 101 | password = 'fossasia' 102 | 103 | auth = requests.post( 104 | 'http://localhost:8080/auth/session', 105 | json={'email': username, 'password': password}, 106 | timeout=10 107 | ) 108 | if auth.status_code != 200: 109 | print('Auth Error:', auth.json()) 110 | sys.exit(-1) 111 | token = auth.json()['access_token'] 112 | # print (f"ACCESS TOKEN = {token}") 113 | return token 114 | 115 | def create_tracks(): 116 | for t in tracks: 117 | data = { 118 | "data": { 119 | "relationships": { 120 | "event": { 121 | "data": { 122 | "type": "event", 123 | "id": event_id 124 | } 125 | } 126 | }, 127 | "attributes": { 128 | "name": t, 129 | # "description": "description", 130 | # TODO separate colors for tracks? 131 | "color": "#a04b4b" 132 | }, 133 | "type": "track" 134 | } 135 | } 136 | if DRY_RUN:v 137 | response = FakeResponse() 138 | else: 139 | response = requests.post( 140 | create_track_url, 141 | json=data, 142 | headers={ 143 | 'Content-Type': 'application/vnd.api+json', 144 | 'Authorization': 'JWT ' + token, 145 | }, 146 | timeout=10 147 | ) 148 | if response.status_code == 201: 149 | # print('Track created') 150 | track_to_eventyay_id[t] = response.json()['data']['id'] 151 | elif response.status_code == 403: 152 | # Handle Error 153 | # TODO we need API to get track id!! 
154 | print(f'Track already exists: {t}') 155 | track_to_eventyay_id[t] = 'unknown' 156 | elif response.status_code != 201: 157 | print('Error: ', t, response, response.content) 158 | track_to_eventyay_id[t] = 'error' 159 | 160 | def create_sessions(): 161 | for idx, row in sessions.iterrows(): 162 | data = { 163 | "data": { 164 | "relationships": { 165 | "event": { 166 | "data": { 167 | "type": "event", 168 | "id": event_id 169 | } 170 | }, 171 | "track": { 172 | "data": { 173 | "type": "track", 174 | "id": track_to_eventyay_id[row['Track']] 175 | } 176 | } 177 | }, 178 | "attributes": { 179 | "title": row['Proposal title'], 180 | # "subtitle": "Title", 181 | # "level": "Expert", 182 | "short-abstract": row['Abstract'], 183 | "long-abstract": row['Description'], 184 | #"comments": "Comment", 185 | #"starts-at": "2099-06-01T10:00:00.500127+00:00", 186 | #"ends-at": "2099-06-01T11:00:00.500127+00:00", 187 | "language": row['Language'], 188 | #"slides-url": "http://example.com/example", 189 | #"video-url": "http://example.com/example", 190 | #"audio-url": "http://example.com/example", 191 | #"signup-url": "http://example.com/example", 192 | "state": "accepted", 193 | #"created-at": "2017-05-01T01:24:47.500127+00:00", 194 | # "deleted-at": null, 195 | #"submitted-at": "2017-05-01T01:24:47.500127+00:00", 196 | #"is-mail-sent": false, 197 | #"last-modified-at": "2017-05-01T01:24:47.500127+00:00" 198 | }, 199 | "type": "session" 200 | } 201 | } 202 | if DRY_RUN: 203 | response = FakeResponse() 204 | else: 205 | response = requests.post( 206 | create_session_url, 207 | json=data, 208 | headers={ 209 | 'Content-Type': 'application/vnd.api+json', 210 | 'Authorization': 'JWT ' + token, 211 | }, 212 | timeout=10 213 | ) 214 | 215 | if response.status_code == 201: 216 | # print('Session created') 217 | id = response.json()['data']['id'] 218 | session_id_to_eventyay_id[idx] = id 219 | for speaker_id in row['Speaker IDs'].split(): 220 | if speaker_id in speaker_id_to_session_eventyay_ids: 221 | speaker_id_to_session_eventyay_ids[speaker_id].append(id) 222 | else: 223 | speaker_id_to_session_eventyay_ids[speaker_id] = [id] 224 | 225 | session_type = row['Session type'] 226 | if session_type in session_type_to_session_eventyay_ids: 227 | session_type_to_session_eventyay_ids[session_type].append(id) 228 | else: 229 | session_type_to_session_eventyay_ids[session_type] = [id] 230 | elif response.status_code == 403: 231 | # Handle Error 232 | # TODO we need API to get session id!!! 
233 | print(f"Session already exists {idx}") 234 | session_id_to_eventyay_id[idx] = 'unknown' 235 | elif response.status_code != 201: 236 | print('Error: ', response, response.content) 237 | session_id_to_eventyay_id[idx] = 'error' 238 | 239 | 240 | def create_session_types(): 241 | for t in session_types: 242 | minutes = session_type_times[t] 243 | hours = minutes // 60 244 | remmin = minutes % 60 245 | session_time = "{:02d}:{:02d}".format(hours, remmin) 246 | data = { 247 | "data": { 248 | "relationships": { 249 | "event": { 250 | "data": { 251 | "type": "event", 252 | "id": event_id 253 | } 254 | }, 255 | "sessions": { 256 | "data": [ 257 | { 258 | "type": "session", 259 | "id": id 260 | } for id in session_type_to_session_eventyay_ids[t] 261 | ] 262 | } 263 | }, 264 | "attributes": { 265 | "name": t, 266 | "length": session_time, 267 | }, 268 | "type": "session-type" 269 | } 270 | } 271 | if DRY_RUN: 272 | response = FakeResponse() 273 | else: 274 | response = requests.post( 275 | create_session_type_url, 276 | json=data, 277 | headers={ 278 | 'Content-Type': 'application/vnd.api+json', 279 | 'Authorization': 'JWT ' + token, 280 | }, 281 | timeout=10 282 | ) 283 | if response.status_code == 201: 284 | # print('Session Type created') 285 | session_type_to_eventyay_id[t] = response.json()['data']['id'] 286 | elif response.status_code == 403: 287 | # Handle Error 288 | # TODO we need API to get session type id!! 289 | print(f"Session Type already exists: {t}") 290 | session_type_to_eventyay_id[t] = 'unknown' 291 | elif response.status_code != 201: 292 | print('Error: ', t, response, response.content) 293 | session_type_to_eventyay_id[t] = 'error' 294 | 295 | 296 | 297 | def create_speakers(): 298 | for idx, row in speakers.iterrows(): 299 | data = { 300 | "data": { 301 | "type": "speaker", 302 | "relationships": { 303 | "event": { 304 | "data": { 305 | "type": "event", 306 | "id": event_id 307 | } 308 | }, 309 | "sessions": { 310 | "data": [ 311 | { 312 | "type": "session", 313 | "id": id 314 | } for id in speaker_id_to_session_eventyay_ids[idx] 315 | ] 316 | } 317 | }, 318 | "attributes": { 319 | "email": row['E-Mail'] 320 | } 321 | } 322 | } 323 | if not pd.isna(row['Biography']): 324 | data['data']['attributes']['short-biography'] = row['Biography'] 325 | if not pd.isna(row['Picture']): 326 | data['data']['attributes']['photo-url'] = row['Picture'] 327 | if not pd.isna(row['Name']): 328 | data['data']['attributes']['name'] = row['Name'] 329 | else: 330 | data['data']['attributes']['name'] = "Anonymous" 331 | 332 | #print("Creating speaker:") 333 | #print(f" name: {row['Name']}") 334 | #print(f" email: {row['E-Mail']}") 335 | #print(f" photo-url: {row['Picture']}") 336 | if DRY_RUN: 337 | response = FakeResponse() 338 | else: 339 | response = requests.post( 340 | create_speaker_url, 341 | json=data, 342 | headers={ 343 | 'Content-Type': 'application/vnd.api+json', 344 | 'Authorization': 'JWT ' + token, 345 | }, 346 | timeout=10 347 | ) 348 | 349 | if response.status_code == 201: 350 | # print('Speaker created') 351 | speaker_id_to_eventyay_id[idx] = response.json()['data']['id'] 352 | elif response.status_code == 403: 353 | # Handle Error 354 | # TODO we need API to get speaker id!!! 
355 | print(f"Speaker already exists: {row['E-Mail']}") 356 | speaker_id_to_eventyay_id[idx] = 'unknown' 357 | elif response.status_code != 201: 358 | print('Error: ', row['E-Mail'], response, response.content) 359 | speaker_id_to_eventyay_id[idx] = 'error' 360 | 361 | 362 | 363 | 364 | # print(f"speaker_id_to_eventyay_id = {speaker_id_to_event_id}") 365 | 366 | def do_checks(): 367 | ret = True 368 | # checks 369 | # - session speaker ids can be found 370 | # - session speaker name is the same as the names found via speakers 371 | for idx, row in sessions.iterrows(): 372 | #print(f"Session Id = {idx}") 373 | #print(f" speaker id = {row['Speaker IDs']}") 374 | #print(f" session speaker names: {row['Speaker names']}") 375 | speaker_ids = row['Speaker IDs'].split() 376 | speaker_name_combined = '' 377 | for sid in speaker_ids: 378 | #print(f" speaker id: {sid}") 379 | #print(f" speaker email: {speakers['E-Mail'][sid]}") 380 | sn = "" if pd.isna(speakers['Name'][sid]) else speakers['Name'][sid] 381 | #print(f" speaker name: {speakers['Name'][sid]}") 382 | if speaker_name_combined: 383 | speaker_name_combined += f"\n{sn}" 384 | else: 385 | speaker_name_combined = sn 386 | if not speaker_name_combined == row['Speaker names']: 387 | print(f"WARN name discrepancies in session {idx}!") 388 | ret = False 389 | return(ret) 390 | 391 | 392 | def check_session_type_time_vs_duration(): 393 | for idx, row in sessions.iterrows(): 394 | t = row['Session type'] 395 | du = row['Duration'] 396 | minutes = session_type_times[t] 397 | hours = minutes // 60 398 | remmin = minutes % 60 399 | session_time = "{:02d}:{:02d}".format(hours, remmin) 400 | if not minutes == du: 401 | print("Time discrepancy") 402 | print(f" Session ID = {idx}") 403 | print(f" session type = {t}") 404 | print(f" session type length = {minutes}") 405 | print(f" duration = {du}") 406 | 407 | 408 | 409 | DRY_RUN = True 410 | if not do_checks(): 411 | print("Inconsistencies found, aborting") 412 | sys.exit(1) 413 | print("=========== checking session times against durations of sessions =====") 414 | check_session_type_time_vs_duration() 415 | create_tracks() 416 | print(json.dumps({"track_ids": track_to_eventyay_id})) 417 | create_sessions() 418 | print(json.dumps({"session_ids": session_id_to_eventyay_id})) 419 | create_session_types() 420 | print(json.dumps({"session_type_ids": session_type_to_eventyay_id})) 421 | print(json.dumps({"session_type_to_session_eventyay_ids": session_type_to_session_eventyay_ids})) 422 | create_speakers() 423 | print(json.dumps({"speaker_ids": speaker_id_to_eventyay_id})) 424 | print(json.dumps({"speaker_to_session_ids": speaker_id_to_session_eventyay_ids})) 425 | 426 | DRY_RUN = False 427 | speaker_id_to_eventyay_id = dict() 428 | track_to_eventyay_id = dict() 429 | session_type_to_eventyay_id = dict() 430 | session_id_to_eventyay_id = dict() 431 | 432 | session_type_to_session_eventyay_ids = dict() 433 | speaker_id_to_session_eventyay_ids = dict() 434 | 435 | yn = input("Actually do the import? 
[y/N] ") 436 | if yn == 'y' or yn == 'Y': 437 | pass 438 | else: 439 | print("Ok, stopping here!") 440 | sys.exit(0) 441 | create_tracks() 442 | print(json.dumps({"track_ids": track_to_eventyay_id})) 443 | create_sessions() 444 | print(json.dumps({"session_ids": session_id_to_eventyay_id})) 445 | create_session_types() 446 | print(json.dumps({"session_type_ids": session_type_to_eventyay_id})) 447 | print(json.dumps({"session_type_to_session_eventyay_ids": session_type_to_session_eventyay_ids})) 448 | create_speakers() 449 | print(json.dumps({"speaker_ids": speaker_id_to_eventyay_id})) 450 | print(json.dumps({"speaker_to_session_ids": speaker_id_to_session_eventyay_ids})) 451 | -------------------------------------------------------------------------------- /scripts/generate_session_tweet_csv.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import readline # noqa 3 | import sys 4 | from datetime import timedelta 5 | 6 | import pytz 7 | import requests 8 | from dateutil import parser 9 | 10 | event_identifier = sys.argv[1] 11 | event_url = 'https://api.eventyay.com/v1/events/' + event_identifier 12 | 13 | event = requests.get(event_url).json() 14 | 15 | event_name = event['data']['attributes']['name'] 16 | timezone = pytz.timezone(event['data']['attributes']['timezone']) 17 | print('Event:', event_name) 18 | 19 | generate = input('Generate Social Media CSV? (y/N)? ') 20 | 21 | if generate.lower() != 'y': 22 | sys.exit() 23 | 24 | default_template = '{speaker_names} will share about "{session_title}" tomorrow at {event_name} #{track_name} {session_link}' 25 | 26 | template = input( 27 | f'Default Template: {default_template}\nPress enter to use default, or type to override: ' 28 | ) 29 | 30 | template = template.strip() 31 | 32 | print(f'Using template: { template or default_template }') 33 | 34 | duration_string = input( 35 | 'When should the session post on social media be scheduled before the actual session time?\nFor example, 4 hours, 5 minutes, 2 hours 30 minutes, etc.\n' 36 | ) 37 | time = parser.parse(duration_string) 38 | delta = timedelta(hours=time.hour, minutes=time.minute, seconds=time.second) 39 | 40 | sessions = requests.get( 41 | event_url 42 | + '/sessions?page[size]=0&include=track,speakers&filter=[{"or":[{"name":"state","op":"eq","val":"confirmed"},{"name":"state","op":"eq","val":"accepted"}]}]' 43 | ).json() 44 | 45 | 46 | def generate_row(session, track, speakers, template=None): 47 | if not template: 48 | template = default_template 49 | speaker_names = [] 50 | for speaker in speakers: 51 | speaker_name = speaker['attributes']['name'] 52 | if twitter_link := speaker['attributes']['twitter']: 53 | speaker_name += ' @' + twitter_link.split('https://twitter.com/')[1] 54 | speaker_names.append(speaker_name) 55 | 56 | speaker_names = ', '.join(speaker_names) 57 | session_title = session['attributes']['title'] 58 | track_name = track['attributes']['name'] 59 | session_link = ( 60 | 'https://eventyay.com/e/' + event_identifier + '/session/' + session['id'] 61 | ) 62 | 63 | text = template.format( 64 | speaker_names=speaker_names, 65 | session_title=session_title, 66 | event_name=event_name, 67 | track_name=track_name.replace(' ', '_'), 68 | session_link=session_link, 69 | ) 70 | 71 | photos = list(filter(bool, map(lambda sp: sp['attributes']['photo-url'], speakers))) 72 | 73 | starts_at = session['attributes']['starts-at'] 74 | if starts_at: 75 | starts_at_time = parser.parse(starts_at).astimezone(timezone) - delta 76 | 77 | 
return ( 78 | text, 79 | photos[0] if photos else None, 80 | starts_at_time.strftime('%Y-%m-%d %H:%M') if starts_at else None, 81 | ) 82 | 83 | 84 | with open(f'event-{event_identifier}-social-media.csv', mode='w') as csv_file: 85 | writer = csv.writer(csv_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) 86 | 87 | for session in sessions['data']: 88 | track_link = session['relationships']['track']['data'] 89 | track = list( 90 | filter( 91 | lambda data: data['id'] == track_link['id'] and data['type'] == 'track', 92 | sessions['included'], 93 | ) 94 | )[0] 95 | speaker_links = session['relationships']['speakers']['data'] 96 | speakers = [] 97 | for speaker_link in speaker_links: 98 | speaker = list( 99 | filter( 100 | lambda data: data['id'] == speaker_link['id'] 101 | and data['type'] == 'speaker', 102 | sessions['included'], 103 | ) 104 | ) 105 | speakers += speaker 106 | 107 | writer.writerow(generate_row(session, track, speakers, template=template)) 108 | -------------------------------------------------------------------------------- /scripts/import_access_codes.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import readline # noqa 3 | import sys 4 | from getpass import getpass 5 | 6 | import requests 7 | import csv 8 | import json 9 | 10 | event_identifier = sys.argv[1] 11 | api_url = 'https://api.eventyay.com/v1' 12 | event_url = api_url + '/events/' + event_identifier 13 | 14 | event = requests.get(event_url).json() 15 | 16 | print('Event:', event['data']['attributes']['name']) 17 | 18 | csv_file_name = sys.argv[2] 19 | 20 | access_codes = [] 21 | 22 | with open(csv_file_name) as csv_file: 23 | csv_reader = csv.reader(csv_file, delimiter=',') 24 | next(csv_reader, None) 25 | for row in csv_reader: 26 | access_codes.append( 27 | dict(code=row[0], max_quantity=int(row[1]), marketer_id=row[2], ticket_type=row[3]) 28 | ) 29 | 30 | print( 31 | f'Data to be imported {len(access_codes)}:\n\n', 32 | json.dumps(access_codes, indent=2), 33 | ) 34 | 35 | import_ = input('Import? (y/N)? 
') 36 | 37 | if import_.lower() != 'y': 38 | sys.exit() 39 | 40 | username = input('Email: ') 41 | password = getpass() 42 | 43 | auth = requests.post( 44 | 'https://api.eventyay.com/auth/session', 45 | json={'email': username, 'password': password}, 46 | ) 47 | if auth.status_code != 200: 48 | print('Auth Error:', auth.json()) 49 | sys.exit(-1) 50 | token = auth.json()['access_token'] 51 | 52 | 53 | created = 0 54 | for code in access_codes: 55 | data = { 56 | "data": { 57 | "attributes": { 58 | "code": code["code"], 59 | "max_quantity": code["max_quantity"], 60 | "is-active": True, 61 | "tickets-number": code['max_quantity'], 62 | "access-url": f'https://eventyay.com/e/{event_identifier}?code={code["code"]}', 63 | "valid-from": datetime.now().astimezone().isoformat(), 64 | "valid-till": event['data']['attributes']['ends-at'] 65 | }, 66 | "type": "access-code", 67 | "relationships": { 68 | "event": {"data": {"id": event['data']['id'], "type": "event"}}, 69 | "marketer": {"data": {"id": code["marketer_id"], "type": "user"}}, 70 | "tickets": {"data": [{"id": code["ticket_type"], "type": "ticket"}]} 71 | }, 72 | } 73 | } 74 | 75 | access_code_url = api_url + '/access-codes' 76 | 77 | response = requests.post( 78 | access_code_url, 79 | json=data, 80 | headers={ 81 | 'Content-Type': 'application/vnd.api+json', 82 | 'Authorization': 'JWT ' + token, 83 | }, 84 | ) 85 | 86 | if response.status_code == 201: 87 | print(f'{code["code"]}: Access Code created') 88 | created += 1 89 | elif response.status_code == 409: 90 | # Handle Error 91 | print(f'{code["code"]}: Access Code already exists') 92 | elif response.status_code != 201: 93 | print('Error: ', response, response.content) 94 | 95 | print(f'{created} access codes created') 96 | -------------------------------------------------------------------------------- /scripts/import_access_codes_test.csv: -------------------------------------------------------------------------------- 1 | # code, max number, marketer user id, ticket type id 2 | ASDF1234,1,1032,3282 3 | -------------------------------------------------------------------------------- /scripts/import_discount_codes.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import readline # noqa 3 | import sys 4 | from getpass import getpass 5 | 6 | import requests 7 | import csv 8 | import json 9 | 10 | event_identifier = sys.argv[1] 11 | api_url = 'https://api.eventyay.com/v1' 12 | event_url = api_url + '/events/' + event_identifier 13 | 14 | event = requests.get(event_url).json() 15 | 16 | print('Event:', event['data']['attributes']['name']) 17 | 18 | csv_file_name = sys.argv[2] 19 | 20 | discount_codes = [] 21 | 22 | with open(csv_file_name) as csv_file: 23 | csv_reader = csv.reader(csv_file, delimiter=',') 24 | next(csv_reader, None) 25 | for row in csv_reader: 26 | discount_codes.append( 27 | dict(code=row[0], value=float(row[2]), max_quantity=int(row[3])) 28 | ) 29 | 30 | print( 31 | f'Data to be imported {len(discount_codes)}:\n\n', 32 | json.dumps(discount_codes, indent=2), 33 | ) 34 | 35 | import_ = input('Import? (y/N)? 
') 36 | 37 | if import_.lower() != 'y': 38 | sys.exit() 39 | 40 | username = input('Email: ') 41 | password = getpass() 42 | 43 | auth = requests.post( 44 | 'https://api.eventyay.com/auth/session', 45 | json={'email': username, 'password': password}, 46 | ) 47 | if auth.status_code != 200: 48 | print('Auth Error:', auth.json()) 49 | sys.exit(-1) 50 | token = auth.json()['access_token'] 51 | 52 | tickets_url = event_url + '/tickets?fields[ticket]=id&page[size]=0' 53 | 54 | tickets = requests.get(tickets_url).json() 55 | ticket_ids = [{"id": ticket['id'], "type": "ticket"} for ticket in tickets['data']] 56 | 57 | created = 0 58 | for code in discount_codes: 59 | data = { 60 | "data": { 61 | "attributes": { 62 | **code, 63 | "type": "percent", 64 | "is-active": True, 65 | "used-for": "ticket", 66 | "tickets-number": code['max_quantity'], 67 | "discount-url": f'https://eventyay.com/e/{event_identifier}?code={code["code"]}', 68 | "valid-from": datetime.now().astimezone().isoformat(), 69 | "valid-till": event['data']['attributes']['ends-at'] 70 | }, 71 | "type": "discount-code", 72 | "relationships": { 73 | "event": {"data": {"id": event['data']['id'], "type": "event"}}, 74 | "tickets": {"data": ticket_ids}, 75 | }, 76 | } 77 | } 78 | 79 | discount_code_url = api_url + '/discount-codes' 80 | 81 | response = requests.post( 82 | discount_code_url, 83 | json=data, 84 | headers={ 85 | 'Content-Type': 'application/vnd.api+json', 86 | 'Authorization': 'JWT ' + token, 87 | }, 88 | ) 89 | 90 | if response.status_code == 201: 91 | print(f'{code["code"]}: Discount Code created') 92 | created += 1 93 | elif response.status_code == 409: 94 | # Handle Error 95 | print(f'{code["code"]}: Discount Code already exists') 96 | elif response.status_code != 201: 97 | print('Error: ', response, response.content) 98 | 99 | print(f'{created} discount codes created') 100 | -------------------------------------------------------------------------------- /scripts/reschedule.py: -------------------------------------------------------------------------------- 1 | import readline # noqa 2 | import sys 3 | from getpass import getpass 4 | 5 | import pytz 6 | import requests 7 | from dateutil import parser 8 | 9 | event_identifier = sys.argv[1] 10 | event_url = 'https://api.eventyay.com/v1/events/' + event_identifier 11 | 12 | event = requests.get(event_url).json() 13 | 14 | starts_at = event['data']['attributes']['starts-at'] 15 | ends_at = event['data']['attributes']['ends-at'] 16 | timezone = pytz.timezone(event['data']['attributes']['timezone']) 17 | 18 | starts_at = parser.parse(starts_at).astimezone(timezone) 19 | ends_at = parser.parse(ends_at).astimezone(timezone) 20 | 21 | print('Event:', event['data']['attributes']['name']) 22 | print('Scheduled Time: ', starts_at, 'to', ends_at) 23 | 24 | new_starts_at = parser.parse(input('new starts at: '), fuzzy=True).astimezone(timezone) 25 | new_ends_at = parser.parse(input('new ends at: '), fuzzy=True).astimezone(timezone) 26 | 27 | print('New Scheduled Time: ', new_starts_at, 'to', new_ends_at) 28 | 29 | reschedule = input('Reschedule? (y/N)? 
') 30 | 31 | if reschedule.lower() != 'y': 32 | sys.exit() 33 | 34 | username = input('Email: ') 35 | password = getpass() 36 | 37 | auth = requests.post( 38 | 'https://api.eventyay.com/auth/session', 39 | json={'email': username, 'password': password}, 40 | ) 41 | if auth.status_code != 200: 42 | print('Auth Error:', auth.json()) 43 | sys.exit(-1) 44 | token = auth.json()['access_token'] 45 | 46 | data = { 47 | "data": { 48 | "attributes": { 49 | "starts-at": new_starts_at.isoformat(), 50 | "ends-at": new_ends_at.isoformat(), 51 | }, 52 | "id": event['data']['id'], 53 | "type": "event", 54 | } 55 | } 56 | 57 | response = requests.patch( 58 | event_url, 59 | json=data, 60 | headers={'Content-Type': 'application/vnd.api+json', 'Authorization': 'JWT ' + token}, 61 | ) 62 | if response.status_code != 200: 63 | print('Error:', response.json()) 64 | sys.exit(-1) 65 | 66 | print('Reschedule Successful!') 67 | 68 | reschedule_tickets = input('Change ticket sales time? (y/N) ') 69 | 70 | if reschedule_tickets.lower() != 'y': 71 | sys.exit(0) 72 | 73 | tickets_url = event_url + '/tickets?fields[ticket]=id&page[size]=0' 74 | 75 | tickets = requests.get(tickets_url).json() 76 | ticket_ids = [ticket['id'] for ticket in tickets['data']] 77 | 78 | for id in ticket_ids: 79 | data = { 80 | "data": { 81 | "attributes": {"sales-ends-at": new_ends_at.isoformat()}, 82 | "id": id, 83 | "type": "ticket", 84 | } 85 | } 86 | 87 | response = requests.patch( 88 | 'https://api.eventyay.com/v1/tickets/' + id, 89 | json=data, 90 | headers={ 91 | 'Content-Type': 'application/vnd.api+json', 92 | 'Authorization': 'JWT ' + token, 93 | }, 94 | ) 95 | if response.status_code != 200: 96 | print('Error:', response.json()) 97 | sys.exit(-1) 98 | 99 | print('Ticket Sales End time rescheduled!') 100 | -------------------------------------------------------------------------------- /scripts/send_emails_via_sendgrid.py: -------------------------------------------------------------------------------- 1 | import os 2 | import csv 3 | import sys 4 | from sendgrid import SendGridAPIClient 5 | from sendgrid.helpers.mail import Mail 6 | from sendgrid.helpers.mail import To 7 | 8 | csv_file_name = sys.argv[1] 9 | to_emails = [] 10 | 11 | with open(csv_file_name) as csv_file: 12 | csv_reader = csv.reader(csv_file, delimiter=',') 13 | next(csv_reader, None) 14 | for row in csv_reader: 15 | to_emails.append( 16 | To(email=row[0], 17 | substitutions={ 18 | '-name-': row[1], 19 | '-url-': row[5], 20 | }) 21 | ) 22 | 23 | print(f'Emails to be sent to the following email addresses (total: {len(to_emails)}):\n') 24 | for i in to_emails: 25 | print(i.email) 26 | 27 | do_send_ = input('Really send emails? (y/N)? ') 28 | 29 | if do_send_.lower() != 'y': 30 | sys.exit() 31 | 32 | 33 | # Replace these with your email addresses and names 34 | to_emails_test = [ 35 | To(email='norbert@preining.info', 36 | name='Norbert Preining', 37 | substitutions={ 38 | '-name-': 'Norbert', 39 | '-url-': 'URL1', 40 | }), 41 | To(email='preining@logic.at', 42 | name='Norbert Preining', 43 | substitutions={ 44 | '-name-': 'NorbertLogic', 45 | '-url-': 'URL2', 46 | }), 47 | ] 48 | 49 | message = Mail( 50 | from_email=('info@eventyay.com', 'Mario Behling - Eventyay'), 51 | to_emails=to_emails, 52 | subject='Your Wikimania Free Ticket', 53 | plain_text_content="""Hello, 54 | 55 | You are receiving this email from the Eventyay platform, which is used to run Wikimania 2023. 56 | 57 | As part of your scholarship, the Wikimania team has reserved a free ticket for you. 
Please register for free by following this link: 58 | 59 | -url- 60 | 61 | The link is valid for one ticket. 62 | 63 | Wikimania 2023 will run from 16–19 August in Singapore at the Suntec Singapore Convention and Exhibition Centre and online. Workshops, hackathon and pre-conference activities happen on 15 August while post-conference and city tours happen on 20 August, so please plan to stay for those if you are interested! 64 | 65 | Should you encounter any issues, please respond to this email. 66 | 67 | Thank you and best regards, 68 | 69 | Mario and the Eventyay Team 70 | """, 71 | is_multiple=True) 72 | 73 | try: 74 | sendgrid_client = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY')) 75 | response = sendgrid_client.send(message) 76 | print(response.status_code) 77 | print(response.body) 78 | print(response.headers) 79 | except Exception as e: 80 | print(e) 81 | 82 | --------------------------------------------------------------------------------