├── .flaskenv ├── .gcloudignore ├── .gitignore ├── LICENSE ├── Makefile ├── Pipfile ├── Pipfile.lock ├── README.md ├── api ├── __init__.py ├── app.py ├── config.py ├── db │ ├── __init__.py │ ├── init-dev.sql │ ├── migrations │ │ ├── README │ │ ├── alembic.ini │ │ ├── env.py │ │ ├── script.py.mako │ │ └── versions │ │ │ ├── 18b25b3efd9b_.py │ │ │ ├── 21ad3631a30b_.py │ │ │ ├── 2501865a09b1_.py │ │ │ ├── 27ff0681ea2d_.py │ │ │ ├── 2b5e44902390_.py │ │ │ ├── 410649dcf541_.py │ │ │ ├── 5838d398351f_.py │ │ │ ├── 796e284aa6dc_.py │ │ │ ├── 89ff59bac28a_.py │ │ │ ├── 93866a5dad44_.py │ │ │ ├── 965f54a262aa_.py │ │ │ ├── bb6244d75df4_.py │ │ │ ├── c800ac2fe1b9_.py │ │ │ ├── c8090718abc3_.py │ │ │ ├── e134be65efb1_.py │ │ │ └── e8a76b87ea48_.py │ └── models │ │ ├── __init__.py │ │ ├── action.py │ │ ├── action_type.py │ │ ├── base.py │ │ ├── child_datum.py │ │ ├── datum.py │ │ ├── ec_token.py │ │ ├── exceptions.py │ │ ├── test.py │ │ ├── test_datum.py │ │ └── user.py ├── handlers │ ├── __init__.py │ ├── auth.py │ ├── data.py │ ├── general.py │ ├── search.py │ ├── tests.py │ ├── users.py │ └── util.py ├── mail.py ├── main.py ├── openapi │ ├── api.yaml │ ├── base.yaml │ ├── data.yaml │ ├── search.yaml │ ├── tests.yaml │ └── users.yaml └── tests │ ├── __init__.py │ ├── conftest.py │ ├── fixtures │ ├── spidey.png │ └── spidey_post.png │ ├── pytest.ini │ ├── test_auth.py │ ├── test_basic.py │ ├── test_data.py │ ├── test_search.py │ ├── test_tests.py │ ├── test_users.py │ └── utils.py ├── app.yaml ├── requirements-dev.txt └── requirements.txt /.flaskenv: -------------------------------------------------------------------------------- 1 | FLASK_ENV=development 2 | FLASK_APP=api.app 3 | -------------------------------------------------------------------------------- /.gcloudignore: -------------------------------------------------------------------------------- 1 | # This file specifies files that are *not* uploaded to Google Cloud Platform 2 | # using gcloud. 
It follows the same syntax as .gitignore, with the addition of 3 | # "#!include" directives (which insert the entries of the given .gitignore-style 4 | # file at that point). 5 | # 6 | # For more information, run: 7 | # $ gcloud topic gcloudignore 8 | # 9 | .gcloudignore 10 | # If you would like to upload your .git directory, .gitignore file or files 11 | # from your .gitignore file, remove the corresponding line 12 | # below: 13 | .git 14 | .gitignore 15 | 16 | # Python pycache: 17 | __pycache__/ 18 | # Ignored by the build system 19 | /setup.cfg 20 | .venv 21 | Pipfile* 22 | Makefile 23 | public* 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv 2 | .swp 3 | .swo 4 | env 5 | app_secrets.yaml 6 | public* 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 test.ai 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | dev: 2 | pipenv install --dev 3 | 4 | reqs: 5 | pipenv run pipenv_to_requirements -f 6 | 7 | serve: 8 | pipenv run flask run 9 | 10 | deploy: reqs 11 | gcloud app deploy app.yaml --project ${OTD_PROJECT_ID} 12 | 13 | migrate-init: 14 | pipenv run flask db init 15 | 16 | migrate-new: 17 | @echo "Enter migration message: " 18 | @read NAME; \ 19 | pipenv run flask db migrate --message "${NAME}" 20 | 21 | migrate-rev: 22 | @echo "Enter migration message: " 23 | @read NAME; \ 24 | pipenv run flask db revision --message "${NAME}" 25 | 26 | migrate: 27 | pipenv run flask db upgrade 28 | 29 | migrate-history: 30 | pipenv run flask db history 31 | 32 | 33 | test: 34 | FLASK_ENV=testing pipenv run pytest ./api/tests/test_*.py 35 | 36 | lint: 37 | pipenv run flake8 --ignore=E501 --exclude=".svn,CVS,.bzr,.hg,.git,__pycache__,.tox,.eggs,*.egg,api/db/migrations/*.py,api/db/migrations/versions/*.py,api/db/models/__init__.py" api 38 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | pipenv-to-requirements = "*" 8 | pytest = "*" 9 | flake8 = "*" 10 | pyyaml = "*" 11 | 12 | [packages] 13 | flask = "*" 14 | flask-migrate = "*" 15 | flask-sqlalchemy = "*" 16 | python-dotenv = "*" 17 | mysqlclient = "*" 18 | pymysql = "*" 19 | gunicorn = "*" 20 | connexion = {extras = ["swagger-ui"],version = "*"} 
21 | pyjwt = "*" 22 | libgravatar = "*" 23 | pillow = "*" 24 | google-cloud-storage = "*" 25 | flask-cors = "*" 26 | prance = "*" 27 | 28 | [requires] 29 | python_version = "3.7" 30 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "bc382ab351b34410c55c7517cf56a85d30e178cde5f0c18d417f4da0d3039566" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.7" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "alembic": { 20 | "hashes": [ 21 | "sha256:9f907d7e8b286a1cfb22db9084f9ce4fde7ad7956bb496dc7c952e10ac90e36a" 22 | ], 23 | "version": "==1.2.1" 24 | }, 25 | "attrs": { 26 | "hashes": [ 27 | "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", 28 | "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" 29 | ], 30 | "version": "==19.3.0" 31 | }, 32 | "cachetools": { 33 | "hashes": [ 34 | "sha256:428266a1c0d36dc5aca63a2d7c5942e88c2c898d72139fca0e97fdd2380517ae", 35 | "sha256:8ea2d3ce97850f31e4a08b0e2b5e6c34997d7216a9d2c98e0f3978630d4da69a" 36 | ], 37 | "version": "==3.1.1" 38 | }, 39 | "certifi": { 40 | "hashes": [ 41 | "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", 42 | "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" 43 | ], 44 | "version": "==2019.9.11" 45 | }, 46 | "chardet": { 47 | "hashes": [ 48 | "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", 49 | "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" 50 | ], 51 | "version": "==3.0.4" 52 | }, 53 | "click": { 54 | "hashes": [ 55 | "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", 56 | 
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" 57 | ], 58 | "version": "==7.0" 59 | }, 60 | "clickclick": { 61 | "hashes": [ 62 | "sha256:4a890aaa9c3990cfabd446294eb34e3dc89701101ac7b41c1bff85fc210f6d23", 63 | "sha256:ab8f229fb9906a86634bdfc6fabfc2b665f44804170720db4f6e1d98f8a58f3d" 64 | ], 65 | "version": "==1.2.2" 66 | }, 67 | "connexion": { 68 | "extras": [ 69 | "swagger-ui" 70 | ], 71 | "hashes": [ 72 | "sha256:6e0569b646f2e6229923dc4e4c6e0325e223978bd19105779fd81e16bcb22fdf", 73 | "sha256:7b4268e9ea837241e530738b35040345b78c8748d05d2c22805350aca0cd5b1c" 74 | ], 75 | "index": "pypi", 76 | "version": "==2.4.0" 77 | }, 78 | "flask": { 79 | "hashes": [ 80 | "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", 81 | "sha256:45eb5a6fd193d6cf7e0cf5d8a5b31f83d5faae0293695626f539a823e93b13f6" 82 | ], 83 | "index": "pypi", 84 | "version": "==1.1.1" 85 | }, 86 | "flask-cors": { 87 | "hashes": [ 88 | "sha256:72170423eb4612f0847318afff8c247b38bd516b7737adfc10d1c2cdbb382d16", 89 | "sha256:f4d97201660e6bbcff2d89d082b5b6d31abee04b1b3003ee073a6fd25ad1d69a" 90 | ], 91 | "index": "pypi", 92 | "version": "==3.0.8" 93 | }, 94 | "flask-migrate": { 95 | "hashes": [ 96 | "sha256:6fb038be63d4c60727d5dfa5f581a6189af5b4e2925bc378697b4f0a40cfb4e1", 97 | "sha256:a96ff1875a49a40bd3e8ac04fce73fdb0870b9211e6168608cbafa4eb839d502" 98 | ], 99 | "index": "pypi", 100 | "version": "==2.5.2" 101 | }, 102 | "flask-sqlalchemy": { 103 | "hashes": [ 104 | "sha256:0078d8663330dc05a74bc72b3b6ddc441b9a744e2f56fe60af1a5bfc81334327", 105 | "sha256:6974785d913666587949f7c2946f7001e4fa2cb2d19f4e69ead02e4b8f50b33d" 106 | ], 107 | "index": "pypi", 108 | "version": "==2.4.1" 109 | }, 110 | "google-api-core": { 111 | "hashes": [ 112 | "sha256:b95895a9398026bc0500cf9b4a3f82c3f72c3f9150b26ff53af40c74e91c264a", 113 | "sha256:df8adc4b97f5ab4328a0e745bee77877cf4a7d4601cb1cd5959d2bbf8fba57aa" 114 | ], 115 | "version": "==1.14.3" 116 | }, 117 | "google-auth": { 118 | 
"hashes": [ 119 | "sha256:0f7c6a64927d34c1a474da92cfc59e552a5d3b940d3266606c6a28b72888b9e4", 120 | "sha256:20705f6803fd2c4d1cc2dcb0df09d4dfcb9a7d51fd59e94a3a28231fd93119ed" 121 | ], 122 | "version": "==1.6.3" 123 | }, 124 | "google-cloud-core": { 125 | "hashes": [ 126 | "sha256:0ee17abc74ff02176bee221d4896a00a3c202f3fb07125a7d814ccabd20d7eb5", 127 | "sha256:10750207c1a9ad6f6e082d91dbff3920443bdaf1c344a782730489a9efa802f1" 128 | ], 129 | "version": "==1.0.3" 130 | }, 131 | "google-cloud-storage": { 132 | "hashes": [ 133 | "sha256:13a6a820311662eb91a99810568c2bca5ddc7e44e2163fed4cb3f4d47da132cf", 134 | "sha256:2e7e2435978bda1c209b70a9a00b8cbc53c3b00d6f09eb2c991ebba857babf24" 135 | ], 136 | "index": "pypi", 137 | "version": "==1.20.0" 138 | }, 139 | "google-resumable-media": { 140 | "hashes": [ 141 | "sha256:5fd2e641f477e50be925a55bcfdf0b0cb97c2b92aacd7b15c1d339f70d55c1c7", 142 | "sha256:cdeb8fbb3551a665db921023603af2f0d6ac59ad8b48259cb510b8799505775f" 143 | ], 144 | "version": "==0.4.1" 145 | }, 146 | "googleapis-common-protos": { 147 | "hashes": [ 148 | "sha256:e61b8ed5e36b976b487c6e7b15f31bb10c7a0ca7bd5c0e837f4afab64b53a0c6" 149 | ], 150 | "version": "==1.6.0" 151 | }, 152 | "gunicorn": { 153 | "hashes": [ 154 | "sha256:aa8e0b40b4157b36a5df5e599f45c9c76d6af43845ba3b3b0efe2c70473c2471", 155 | "sha256:fa2662097c66f920f53f70621c6c58ca4a3c4d3434205e608e121b5b3b71f4f3" 156 | ], 157 | "index": "pypi", 158 | "version": "==19.9.0" 159 | }, 160 | "idna": { 161 | "hashes": [ 162 | "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", 163 | "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" 164 | ], 165 | "version": "==2.8" 166 | }, 167 | "importlib-metadata": { 168 | "hashes": [ 169 | "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", 170 | "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" 171 | ], 172 | "version": "==0.23" 173 | }, 174 | "inflection": { 175 | "hashes": [ 176 | 
"sha256:18ea7fb7a7d152853386523def08736aa8c32636b047ade55f7578c4edeb16ca" 177 | ], 178 | "version": "==0.3.1" 179 | }, 180 | "itsdangerous": { 181 | "hashes": [ 182 | "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", 183 | "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" 184 | ], 185 | "version": "==1.1.0" 186 | }, 187 | "jinja2": { 188 | "hashes": [ 189 | "sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f", 190 | "sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de" 191 | ], 192 | "version": "==2.10.3" 193 | }, 194 | "jsonschema": { 195 | "hashes": [ 196 | "sha256:2fa0684276b6333ff3c0b1b27081f4b2305f0a36cf702a23db50edb141893c3f", 197 | "sha256:94c0a13b4a0616458b42529091624e66700a17f847453e52279e35509a5b7631" 198 | ], 199 | "version": "==3.1.1" 200 | }, 201 | "libgravatar": { 202 | "hashes": [ 203 | "sha256:3da09feed1abbb903463a4c6c3de80fdbf83490fbeee2ff55d39a3a80878318f", 204 | "sha256:672116492aa483c957069ee2b3d130780fef2f5d49b2a3f8e16561bb2aa1c0db" 205 | ], 206 | "index": "pypi", 207 | "version": "==0.2.3" 208 | }, 209 | "mako": { 210 | "hashes": [ 211 | "sha256:a36919599a9b7dc5d86a7a8988f23a9a3a3d083070023bab23d64f7f1d1e0a4b" 212 | ], 213 | "version": "==1.1.0" 214 | }, 215 | "markupsafe": { 216 | "hashes": [ 217 | "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", 218 | "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", 219 | "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", 220 | "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", 221 | "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", 222 | "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", 223 | "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", 224 | "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", 225 | 
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", 226 | "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", 227 | "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", 228 | "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", 229 | "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", 230 | "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", 231 | "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", 232 | "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", 233 | "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", 234 | "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", 235 | "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", 236 | "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", 237 | "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", 238 | "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", 239 | "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", 240 | "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", 241 | "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", 242 | "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", 243 | "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", 244 | "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" 245 | ], 246 | "version": "==1.1.1" 247 | }, 248 | "more-itertools": { 249 | "hashes": [ 250 | "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", 251 | "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" 252 | ], 253 | "version": "==7.2.0" 254 | }, 255 | "mysqlclient": { 256 | "hashes": [ 257 | 
"sha256:79a498ddda955e488f80c82a6392bf6e07c323d48db236033f33825665d8ba5c", 258 | "sha256:8c3b61d89f7daaeab6aad6bf4c4bc3ef30bec1a8169f94dc59aea87ba2fabf80", 259 | "sha256:9c737cc55a5dc8dd3583a942d5a9b21be58d16f00f5fefca4e575e7d9682e98c" 260 | ], 261 | "index": "pypi", 262 | "version": "==1.4.4" 263 | }, 264 | "openapi-spec-validator": { 265 | "hashes": [ 266 | "sha256:0caacd9829e9e3051e830165367bf58d436d9487b29a09220fa7edb9f47ff81b", 267 | "sha256:d4da8aef72bf5be40cf0df444abd20009a41baf9048a8e03750c07a934f1bdd8", 268 | "sha256:e489c7a273284bc78277ac22791482e8058d323b4a265015e9fcddf6a8045bcd" 269 | ], 270 | "version": "==0.2.8" 271 | }, 272 | "pillow": { 273 | "hashes": [ 274 | "sha256:047d9473cf68af50ac85f8ee5d5f21a60f849bc17d348da7fc85711287a75031", 275 | "sha256:0f66dc6c8a3cc319561a633b6aa82c44107f12594643efa37210d8c924fc1c71", 276 | "sha256:12c9169c4e8fe0a7329e8658c7e488001f6b4c8e88740e76292c2b857af2e94c", 277 | "sha256:248cffc168896982f125f5c13e9317c059f74fffdb4152893339f3be62a01340", 278 | "sha256:27faf0552bf8c260a5cee21a76e031acaea68babb64daf7e8f2e2540745082aa", 279 | "sha256:285edafad9bc60d96978ed24d77cdc0b91dace88e5da8c548ba5937c425bca8b", 280 | "sha256:384b12c9aa8ef95558abdcb50aada56d74bc7cc131dd62d28c2d0e4d3aadd573", 281 | "sha256:38950b3a707f6cef09cd3cbb142474357ad1a985ceb44d921bdf7b4647b3e13e", 282 | "sha256:4aad1b88933fd6dc2846552b89ad0c74ddbba2f0884e2c162aa368374bf5abab", 283 | "sha256:4ac6148008c169603070c092e81f88738f1a0c511e07bd2bb0f9ef542d375da9", 284 | "sha256:4deb1d2a45861ae6f0b12ea0a786a03d19d29edcc7e05775b85ec2877cb54c5e", 285 | "sha256:59aa2c124df72cc75ed72c8d6005c442d4685691a30c55321e00ed915ad1a291", 286 | "sha256:5a47d2123a9ec86660fe0e8d0ebf0aa6bc6a17edc63f338b73ea20ba11713f12", 287 | "sha256:5cc901c2ab9409b4b7ac7b5bcc3e86ac14548627062463da0af3b6b7c555a871", 288 | "sha256:6c1db03e8dff7b9f955a0fb9907eb9ca5da75b5ce056c0c93d33100a35050281", 289 | "sha256:7ce80c0a65a6ea90ef9c1f63c8593fcd2929448613fc8da0adf3e6bfad669d08", 290 | 
"sha256:809c19241c14433c5d6135e1b6c72da4e3b56d5c865ad5736ab99af8896b8f41", 291 | "sha256:83792cb4e0b5af480588601467c0764242b9a483caea71ef12d22a0d0d6bdce2", 292 | "sha256:846fa202bd7ee0f6215c897a1d33238ef071b50766339186687bd9b7a6d26ac5", 293 | "sha256:9f5529fc02009f96ba95bea48870173426879dc19eec49ca8e08cd63ecd82ddb", 294 | "sha256:a423c2ea001c6265ed28700df056f75e26215fd28c001e93ef4380b0f05f9547", 295 | "sha256:ac4428094b42907aba5879c7c000d01c8278d451a3b7cccd2103e21f6397ea75", 296 | "sha256:b1ae48d87f10d1384e5beecd169c77502fcc04a2c00a4c02b85f0a94b419e5f9", 297 | "sha256:bf4e972a88f8841d8fdc6db1a75e0f8d763e66e3754b03006cbc3854d89f1cb1", 298 | "sha256:c6414f6aad598364aaf81068cabb077894eb88fed99c6a65e6e8217bab62ae7a", 299 | "sha256:c710fcb7ee32f67baf25aa9ffede4795fd5d93b163ce95fdc724383e38c9df96", 300 | "sha256:c7be4b8a09852291c3c48d3c25d1b876d2494a0a674980089ac9d5e0d78bd132", 301 | "sha256:c9e5ffb910b14f090ac9c38599063e354887a5f6d7e6d26795e916b4514f2c1a", 302 | "sha256:e0697b826da6c2472bb6488db4c0a7fa8af0d52fa08833ceb3681358914b14e5", 303 | "sha256:e9a3edd5f714229d41057d56ac0f39ad9bdba6767e8c888c951869f0bdd129b0" 304 | ], 305 | "index": "pypi", 306 | "version": "==6.2.1" 307 | }, 308 | "prance": { 309 | "hashes": [ 310 | "sha256:807d17cbf5e33e20615f2681ea3b1a535b4a27112df8d5253378a42f70cc4bcd", 311 | "sha256:d9925c28e794300fdcafe09f7f97d16b604e5f03db1b10fe5b9d186f8434ab10" 312 | ], 313 | "index": "pypi", 314 | "version": "==0.16.1" 315 | }, 316 | "protobuf": { 317 | "hashes": [ 318 | "sha256:125713564d8cfed7610e52444c9769b8dcb0b55e25cc7841f2290ee7bc86636f", 319 | "sha256:1accdb7a47e51503be64d9a57543964ba674edac103215576399d2d0e34eac77", 320 | "sha256:27003d12d4f68e3cbea9eb67427cab3bfddd47ff90670cb367fcd7a3a89b9657", 321 | "sha256:3264f3c431a631b0b31e9db2ae8c927b79fc1a7b1b06b31e8e5bcf2af91fe896", 322 | "sha256:3c5ab0f5c71ca5af27143e60613729e3488bb45f6d3f143dc918a20af8bab0bf", 323 | "sha256:45dcf8758873e3f69feab075e5f3177270739f146255225474ee0b90429adef6", 324 | 
"sha256:56a77d61a91186cc5676d8e11b36a5feb513873e4ae88d2ee5cf530d52bbcd3b", 325 | "sha256:5984e4947bbcef5bd849d6244aec507d31786f2dd3344139adc1489fb403b300", 326 | "sha256:6b0441da73796dd00821763bb4119674eaf252776beb50ae3883bed179a60b2a", 327 | "sha256:6f6677c5ade94d4fe75a912926d6796d5c71a2a90c2aeefe0d6f211d75c74789", 328 | "sha256:84a825a9418d7196e2acc48f8746cf1ee75877ed2f30433ab92a133f3eaf8fbe", 329 | "sha256:b842c34fe043ccf78b4a6cf1019d7b80113707d68c88842d061fa2b8fb6ddedc", 330 | "sha256:ca33d2f09dae149a1dcf942d2d825ebb06343b77b437198c9e2ef115cf5d5bc1", 331 | "sha256:db83b5c12c0cd30150bb568e6feb2435c49ce4e68fe2d7b903113f0e221e58fe", 332 | "sha256:f50f3b1c5c1c1334ca7ce9cad5992f098f460ffd6388a3cabad10b66c2006b09", 333 | "sha256:f99f127909731cafb841c52f9216e447d3e4afb99b17bebfad327a75aee206de" 334 | ], 335 | "version": "==3.10.0" 336 | }, 337 | "pyasn1": { 338 | "hashes": [ 339 | "sha256:62cdade8b5530f0b185e09855dd422bc05c0bbff6b72ff61381c09dac7befd8c", 340 | "sha256:a9495356ca1d66ed197a0f72b41eb1823cf7ea8b5bd07191673e8147aecf8604" 341 | ], 342 | "version": "==0.4.7" 343 | }, 344 | "pyasn1-modules": { 345 | "hashes": [ 346 | "sha256:0c35a52e00b672f832e5846826f1fb7507907f7d52fba6faa9e3c4cbe874fe4b", 347 | "sha256:b6ada4f840fe51abf5a6bd545b45bf537bea62221fa0dde2e8a553ed9f06a4e3" 348 | ], 349 | "version": "==0.2.7" 350 | }, 351 | "pyjwt": { 352 | "hashes": [ 353 | "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e", 354 | "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96" 355 | ], 356 | "index": "pypi", 357 | "version": "==1.7.1" 358 | }, 359 | "pymysql": { 360 | "hashes": [ 361 | "sha256:3943fbbbc1e902f41daf7f9165519f140c4451c179380677e6a848587042561a", 362 | "sha256:d8c059dcd81dedb85a9f034d5e22dcb4442c0b201908bede99e306d65ea7c8e7" 363 | ], 364 | "index": "pypi", 365 | "version": "==0.9.3" 366 | }, 367 | "pyrsistent": { 368 | "hashes": [ 369 | "sha256:eb6545dbeb1aa69ab1fb4809bfbf5a8705e44d92ef8fc7c2361682a47c46c778" 370 | ], 
371 | "version": "==0.15.5" 372 | }, 373 | "python-dateutil": { 374 | "hashes": [ 375 | "sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", 376 | "sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e" 377 | ], 378 | "version": "==2.8.0" 379 | }, 380 | "python-dotenv": { 381 | "hashes": [ 382 | "sha256:debd928b49dbc2bf68040566f55cdb3252458036464806f4094487244e2a4093", 383 | "sha256:f157d71d5fec9d4bd5f51c82746b6344dffa680ee85217c123f4a0c8117c4544" 384 | ], 385 | "index": "pypi", 386 | "version": "==0.10.3" 387 | }, 388 | "python-editor": { 389 | "hashes": [ 390 | "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", 391 | "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", 392 | "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8" 393 | ], 394 | "version": "==1.0.4" 395 | }, 396 | "pytz": { 397 | "hashes": [ 398 | "sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d", 399 | "sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be" 400 | ], 401 | "version": "==2019.3" 402 | }, 403 | "pyyaml": { 404 | "hashes": [ 405 | "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", 406 | "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", 407 | "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", 408 | "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", 409 | "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", 410 | "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", 411 | "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", 412 | "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", 413 | "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", 414 | "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", 415 | 
"sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", 416 | "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", 417 | "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" 418 | ], 419 | "version": "==5.1.2" 420 | }, 421 | "requests": { 422 | "hashes": [ 423 | "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4", 424 | "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31" 425 | ], 426 | "version": "==2.22.0" 427 | }, 428 | "rsa": { 429 | "hashes": [ 430 | "sha256:14ba45700ff1ec9eeb206a2ce76b32814958a98e372006c8fb76ba820211be66", 431 | "sha256:1a836406405730121ae9823e19c6e806c62bbad73f890574fff50efa4122c487" 432 | ], 433 | "version": "==4.0" 434 | }, 435 | "semver": { 436 | "hashes": [ 437 | "sha256:41c9aa26c67dc16c54be13074c352ab666bce1fa219c7110e8f03374cd4206b0", 438 | "sha256:5b09010a66d9a3837211bb7ae5a20d10ba88f8cb49e92cb139a69ef90d5060d8" 439 | ], 440 | "version": "==2.8.1" 441 | }, 442 | "six": { 443 | "hashes": [ 444 | "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", 445 | "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" 446 | ], 447 | "version": "==1.12.0" 448 | }, 449 | "sqlalchemy": { 450 | "hashes": [ 451 | "sha256:0f0768b5db594517e1f5e1572c73d14cf295140756431270d89496dc13d5e46c" 452 | ], 453 | "version": "==1.3.10" 454 | }, 455 | "swagger-ui-bundle": { 456 | "hashes": [ 457 | "sha256:01ae8fdb1fa4e034933e0874afdda0d433dcb94476fccb231b66fd5f49dac96c", 458 | "sha256:802f160dd6fe1d6b8fa92c6a40f593ef52f87ad0f507b1170ad2067f03de4c01", 459 | "sha256:e88bd0d8334d685440a85210ff1e1083a0caabd4c36fa061843067ff4c2ac680" 460 | ], 461 | "version": "==0.0.5" 462 | }, 463 | "urllib3": { 464 | "hashes": [ 465 | "sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398", 466 | "sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86" 467 | ], 468 | "version": "==1.25.6" 469 | }, 470 | 
"werkzeug": { 471 | "hashes": [ 472 | "sha256:7280924747b5733b246fe23972186c6b348f9ae29724135a6dfc1e53cea433e7", 473 | "sha256:e5f4a1f98b52b18a93da705a7458e55afb26f32bff83ff5d19189f92462d65c4" 474 | ], 475 | "version": "==0.16.0" 476 | }, 477 | "zipp": { 478 | "hashes": [ 479 | "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", 480 | "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" 481 | ], 482 | "version": "==0.6.0" 483 | } 484 | }, 485 | "develop": { 486 | "atomicwrites": { 487 | "hashes": [ 488 | "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", 489 | "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" 490 | ], 491 | "version": "==1.3.0" 492 | }, 493 | "attrs": { 494 | "hashes": [ 495 | "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", 496 | "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" 497 | ], 498 | "version": "==19.3.0" 499 | }, 500 | "certifi": { 501 | "hashes": [ 502 | "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", 503 | "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" 504 | ], 505 | "version": "==2019.9.11" 506 | }, 507 | "entrypoints": { 508 | "hashes": [ 509 | "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19", 510 | "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451" 511 | ], 512 | "version": "==0.3" 513 | }, 514 | "flake8": { 515 | "hashes": [ 516 | "sha256:45681a117ecc81e870cbf1262835ae4af5e7a8b08e40b944a8a6e6b895914cfb", 517 | "sha256:49356e766643ad15072a789a20915d3c91dc89fd313ccd71802303fd67e4deca" 518 | ], 519 | "index": "pypi", 520 | "version": "==3.7.9" 521 | }, 522 | "importlib-metadata": { 523 | "hashes": [ 524 | "sha256:aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26", 525 | "sha256:d5f18a79777f3aa179c145737780282e27b508fc8fd688cb17c7a813e8bd39af" 526 | ], 527 | "version": "==0.23" 
528 | }, 529 | "mccabe": { 530 | "hashes": [ 531 | "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", 532 | "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" 533 | ], 534 | "version": "==0.6.1" 535 | }, 536 | "more-itertools": { 537 | "hashes": [ 538 | "sha256:409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832", 539 | "sha256:92b8c4b06dac4f0611c0729b2f2ede52b2e1bac1ab48f089c7ddc12e26bb60c4" 540 | ], 541 | "version": "==7.2.0" 542 | }, 543 | "packaging": { 544 | "hashes": [ 545 | "sha256:28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47", 546 | "sha256:d9551545c6d761f3def1677baf08ab2a3ca17c56879e70fecba2fc4dde4ed108" 547 | ], 548 | "version": "==19.2" 549 | }, 550 | "pbr": { 551 | "hashes": [ 552 | "sha256:2c8e420cd4ed4cec4e7999ee47409e876af575d4c35a45840d59e8b5f3155ab8", 553 | "sha256:b32c8ccaac7b1a20c0ce00ce317642e6cf231cf038f9875e0280e28af5bf7ac9" 554 | ], 555 | "version": "==5.4.3" 556 | }, 557 | "pipenv": { 558 | "hashes": [ 559 | "sha256:56ad5f5cb48f1e58878e14525a6e3129d4306049cb76d2f6a3e95df0d5fc6330", 560 | "sha256:7df8e33a2387de6f537836f48ac6fcd94eda6ed9ba3d5e3fd52e35b5bc7ff49e", 561 | "sha256:a673e606e8452185e9817a987572b55360f4d28b50831ef3b42ac3cab3fee846" 562 | ], 563 | "version": "==2018.11.26" 564 | }, 565 | "pipenv-to-requirements": { 566 | "hashes": [ 567 | "sha256:115390158232f53983f1d989f922d890adbdaaaa95c8b2357a77b9f5fe647862", 568 | "sha256:5b7349e76d2c511e8b4a723495311b310e4e33bb716437b30cc18ecb0b0b5e29" 569 | ], 570 | "index": "pypi", 571 | "version": "==0.8.2" 572 | }, 573 | "pluggy": { 574 | "hashes": [ 575 | "sha256:0db4b7601aae1d35b4a033282da476845aa19185c1e6964b25cf324b5e4ec3e6", 576 | "sha256:fa5fa1622fa6dd5c030e9cad086fa19ef6a0cf6d7a2d12318e10cb49d6d68f34" 577 | ], 578 | "version": "==0.13.0" 579 | }, 580 | "py": { 581 | "hashes": [ 582 | "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", 583 | 
"sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" 584 | ], 585 | "version": "==1.8.0" 586 | }, 587 | "pycodestyle": { 588 | "hashes": [ 589 | "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", 590 | "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" 591 | ], 592 | "version": "==2.5.0" 593 | }, 594 | "pyflakes": { 595 | "hashes": [ 596 | "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0", 597 | "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2" 598 | ], 599 | "version": "==2.1.1" 600 | }, 601 | "pyparsing": { 602 | "hashes": [ 603 | "sha256:6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80", 604 | "sha256:d9338df12903bbf5d65a0e4e87c2161968b10d2e489652bb47001d82a9b028b4" 605 | ], 606 | "version": "==2.4.2" 607 | }, 608 | "pytest": { 609 | "hashes": [ 610 | "sha256:27abc3fef618a01bebb1f0d6d303d2816a99aa87a5968ebc32fe971be91eb1e6", 611 | "sha256:58cee9e09242937e136dbb3dab466116ba20d6b7828c7620f23947f37eb4dae4" 612 | ], 613 | "index": "pypi", 614 | "version": "==5.2.2" 615 | }, 616 | "pyyaml": { 617 | "hashes": [ 618 | "sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9", 619 | "sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4", 620 | "sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8", 621 | "sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696", 622 | "sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34", 623 | "sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9", 624 | "sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73", 625 | "sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299", 626 | "sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b", 627 | "sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae", 628 | 
"sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681", 629 | "sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41", 630 | "sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8" 631 | ], 632 | "version": "==5.1.2" 633 | }, 634 | "six": { 635 | "hashes": [ 636 | "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", 637 | "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" 638 | ], 639 | "version": "==1.12.0" 640 | }, 641 | "virtualenv": { 642 | "hashes": [ 643 | "sha256:11cb4608930d5fd3afb545ecf8db83fa50e1f96fc4fca80c94b07d2c83146589", 644 | "sha256:d257bb3773e48cac60e475a19b608996c73f4d333b3ba2e4e57d5ac6134e0136" 645 | ], 646 | "version": "==16.7.7" 647 | }, 648 | "virtualenv-clone": { 649 | "hashes": [ 650 | "sha256:532f789a5c88adf339506e3ca03326f20ee82fd08ee5586b44dc859b5b4468c5", 651 | "sha256:c88ae171a11b087ea2513f260cdac9232461d8e9369bcd1dc143fc399d220557" 652 | ], 653 | "version": "==0.5.3" 654 | }, 655 | "wcwidth": { 656 | "hashes": [ 657 | "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e", 658 | "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c" 659 | ], 660 | "version": "==0.1.7" 661 | }, 662 | "zipp": { 663 | "hashes": [ 664 | "sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e", 665 | "sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335" 666 | ], 667 | "version": "==0.6.0" 668 | } 669 | } 670 | } 671 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Open Test Data API 2 | 3 | This is the code that powers the API for [opentestdata.org](https://opentestdata.org), the free and open database of automated test fixture data. 
The canonical publicly hosted instance of this API is at [api.opentestdata.org](https://api.opentestdata.org) (it is an API server, though, so loading that URL in a web browser will not show anything interesting — but see the API docs section below).

## `!!!` Under Construction `!!!`

*This project is very much in early development. Before you submit a PR, talk to us. We might be working on it already.*

If you're interested in contributing, check out the list of open issues.

## Usage

Follow the development instructions below to set up an instance of this API server.

## API Documentation

API docs are returned by this API server at the `/ui` URL. You can find them hosted publicly [here](http://api.opentestdata.org/ui/).

## Development

At a high level, the basic tech stack looks like this:

* Python (programming language)
* Flask (HTTP server)
* SQLAlchemy (ORM)
* OpenAPI (API specification)

In production, the site runs on Google Cloud services.

### Local development setup

0. Clone the repo, and navigate into it
0. Ensure you have Python 3.7+, Pip, and Pipenv
0. Ensure mysql is installed and running, then run the following script to set up development and test databases
```
mysql -u <your-mysql-user> -p < api/db/init-dev.sql
```
0. Run `make dev` to get dependencies set up. (This includes mysql client dependencies, which build native bindings. If you installed mysql/openssl via homebrew, you may need to `export LDFLAGS=-L/usr/local/opt/openssl/lib` before running this to make sure it can find openssl.)

Anytime you check out new code, you should:

0. Re-run `make dev`
0. Re-run `make migrate` to make sure your db is upgraded with any new schemas
0. 
Re-run `make test` to ensure all the tests pass before you begin work

Anytime you are about to submit a pull request, you should:

0. Check if a database schema migration is required (and if so, generate, verify, and commit it)
0. Re-run `make test` to ensure tests pass before you commit
0. Re-run `make lint` to ensure code style conforms to the standard.

### Dev tasks

|Command|Action|
|-------|------|
|`make dev`|Install dev deps|
|`make reqs`|Generate requirements files from pipenv, for use with e.g. appengine|
|`make serve`|Run the development server on the default port of 5000|
|`make migrate-init`|Initialize the migration system|
|`make migrate-new`|Create a new migration file to review and commit|
|`make migrate-rev`|Create an empty migration revision file to be filled out manually (useful when writing a custom migration script, for example to add options to an enum field type)|
|`make migrate`|Update the database based on all the current migration files|
|`make migrate-history`|Show the migration history|
|`make test`|Run the API tests using the test database (requires that the db initialization script has been run)|
|`make lint`|Run the lint script to ensure you aren't going to commit any style errors|

## Production

You probably won't have access to production services, but if you do:

* In production, we run on Google AppEngine, in a Flask-like environment. Assume you have the [gcloud](https://cloud.google.com/sdk/docs/quickstart-macos) client ready.
* `app.yaml` defines the AppEngine config
* `app_secrets.yaml` is not checked into git but contains secret environment variables used in production.

### Production tasks

|Command|Action|
|-------|------|
|`make deploy`|Deploy the app to AppEngine. 
Requires the `OTD_PROJECT_ID` env var to be set correctly, and the `app_secrets.yaml` file to be in the root of the repo. This file contains the environment variables used to connect to the production database.| 80 | |`FLASK_ENV=prod_migration make migrate`|Migrate the production database. Must have correct db data set in `app_secrets.yaml` and have the access to make a connection to the prod db| 81 | 82 | Note that care must be taken regarding DB migrations, including using a phased deploy strategy. If a breaking change is made to a DB schema, code must first be deployed that can handle either version of the schema. Then the migration can be run, and only after that can new code be deployed that is responsible only for handling the new version of the schema. 83 | -------------------------------------------------------------------------------- /api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testdotai/opentestdata-api/d88474d59775c77264ba78166b5633937a45b85a/api/__init__.py -------------------------------------------------------------------------------- /api/app.py: -------------------------------------------------------------------------------- 1 | import connexion 2 | import prance 3 | 4 | from pathlib import Path 5 | from .config import get_config 6 | from .db import db, migrate 7 | from flask_cors import CORS 8 | 9 | 10 | def get_bundled_specs(main_file): 11 | parser = prance.ResolvingParser(str(main_file.absolute()), lazy=True, strict=True) 12 | parser.parse() 13 | return parser.specification 14 | 15 | 16 | def create_app(): 17 | connexionApp = connexion.FlaskApp(__name__) 18 | app = connexionApp.app 19 | connexionApp.add_api(get_bundled_specs(Path('api/openapi/api.yaml'))) 20 | CORS(app) 21 | app.config.from_object(get_config()) 22 | db.init_app(app) 23 | migrate.init_app(app, db, directory='api/db/migrations') 24 | return app 25 | 
-------------------------------------------------------------------------------- /api/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | import yaml 3 | 4 | 5 | APP_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..') 6 | 7 | 8 | class Config(object): 9 | DEBUG = False 10 | TESTING = False 11 | SQLALCHEMY_TRACK_MODIFICATIONS = False 12 | JWT_SECRET = 'justfordevandtest' 13 | AVATAR_STORAGE = 'local' 14 | AVATAR_PATH = os.path.join(APP_ROOT, 'public', 'avatars') 15 | EMAIL_SENDING = 'local' 16 | EMAIL_FROM = 'no-reply@opentestdata.org' 17 | 18 | 19 | class ProdConfig(Config): 20 | DB_INSTANCE_CONN_NAME = os.environ.get('DB_INSTANCE_CONN_NAME') 21 | DB_USER = os.environ.get('DB_USER') 22 | DB_PASS = os.environ.get('DB_PASS') 23 | DB_NAME = os.environ.get('DB_NAME') 24 | SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://%s:%s@/%s?unix_socket=/cloudsql/%s' % (DB_USER, DB_PASS, DB_NAME, DB_INSTANCE_CONN_NAME) 25 | JWT_SECRET = os.environ.get('JWT_SECRET') 26 | AVATAR_STORAGE = 'cloud' 27 | AVATAR_BUCKET = 'otd-avatars' 28 | EMAIL_SENDING = 'cloud' 29 | 30 | 31 | class ProdMigrationConfig(Config): 32 | 33 | def __init__(self): 34 | # this config assumes a local sql connection to cloud sql server 35 | with open(os.path.join(APP_ROOT, 'app_secrets.yaml')) as secrets_file: 36 | secrets = yaml.safe_load(secrets_file)['env_variables'] 37 | ProdMigrationConfig.DB_USER = os.environ.get('DB_USER') 38 | ProdMigrationConfig.DB_PASS = os.environ.get('DB_PASS') 39 | ProdMigrationConfig.DB_NAME = os.environ.get('DB_NAME') 40 | ProdMigrationConfig.SQLALCHEMY_DATABASE_URI = 'mysql://%s:%s@%s/%s' % (secrets['DB_USER'], secrets['DB_PASS'], secrets['DB_SERVER'], secrets['DB_NAME']) 41 | 42 | 43 | class DevConfig(Config): 44 | DEBUG = True 45 | SQLALCHEMY_DATABASE_URI = 'mysql://otd_user:otd_pass@localhost/otd_dev' 46 | AVATAR_PATH = os.path.join(APP_ROOT, 'public_dev', 'avatars') 47 | 48 | 49 | class TestConfig(Config): 
50 | TESTING = True 51 | SQLALCHEMY_DATABASE_URI = 'mysql://otd_user:otd_pass@localhost/otd_test' 52 | AVATAR_PATH = os.path.join(APP_ROOT, 'public_test', 'avatars') 53 | 54 | 55 | def get_config(): 56 | # if we're in prod, the GAE_ENV var will be set 57 | if os.environ.get('GAE_ENV') == 'standard': 58 | return ProdConfig() 59 | 60 | # otherwise we'll be running locally and should look at FLASK_ENV 61 | env = os.environ.get('FLASK_ENV') 62 | if env == 'development': 63 | return DevConfig() 64 | elif env == 'testing': 65 | return TestConfig() 66 | elif env == 'prod_migration': 67 | return ProdMigrationConfig() 68 | 69 | raise Exception('Could not find config; did you set FLASK_ENV?') 70 | -------------------------------------------------------------------------------- /api/db/__init__.py: -------------------------------------------------------------------------------- 1 | from flask_sqlalchemy import SQLAlchemy 2 | from flask_migrate import Migrate 3 | 4 | 5 | db = SQLAlchemy() 6 | migrate = Migrate() 7 | -------------------------------------------------------------------------------- /api/db/init-dev.sql: -------------------------------------------------------------------------------- 1 | DROP DATABASE IF EXISTS otd_dev; 2 | DROP DATABASE IF EXISTS otd_test; 3 | DROP USER IF EXISTS 'otd_user'@'localhost'; 4 | 5 | CREATE DATABASE otd_dev; 6 | CREATE DATABASE otd_test; 7 | 8 | CREATE USER 'otd_user'@'localhost' IDENTIFIED BY 'otd_pass'; 9 | GRANT ALL ON otd_dev.* to 'otd_user'@'localhost'; 10 | GRANT ALL ON otd_test.* to 'otd_user'@'localhost'; 11 | -------------------------------------------------------------------------------- /api/db/migrations/README: -------------------------------------------------------------------------------- 1 | Generic single-database configuration. 
--------------------------------------------------------------------------------
/api/db/migrations/alembic.ini:
--------------------------------------------------------------------------------
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
--------------------------------------------------------------------------------
/api/db/migrations/env.py:
--------------------------------------------------------------------------------
from __future__ import with_statement

import logging
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# The database URL and model metadata are pulled from the live Flask app.
# NOTE(review): this requires an active Flask application context — one is
# presumably provided by Flask-Migrate when invoked via `make migrate`;
# confirm before running alembic directly.
from flask import current_app
# '%' is doubled because alembic.ini values pass through configparser
# interpolation, which treats '%' as a special character.
config.set_main_option(
    'sqlalchemy.url', current_app.config.get(
        'SQLALCHEMY_DATABASE_URI').replace('%', '%%'))
# Autogenerate diffs are computed against the app's SQLAlchemy metadata.
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                # dropping all directives aborts generation of the revision file
                directives[:] = []
                logger.info('No changes in schema detected.')

    # NullPool: no pooled connections are kept around for a one-shot
    # migration run.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


# Entry point: alembic decides offline vs. online mode from its command line.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
--------------------------------------------------------------------------------
/api/db/migrations/script.py.mako:
--------------------------------------------------------------------------------
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade(): 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade(): 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /api/db/migrations/versions/18b25b3efd9b_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 18b25b3efd9b 4 | Revises: 5838d398351f 5 | Create Date: 2019-09-30 22:14:05.646663 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '18b25b3efd9b' 14 | down_revision = '5838d398351f' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | # adding new actiontypes: 20 | # 21 | # CREATE_DATUM = 200 22 | # UPDATE_DATUM = 202 23 | # LIST_DATUM = 204 24 | # CREATE_TEST = 300 25 | # LIST_TEST = 304 26 | # SEARCH = 500 27 | def upgrade(): 28 | op.alter_column('action', 'type', 29 | existing_type=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', 'RESEND_EMAIL_CONFIRM', name='actiontype'), 30 | type_=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', 'RESEND_EMAIL_CONFIRM', 'CREATE_DATUM', 'UPDATE_DATUM', 'LIST_DATUM', 'CREATE_TEST', 'LIST_TEST', 'SEARCH', name='actiontype'), 31 | existing_nullable=False, 32 | existing_server_default=None, 33 | ) 34 | 35 | 36 | def downgrade(): 37 | op.alter_column('action', 'type', 38 | type_=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', 'RESEND_EMAIL_CONFIRM', name='actiontype'), 39 | existing_type=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', 'RESEND_EMAIL_CONFIRM', 
'CREATE_DATUM', 'UPDATE_DATUM', 'LIST_DATUM', 'CREATE_TEST', 'LIST_TEST', 'SEARCH', name='actiontype'), 40 | existing_nullable=False, 41 | existing_server_default=None, 42 | ) 43 | -------------------------------------------------------------------------------- /api/db/migrations/versions/21ad3631a30b_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 21ad3631a30b 4 | Revises: c8090718abc3 5 | Create Date: 2019-07-31 15:57:39.244849 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '21ad3631a30b' 14 | down_revision = 'c8090718abc3' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | op.alter_column('action', 'type', 21 | existing_type=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', name='actiontype'), 22 | type_=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', 'RESEND_EMAIL_CONFIRM', name='actiontype'), 23 | existing_nullable=False, 24 | existing_server_default=None, 25 | ) 26 | 27 | 28 | def downgrade(): 29 | op.alter_column('action', 'type', 30 | existing_type=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', 'RESEND_EMAIL_CONFIRM', name='actiontype'), 31 | type_=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', name='actiontype'), 32 | existing_nullable=False, 33 | existing_server_default=None, 34 | ) 35 | -------------------------------------------------------------------------------- /api/db/migrations/versions/2501865a09b1_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 2501865a09b1 4 | Revises: c800ac2fe1b9 5 | Create Date: 2019-06-21 15:57:34.716115 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from 
sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '2501865a09b1' 14 | down_revision = 'c800ac2fe1b9' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('users', sa.Column('has_avatar', sa.Boolean(), nullable=True)) 22 | op.add_column('users', sa.Column('is_admin', sa.Boolean(), nullable=True)) 23 | op.drop_column('users', 'deleted_at') 24 | # ### end Alembic commands ### 25 | 26 | 27 | def downgrade(): 28 | # ### commands auto generated by Alembic - please adjust! ### 29 | op.add_column('users', sa.Column('deleted_at', mysql.DATETIME(), nullable=True)) 30 | op.drop_column('users', 'is_admin') 31 | op.drop_column('users', 'has_avatar') 32 | # ### end Alembic commands ### 33 | -------------------------------------------------------------------------------- /api/db/migrations/versions/27ff0681ea2d_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 27ff0681ea2d 4 | Revises: e8a76b87ea48 5 | Create Date: 2019-08-05 16:20:11.404035 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '27ff0681ea2d' 14 | down_revision = 'e8a76b87ea48' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('child_data', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 24 | sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 25 | sa.Column('parent_id', sa.Integer(), nullable=False), 26 | sa.Column('datum_id', sa.Integer(), nullable=False), 27 | sa.Column('name', sa.String(length=100), nullable=False), 28 | sa.ForeignKeyConstraint(['datum_id'], ['data.id'], ), 29 | sa.ForeignKeyConstraint(['parent_id'], ['data.id'], ), 30 | sa.PrimaryKeyConstraint('id') 31 | ) 32 | op.alter_column('data', 'value', 33 | existing_type=mysql.VARCHAR(collation='utf8mb4_general_ci', length=1024), 34 | nullable=True) 35 | 36 | op.alter_column('data', 'type', 37 | existing_type=sa.Enum('NULL', 'BOOLEAN', 'NUMBER', 'STRING', name='datumtype'), 38 | type_=sa.Enum('NULL', 'BOOLEAN', 'NUMBER', 'STRING', 'OBJECT', name='datumtype'), 39 | existing_nullable=False, 40 | existing_server_default=None) 41 | 42 | # ### end Alembic commands ### 43 | 44 | 45 | def downgrade(): 46 | # ### commands auto generated by Alembic - please adjust! 
### 47 | op.alter_column('data', 'value', 48 | existing_type=mysql.VARCHAR(collation='utf8mb4_general_ci', length=1024), 49 | nullable=False) 50 | op.drop_table('child_data') 51 | 52 | op.alter_column('data', 'type', 53 | type_=sa.Enum('NULL', 'BOOLEAN', 'NUMBER', 'STRING', name='datumtype'), 54 | existing_type=sa.Enum('NULL', 'BOOLEAN', 'NUMBER', 'STRING', 'OBJECT', name='datumtype'), 55 | existing_nullable=False, 56 | existing_server_default=None) 57 | # ### end Alembic commands ### 58 | -------------------------------------------------------------------------------- /api/db/migrations/versions/2b5e44902390_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 2b5e44902390 4 | Revises: 27ff0681ea2d 5 | Create Date: 2019-09-18 20:52:47.924615 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '2b5e44902390' 14 | down_revision = '27ff0681ea2d' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! 
### 21 | op.create_table('tests', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 24 | sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 25 | sa.Column('user_id', sa.Integer(), nullable=False), 26 | sa.Column('name', sa.String(length=160), nullable=False), 27 | sa.Column('hashed_data', sa.String(length=128), nullable=False), 28 | sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), 29 | sa.PrimaryKeyConstraint('id') 30 | ) 31 | op.create_table('test_data', 32 | sa.Column('id', sa.Integer(), nullable=False), 33 | sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 34 | sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 35 | sa.Column('test_id', sa.Integer(), nullable=False), 36 | sa.Column('datum_id', sa.Integer(), nullable=False), 37 | sa.Column('label', sa.String(length=80), nullable=False), 38 | sa.Column('disposition', sa.Enum('NEUTRAL', 'POSITIVE', 'NEGATIVE', 'EDGE', 'DESTRUCTIVE', name='disposition'), nullable=False), 39 | sa.ForeignKeyConstraint(['datum_id'], ['data.id'], ), 40 | sa.ForeignKeyConstraint(['test_id'], ['tests.id'], ), 41 | sa.PrimaryKeyConstraint('id') 42 | ) 43 | # ### end Alembic commands ### 44 | 45 | 46 | def downgrade(): 47 | # ### commands auto generated by Alembic - please adjust! 
### 48 | op.drop_table('test_data') 49 | op.drop_table('tests') 50 | # ### end Alembic commands ### 51 | -------------------------------------------------------------------------------- /api/db/migrations/versions/410649dcf541_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 410649dcf541 4 | Revises: 965f54a262aa 5 | Create Date: 2019-07-25 14:07:03.370266 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '410649dcf541' 14 | down_revision = '965f54a262aa' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('users', sa.Column('bio', sa.String(length=220), nullable=False)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.drop_column('users', 'bio') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /api/db/migrations/versions/5838d398351f_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 5838d398351f 4 | Revises: 2b5e44902390 5 | Create Date: 2019-09-18 21:08:06.042197 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '5838d398351f' 14 | down_revision = '2b5e44902390' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('data', sa.Column('child_ids_hash', sa.String(length=128), nullable=True)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! 
### 27 | op.drop_column('data', 'child_ids_hash') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /api/db/migrations/versions/796e284aa6dc_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 796e284aa6dc 4 | Revises: bb6244d75df4 5 | Create Date: 2019-07-22 15:09:45.782251 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '796e284aa6dc' 14 | down_revision = 'bb6244d75df4' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.alter_column('action', 'created_at', 22 | existing_type=mysql.DATETIME(), 23 | type_=sa.DateTime(timezone=True), 24 | existing_nullable=False, 25 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 26 | op.alter_column('data', 'created_at', 27 | existing_type=mysql.DATETIME(), 28 | type_=sa.DateTime(timezone=True), 29 | existing_nullable=False, 30 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 31 | op.alter_column('data', 'updated_at', 32 | existing_type=mysql.DATETIME(), 33 | type_=sa.DateTime(timezone=True), 34 | existing_nullable=False, 35 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 36 | op.alter_column('ec_tokens', 'created_at', 37 | existing_type=mysql.DATETIME(), 38 | type_=sa.DateTime(timezone=True), 39 | existing_nullable=False, 40 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 41 | op.alter_column('ec_tokens', 'token', 42 | existing_type=mysql.VARCHAR(collation='utf8mb4_general_ci', length=16), 43 | type_=sa.String(length=20), 44 | existing_nullable=False) 45 | op.alter_column('ec_tokens', 'updated_at', 46 | existing_type=mysql.DATETIME(), 47 | type_=sa.DateTime(timezone=True), 48 | existing_nullable=False, 49 | 
existing_server_default=sa.text('CURRENT_TIMESTAMP')) 50 | op.alter_column('users', 'created_at', 51 | existing_type=mysql.DATETIME(), 52 | type_=sa.DateTime(timezone=True), 53 | existing_nullable=False, 54 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 55 | op.alter_column('users', 'has_avatar', 56 | existing_type=mysql.TINYINT(display_width=1), 57 | type_=sa.Boolean(), 58 | existing_nullable=True) 59 | op.alter_column('users', 'is_admin', 60 | existing_type=mysql.TINYINT(display_width=1), 61 | type_=sa.Boolean(), 62 | existing_nullable=True) 63 | op.alter_column('users', 'is_email_confirmed', 64 | existing_type=mysql.TINYINT(display_width=1), 65 | type_=sa.Boolean(), 66 | existing_nullable=True) 67 | op.alter_column('users', 'updated_at', 68 | existing_type=mysql.DATETIME(), 69 | type_=sa.DateTime(timezone=True), 70 | existing_nullable=False, 71 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 72 | # ### end Alembic commands ### 73 | 74 | 75 | def downgrade(): 76 | # ### commands auto generated by Alembic - please adjust! 
### 77 | op.alter_column('users', 'updated_at', 78 | existing_type=sa.DateTime(timezone=True), 79 | type_=mysql.DATETIME(), 80 | existing_nullable=False, 81 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 82 | op.alter_column('users', 'is_email_confirmed', 83 | existing_type=sa.Boolean(), 84 | type_=mysql.TINYINT(display_width=1), 85 | existing_nullable=True) 86 | op.alter_column('users', 'is_admin', 87 | existing_type=sa.Boolean(), 88 | type_=mysql.TINYINT(display_width=1), 89 | existing_nullable=True) 90 | op.alter_column('users', 'has_avatar', 91 | existing_type=sa.Boolean(), 92 | type_=mysql.TINYINT(display_width=1), 93 | existing_nullable=True) 94 | op.alter_column('users', 'created_at', 95 | existing_type=sa.DateTime(timezone=True), 96 | type_=mysql.DATETIME(), 97 | existing_nullable=False, 98 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 99 | op.alter_column('ec_tokens', 'updated_at', 100 | existing_type=sa.DateTime(timezone=True), 101 | type_=mysql.DATETIME(), 102 | existing_nullable=False, 103 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 104 | op.alter_column('ec_tokens', 'token', 105 | existing_type=sa.String(length=20), 106 | type_=mysql.VARCHAR(collation='utf8mb4_general_ci', length=16), 107 | existing_nullable=False) 108 | op.alter_column('ec_tokens', 'created_at', 109 | existing_type=sa.DateTime(timezone=True), 110 | type_=mysql.DATETIME(), 111 | existing_nullable=False, 112 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 113 | op.alter_column('data', 'updated_at', 114 | existing_type=sa.DateTime(timezone=True), 115 | type_=mysql.DATETIME(), 116 | existing_nullable=False, 117 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 118 | op.alter_column('data', 'created_at', 119 | existing_type=sa.DateTime(timezone=True), 120 | type_=mysql.DATETIME(), 121 | existing_nullable=False, 122 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 123 | op.alter_column('action', 'created_at', 124 | 
existing_type=sa.DateTime(timezone=True), 125 | type_=mysql.DATETIME(), 126 | existing_nullable=False, 127 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 128 | # ### end Alembic commands ### 129 | -------------------------------------------------------------------------------- /api/db/migrations/versions/89ff59bac28a_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 89ff59bac28a 4 | Revises: 5 | Create Date: 2019-06-21 13:21:09.354173 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '89ff59bac28a' 14 | down_revision = None 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('user', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('username', sa.String(length=80), nullable=False), 24 | sa.Column('email', sa.String(length=120), nullable=False), 25 | sa.Column('password_hash', sa.String(length=128), nullable=True), 26 | sa.PrimaryKeyConstraint('id'), 27 | sa.UniqueConstraint('email'), 28 | sa.UniqueConstraint('username') 29 | ) 30 | # ### end Alembic commands ### 31 | 32 | 33 | def downgrade(): 34 | # ### commands auto generated by Alembic - please adjust! ### 35 | op.drop_table('user') 36 | # ### end Alembic commands ### 37 | -------------------------------------------------------------------------------- /api/db/migrations/versions/93866a5dad44_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 93866a5dad44 4 | Revises: e134be65efb1 5 | Create Date: 2019-06-26 16:10:35.837355 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = '93866a5dad44' 14 | down_revision = 'e134be65efb1' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('data', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 24 | sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 25 | sa.Column('name', sa.String(length=100), nullable=False), 26 | sa.Column('user_id', sa.Integer(), nullable=False), 27 | sa.Column('type', sa.Enum('NULL', 'BOOLEAN', 'NUMBER', 'STRING', name='datumtype'), nullable=False), 28 | sa.Column('mode', sa.Enum('NEUTRAL', 'POSITIVE', 'NEGATIVE', 'EDGE', 'DESTRUCTIVE', name='datummode'), nullable=False), 29 | sa.Column('datum', sa.String(length=1024), nullable=False), 30 | sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), 31 | sa.PrimaryKeyConstraint('id') 32 | ) 33 | op.create_table('ec_tokens', 34 | sa.Column('id', sa.Integer(), nullable=False), 35 | sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 36 | sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 37 | sa.Column('token', sa.String(length=16), nullable=False), 38 | sa.Column('user_id', sa.Integer(), nullable=False), 39 | sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), 40 | sa.PrimaryKeyConstraint('id'), 41 | sa.UniqueConstraint('token') 42 | ) 43 | # ### end Alembic commands ### 44 | 45 | 46 | def downgrade(): 47 | # ### commands auto generated by Alembic - please adjust! 
### 48 | op.drop_table('ec_tokens') 49 | op.drop_table('data') 50 | # ### end Alembic commands ### 51 | -------------------------------------------------------------------------------- /api/db/migrations/versions/965f54a262aa_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: 965f54a262aa 4 | Revises: 796e284aa6dc 5 | Create Date: 2019-07-22 15:12:43.966888 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = '965f54a262aa' 14 | down_revision = '796e284aa6dc' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.alter_column('action', 'created_at', 22 | existing_type=mysql.DATETIME(), 23 | type_=sa.DateTime(timezone=True), 24 | existing_nullable=False, 25 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 26 | op.alter_column('data', 'created_at', 27 | existing_type=mysql.DATETIME(), 28 | type_=sa.DateTime(timezone=True), 29 | existing_nullable=False, 30 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 31 | op.alter_column('data', 'updated_at', 32 | existing_type=mysql.DATETIME(), 33 | type_=sa.DateTime(timezone=True), 34 | existing_nullable=False, 35 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 36 | op.alter_column('ec_tokens', 'created_at', 37 | existing_type=mysql.DATETIME(), 38 | type_=sa.DateTime(timezone=True), 39 | existing_nullable=False, 40 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 41 | op.alter_column('ec_tokens', 'token', 42 | existing_type=mysql.VARCHAR(collation='utf8mb4_general_ci', length=20), 43 | type_=sa.String(length=32), 44 | existing_nullable=False) 45 | op.alter_column('ec_tokens', 'updated_at', 46 | existing_type=mysql.DATETIME(), 47 | type_=sa.DateTime(timezone=True), 48 | existing_nullable=False, 49 | 
existing_server_default=sa.text('CURRENT_TIMESTAMP')) 50 | op.alter_column('users', 'created_at', 51 | existing_type=mysql.DATETIME(), 52 | type_=sa.DateTime(timezone=True), 53 | existing_nullable=False, 54 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 55 | op.alter_column('users', 'has_avatar', 56 | existing_type=mysql.TINYINT(display_width=1), 57 | type_=sa.Boolean(), 58 | existing_nullable=True) 59 | op.alter_column('users', 'is_admin', 60 | existing_type=mysql.TINYINT(display_width=1), 61 | type_=sa.Boolean(), 62 | existing_nullable=True) 63 | op.alter_column('users', 'is_email_confirmed', 64 | existing_type=mysql.TINYINT(display_width=1), 65 | type_=sa.Boolean(), 66 | existing_nullable=True) 67 | op.alter_column('users', 'updated_at', 68 | existing_type=mysql.DATETIME(), 69 | type_=sa.DateTime(timezone=True), 70 | existing_nullable=False, 71 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 72 | # ### end Alembic commands ### 73 | 74 | 75 | def downgrade(): 76 | # ### commands auto generated by Alembic - please adjust! 
### 77 | op.alter_column('users', 'updated_at', 78 | existing_type=sa.DateTime(timezone=True), 79 | type_=mysql.DATETIME(), 80 | existing_nullable=False, 81 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 82 | op.alter_column('users', 'is_email_confirmed', 83 | existing_type=sa.Boolean(), 84 | type_=mysql.TINYINT(display_width=1), 85 | existing_nullable=True) 86 | op.alter_column('users', 'is_admin', 87 | existing_type=sa.Boolean(), 88 | type_=mysql.TINYINT(display_width=1), 89 | existing_nullable=True) 90 | op.alter_column('users', 'has_avatar', 91 | existing_type=sa.Boolean(), 92 | type_=mysql.TINYINT(display_width=1), 93 | existing_nullable=True) 94 | op.alter_column('users', 'created_at', 95 | existing_type=sa.DateTime(timezone=True), 96 | type_=mysql.DATETIME(), 97 | existing_nullable=False, 98 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 99 | op.alter_column('ec_tokens', 'updated_at', 100 | existing_type=sa.DateTime(timezone=True), 101 | type_=mysql.DATETIME(), 102 | existing_nullable=False, 103 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 104 | op.alter_column('ec_tokens', 'token', 105 | existing_type=sa.String(length=32), 106 | type_=mysql.VARCHAR(collation='utf8mb4_general_ci', length=20), 107 | existing_nullable=False) 108 | op.alter_column('ec_tokens', 'created_at', 109 | existing_type=sa.DateTime(timezone=True), 110 | type_=mysql.DATETIME(), 111 | existing_nullable=False, 112 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 113 | op.alter_column('data', 'updated_at', 114 | existing_type=sa.DateTime(timezone=True), 115 | type_=mysql.DATETIME(), 116 | existing_nullable=False, 117 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 118 | op.alter_column('data', 'created_at', 119 | existing_type=sa.DateTime(timezone=True), 120 | type_=mysql.DATETIME(), 121 | existing_nullable=False, 122 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 123 | op.alter_column('action', 'created_at', 124 | 
existing_type=sa.DateTime(timezone=True), 125 | type_=mysql.DATETIME(), 126 | existing_nullable=False, 127 | existing_server_default=sa.text('CURRENT_TIMESTAMP')) 128 | # ### end Alembic commands ### 129 | -------------------------------------------------------------------------------- /api/db/migrations/versions/bb6244d75df4_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: bb6244d75df4 4 | Revises: 93866a5dad44 5 | Create Date: 2019-07-16 20:41:24.437124 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'bb6244d75df4' 14 | down_revision = '93866a5dad44' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('action', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 24 | sa.Column('type', sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', name='actiontype'), nullable=False), 25 | sa.Column('user_id', sa.Integer(), nullable=False), 26 | sa.Column('obj1_id', sa.Integer(), nullable=True), 27 | sa.Column('obj2_id', sa.Integer(), nullable=True), 28 | sa.Column('obj3_id', sa.Integer(), nullable=True), 29 | sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), 30 | sa.PrimaryKeyConstraint('id') 31 | ) 32 | # ### end Alembic commands ### 33 | 34 | 35 | def downgrade(): 36 | # ### commands auto generated by Alembic - please adjust! 
### 37 | op.drop_table('action') 38 | # ### end Alembic commands ### 39 | -------------------------------------------------------------------------------- /api/db/migrations/versions/c800ac2fe1b9_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: c800ac2fe1b9 4 | Revises: 89ff59bac28a 5 | Create Date: 2019-06-21 15:12:24.090582 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'c800ac2fe1b9' 14 | down_revision = '89ff59bac28a' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.create_table('users', 22 | sa.Column('id', sa.Integer(), nullable=False), 23 | sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 24 | sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False), 25 | sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True), 26 | sa.Column('username', sa.String(length=80), nullable=False), 27 | sa.Column('email', sa.String(length=120), nullable=False), 28 | sa.Column('password_hash', sa.String(length=128), nullable=True), 29 | sa.PrimaryKeyConstraint('id'), 30 | sa.UniqueConstraint('email'), 31 | sa.UniqueConstraint('username') 32 | ) 33 | op.drop_index('email', table_name='user') 34 | op.drop_index('username', table_name='user') 35 | op.drop_table('user') 36 | # ### end Alembic commands ### 37 | 38 | 39 | def downgrade(): 40 | # ### commands auto generated by Alembic - please adjust! 
### 41 | op.create_table('user', 42 | sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=True, nullable=False), 43 | sa.Column('username', mysql.VARCHAR(collation='utf8mb4_general_ci', length=80), nullable=False), 44 | sa.Column('email', mysql.VARCHAR(collation='utf8mb4_general_ci', length=120), nullable=False), 45 | sa.Column('password_hash', mysql.VARCHAR(collation='utf8mb4_general_ci', length=128), nullable=True), 46 | sa.PrimaryKeyConstraint('id'), 47 | mysql_collate='utf8mb4_general_ci', 48 | mysql_default_charset='utf8mb4', 49 | mysql_engine='InnoDB' 50 | ) 51 | op.create_index('username', 'user', ['username'], unique=True) 52 | op.create_index('email', 'user', ['email'], unique=True) 53 | op.drop_table('users') 54 | # ### end Alembic commands ### 55 | -------------------------------------------------------------------------------- /api/db/migrations/versions/c8090718abc3_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: c8090718abc3 4 | Revises: 410649dcf541 5 | Create Date: 2019-07-31 09:37:10.557240 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'c8090718abc3' 14 | down_revision = '410649dcf541' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.alter_column('action', 'type', 22 | existing_type=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', name='actiontype'), 23 | type_=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', name='actiontype'), 24 | existing_nullable=False, 25 | existing_server_default=None, 26 | ) 27 | # ### end Alembic commands ### 28 | 29 | 30 | def downgrade(): 31 | # ### commands auto generated by Alembic - please adjust! 
### 32 | op.alter_column('action', 'type', 33 | type_=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', name='actiontype'), 34 | existing_type=sa.Enum('SIGNUP', 'CONFIRM_EMAIL', 'UPDATE_USER', 'PROMOTE_USER', 'LIST_USER', 'UPLOAD_AVATAR', name='actiontype'), 35 | existing_nullable=False, 36 | existing_server_default=None, 37 | ) 38 | # ### end Alembic commands ### 39 | -------------------------------------------------------------------------------- /api/db/migrations/versions/e134be65efb1_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: e134be65efb1 4 | Revises: 2501865a09b1 5 | Create Date: 2019-06-21 16:18:03.055685 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = 'e134be65efb1' 14 | down_revision = '2501865a09b1' 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade(): 20 | # ### commands auto generated by Alembic - please adjust! ### 21 | op.add_column('users', sa.Column('is_email_confirmed', sa.Boolean(), nullable=True)) 22 | # ### end Alembic commands ### 23 | 24 | 25 | def downgrade(): 26 | # ### commands auto generated by Alembic - please adjust! ### 27 | op.drop_column('users', 'is_email_confirmed') 28 | # ### end Alembic commands ### 29 | -------------------------------------------------------------------------------- /api/db/migrations/versions/e8a76b87ea48_.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | Revision ID: e8a76b87ea48 4 | Revises: 21ad3631a30b 5 | Create Date: 2019-08-02 14:32:24.103442 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | from sqlalchemy.dialects import mysql 11 | 12 | # revision identifiers, used by Alembic. 
revision = 'e8a76b87ea48'
down_revision = '21ad3631a30b'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Replaces data.datum with data.value and drops the unused 'mode' enum.
    # NOTE(review): adding a NOT NULL column with no server default relies on
    # MySQL's implicit column defaults for pre-existing rows — confirm this
    # is intended (it errors under strict SQL mode on a populated table).
    op.add_column('data', sa.Column('value', sa.String(length=1024), nullable=False))
    op.drop_column('data', 'datum')
    op.drop_column('data', 'mode')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('data', sa.Column('mode', mysql.ENUM('NEUTRAL', 'POSITIVE', 'NEGATIVE', 'EDGE', 'DESTRUCTIVE', collation='utf8mb4_general_ci'), nullable=False))
    op.add_column('data', sa.Column('datum', mysql.VARCHAR(collation='utf8mb4_general_ci', length=1024), nullable=False))
    op.drop_column('data', 'value')
    # ### end Alembic commands ###

# ---------------------------------------------------------------------------
# api/db/models/__init__.py
# ---------------------------------------------------------------------------
# Import order matters: modules that are referenced by later imports
# (exceptions, ec_token, user, ...) come first.
from .exceptions import ItemExistsException, ItemDoesNotExistException
from .ec_token import EmailConfirmationToken
from .user import User
from .child_datum import ChildDatum
from .test_datum import TestDatum, Disposition
from .datum import Datum, DatumType
from .test import Test
from .action import Action
from .action_type import ActionType

# ---------------------------------------------------------------------------
# api/db/models/action.py
# ---------------------------------------------------------------------------
from .. import db
from . import User, Test, Datum
from .action_type import ActionType


# This is a list of actions together with the type of objects that participate
# in that action.
# Each schema is a 5-tuple:
#   (action type, obj1 type, obj2 type, obj3 type, message builder)
# where the object types are model classes (or None for unused slots) and the
# message builder is called with the acting user followed by the non-None
# objects.
#
# NOTE(review): ActionType.SEARCH has no schema here, so get_schema() raises
# for SEARCH actions — confirm whether that action type is recorded anywhere.
# NOTE(review): the datum/test schemas declare two object types (User, Datum)
# but their message lambdas accept only (u, d); this only works if handlers
# populate a single object slot — verify against the handlers.
ACTION_SCHEMAS = [
    (ActionType.SIGNUP, None, None, None,
     lambda u: f'{u} signed up'),
    (ActionType.CONFIRM_EMAIL, None, None, None,
     lambda u: f'{u} confirmed e-mail address {u.email}'),
    (ActionType.UPDATE_USER, User, None, None,
     lambda u, u2: f'{u} updated {u2}'),
    (ActionType.PROMOTE_USER, User, None, None,
     lambda u, u2: f'{u} promoted {u2} to admin'),
    (ActionType.LIST_USER, User, None, None,
     lambda u, u2: f'{u} requested details for {u2}'),
    (ActionType.UPLOAD_AVATAR, None, None, None,
     lambda u: f'{u} uploaded an avatar'),
    (ActionType.RESEND_EMAIL_CONFIRM, None, None, None,
     lambda u: f'{u} requested the e-mail confirmation e-mail to be resent'),
    (ActionType.CREATE_DATUM, User, Datum, None,
     lambda u, d: f'{u} added a new datum: {d}'),
    (ActionType.LIST_DATUM, User, Datum, None,
     lambda u, d: f'{u} requested details for datum {d}'),
    (ActionType.UPDATE_DATUM, User, Datum, None,
     lambda u, d: f'{u} updated datum {d}'),
    (ActionType.CREATE_TEST, User, Test, None,
     lambda u, d: f'{u} added a new test: {d}'),
    (ActionType.LIST_TEST, User, Test, None,
     lambda u, d: f'{u} requested details for test {d}'),
]


def get_action_message(action_type, user, *objs):
    """Build the human-readable message for an action.

    None objects are filtered out before calling the schema's message
    builder, so the builder receives only the populated object slots.
    """
    schema = get_schema(action_type)
    objs = filter(lambda obj: obj is not None, objs)
    return schema[4](user, *objs)


def get_schema(type):
    """Return the ACTION_SCHEMAS entry for the given ActionType.

    Raises if no schema is registered for the type.  (The parameter name
    shadows the builtin `type`; kept for backward compatibility with any
    keyword callers.)
    """
    for schema in ACTION_SCHEMAS:
        if schema[0] == type:
            return schema
    raise Exception(f'Could not find action schema with type {type}')


class Action(db.Model):
    """
    This class doesn't inherit from BaseModel because we don't want an
    updated_at field.

    Each action has one actor (a user), and up to 3 other interacted objects,
    which could be of any type, thus we can't make use of foreign keys for them.
    """

    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime(timezone=True),
                           server_default=db.func.now(),
                           nullable=False)
    type = db.Column(db.Enum(ActionType), nullable=False)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
    # Untyped object ids; their meaning is given by the action's schema.
    obj1_id = db.Column(db.Integer, default=None)
    obj2_id = db.Column(db.Integer, default=None)
    obj3_id = db.Column(db.Integer, default=None)

    user = db.relationship('User', back_populates='actions', foreign_keys=[user_id])

    def __repr__(self):
        return get_action_message(self.type, self.user, self.obj1, self.obj2,
                                  self.obj3)

    @property
    def obj1(self):
        """First interacted object, loaded via the schema's type; None if
        the schema declares no object in this slot (or the row is gone)."""
        schema = get_schema(self.type)
        if schema[1] is not None:
            return schema[1].query.filter_by(id=self.obj1_id).first()

    @property
    def obj2(self):
        schema = get_schema(self.type)
        if schema[2] is not None:
            return schema[2].query.filter_by(id=self.obj2_id).first()

    @property
    def obj3(self):
        schema = get_schema(self.type)
        if schema[3] is not None:
            return schema[3].query.filter_by(id=self.obj3_id).first()

    def to_obj(self, whos_asking=None):
        """Convert this action to a JSON-serializable dict.

        BUG FIX: obj1/obj2/obj3 are None for every schema slot whose type is
        None (e.g. all slots for SIGNUP), and the original code called
        .to_obj() on them unconditionally, raising AttributeError.  Serialize
        missing objects as None instead.
        """
        res_obj = dict(
            id=self.id,
            created_at=self.created_at,
            type=self.type,
            user=self.user.to_obj(whos_asking=whos_asking)
        )
        for key in ('obj1', 'obj2', 'obj3'):
            obj = getattr(self, key)
            res_obj[key] = None if obj is None else obj.to_obj(whos_asking=whos_asking)
        res_obj['message'] = self.__repr__()
        return res_obj

    @staticmethod
    def create(type, user, *args):
        """Record an action by `user` touching up to three objects and
        commit it to the database."""
        a = Action(type=type, user=user)
        if len(args) >= 1:
            a.obj1_id = args[0].id
        if len(args) >= 2:
            a.obj2_id = args[1].id
        if len(args) >= 3:
            a.obj3_id = args[2].id
        db.session.add(a)
        db.session.commit()
# ---------------------------------------------------------------------------
# api/db/models/action_type.py
# ---------------------------------------------------------------------------
from enum import Enum


class ActionType(Enum):
    """
    This is an enum class denoting any actions taken within the API.
    Enumerated values correspond to integers. For some semblance of reason,
    this is the key for them:

    0-99: general actions
    100-199: user-related actions
    200-299: datum-related actions
    300-399: test-related actions
    400-499: tag-related actions
    500-599: misc
    """

    SIGNUP = 100
    CONFIRM_EMAIL = 101
    UPDATE_USER = 102
    PROMOTE_USER = 103
    LIST_USER = 104
    UPLOAD_AVATAR = 105
    RESEND_EMAIL_CONFIRM = 106
    CREATE_DATUM = 200
    UPDATE_DATUM = 202
    LIST_DATUM = 204
    CREATE_TEST = 300
    # UPDATE_TEST = 302  # TODO
    LIST_TEST = 304
    SEARCH = 500

# ---------------------------------------------------------------------------
# api/db/models/base.py
# ---------------------------------------------------------------------------
from .. import db


class BaseModel(db.Model):
    """
    This is the base for all our database models. It has common fields and
    functionality.
    """
    __abstract__ = True

    id = db.Column(db.Integer, primary_key=True)
    created_at = db.Column(db.DateTime(timezone=True),
                           server_default=db.func.now(),
                           nullable=False)
    updated_at = db.Column(db.DateTime(timezone=True),
                           server_default=db.func.now(),
                           onupdate=db.func.now(),
                           nullable=False)

    # These are designed to be overridden, as a list of field names which can
    # be returned in a JSON API response. Public fields will always be
    # returned in a JSON response for a given object, whereas private fields
    # will trigger a call to has_access_to_field to see if the requesting
    # user can see the data in that field
    private_fields = None
    public_fields = None

    def __repr__(self):
        # BUG FIX: the source read `return '' % self.id`, which raises
        # TypeError ("not all arguments converted") — the angle-bracketed
        # literal was evidently lost to markup stripping. Reconstructed to
        # the conventional "<ClassName id>" form.
        return '<%s %s>' % (type(self).__name__, self.id)

    # Designed to be overridden. Check whether a user can see the contents of
    # a field on this object
    def has_access_to_field(self, user, field):
        return False

    # Convert a dict suitable for JSONification as API response
    def to_obj(self, whos_asking=None):
        if self.private_fields is None or self.public_fields is None:
            raise Exception('Models must declare private_fields and public_fields')
        ret = {'id': self.id}  # should always return object ids
        for field in self.public_fields:
            ret[field] = getattr(self, field)
        for field in self.private_fields:
            if whos_asking is None or self.has_access_to_field(whos_asking, field):
                ret[field] = getattr(self, field)
        # ensure we convert datetime objects proactively
        if 'created_at' in ret:
            ret['created_at'] = ret['created_at'].isoformat()
        if 'updated_at' in ret:
            ret['updated_at'] = ret['updated_at'].isoformat()
        return ret
import db 2 | from .base import BaseModel 3 | 4 | 5 | class ChildDatum(BaseModel): 6 | __tablename__ = 'child_data' 7 | 8 | # fields 9 | parent_id = db.Column(db.Integer, db.ForeignKey('data.id'), nullable=False) 10 | datum_id = db.Column(db.Integer, db.ForeignKey('data.id'), nullable=False) 11 | name = db.Column(db.String(length=100), nullable=False) 12 | 13 | # relationships 14 | parent = db.relationship('Datum', back_populates='children', foreign_keys=[parent_id]) 15 | datum = db.relationship('Datum', back_populates='included_in', foreign_keys=[datum_id]) 16 | 17 | def __repr__(self): 18 | return ( 19 | "" % 20 | (self.name, self.parent) 21 | ) 22 | -------------------------------------------------------------------------------- /api/db/models/datum.py: -------------------------------------------------------------------------------- 1 | import json 2 | import hashlib 3 | 4 | from enum import Enum 5 | from .. import db 6 | from . import ChildDatum, TestDatum, ItemExistsException 7 | from .base import BaseModel 8 | 9 | 10 | class DatumType(Enum): 11 | NULL = 'NULL' 12 | BOOLEAN = 'BOOL' 13 | NUMBER = 'NUM' 14 | STRING = 'STR' 15 | OBJECT = 'OBJ' 16 | 17 | 18 | def parse_object_json(obj_str, parent=None): 19 | obj = json.loads(obj_str) 20 | data = {} 21 | 22 | if len(obj.values()) != len(set(obj.values())): 23 | raise Exception('You have a duplicate child object; you can only ' 24 | 'include a child object once per parent object') 25 | 26 | for key in obj.keys(): 27 | if type(obj[key]) != int: 28 | raise Exception(f'JSON object key {key} was not an integer datum id') 29 | 30 | data[key] = Datum.query.filter_by(id=obj[key]).first() 31 | if data[key] is None: 32 | raise Exception(f'Datum id {obj[key]} referenced with object key {key} did not exist') 33 | 34 | if parent is not None and object_is_cyclical(parent, data): 35 | raise Exception('Cannot create a datum with cyclical child references') 36 | 37 | return data 38 | 39 | 40 | def object_is_cyclical(parent, data): 
41 | for child_datum in data.values(): 42 | if parent.id == child_datum.id: 43 | return True 44 | if child_datum.type == DatumType.OBJECT: 45 | return object_is_cyclical(parent, child_datum.object_value()) 46 | return False 47 | 48 | 49 | class Datum(BaseModel): 50 | __tablename__ = 'data' 51 | 52 | # fields 53 | user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) 54 | name = db.Column(db.String(length=100), nullable=False) 55 | type = db.Column(db.Enum(DatumType), default=DatumType.STRING, nullable=False) 56 | value = db.Column(db.String(length=1024), nullable=True) 57 | 58 | # the purpose of child_ids_hash is to store a hash of a string. This string 59 | # is just a sorted, comma-separated list of all the ids belonging to the 60 | # children of this datum (meaning this is of course an object datum). The 61 | # reason we want to store this information is because we need to check 62 | # uniqueness of combinations of children, but this is obscenely difficult 63 | # to do with a SQL query given the use of the intermediary ChildDatum 64 | # table. It's easier to just keep a hash of the ids here so we can 65 | # calculate the similar hash for any new datum and check that it doesn't 66 | # match the hash of an existing datum. 
67 | child_ids_hash = db.Column(db.String(length=128), nullable=True) 68 | 69 | private_fields = [] 70 | # type is not included in public_fields because as an enum we must unwrap 71 | # it in 'to_obj' 72 | public_fields = ['name', 'value', 'user_id', 'created_at'] 73 | 74 | # relationships 75 | author = db.relationship('User', back_populates='data', foreign_keys=[user_id]) 76 | children = db.relationship('ChildDatum', back_populates='parent', uselist=True, foreign_keys=lambda: ChildDatum.parent_id) 77 | included_in = db.relationship('ChildDatum', back_populates='datum', uselist=True, foreign_keys=lambda: ChildDatum.datum_id) 78 | tests = db.relationship('TestDatum', back_populates='datum', uselist=True, foreign_keys=lambda: TestDatum.datum_id) 79 | # TODO tags many-many via join table 80 | 81 | def __repr__(self, depth=1): 82 | if self.type == DatumType.OBJECT: 83 | children_repr = map(lambda cd: f'{cd.name}: {cd.datum.__repr__(depth + 1)}', self.children) 84 | space_for_depth = " " * depth 85 | children_repr = space_for_depth + ("\n" + space_for_depth).join(children_repr) 86 | return f"" 87 | else: 88 | return (f"") 90 | 91 | def object_value(self): 92 | if self.type != DatumType.OBJECT: 93 | return self.value 94 | 95 | value = {} 96 | for child in self.children: 97 | value[child.name] = child.datum 98 | 99 | return value 100 | 101 | def to_obj(self, *args, **kwargs): 102 | obj = super().to_obj(*args, **kwargs) 103 | obj['type'] = self.type.value 104 | if self.type == DatumType.OBJECT: 105 | obj['value'] = {} 106 | for child in self.children: 107 | obj['value'][child.name] = child.datum.to_obj(**kwargs) 108 | return obj 109 | 110 | def update(self, **kwargs): 111 | if kwargs.get('name') is not None: 112 | self.name = kwargs['name'] 113 | if kwargs.get('type') is not None: 114 | self.type = kwargs['type'] 115 | if kwargs.get('value') is not None: 116 | value = kwargs['value'] 117 | if self.type == DatumType.OBJECT: 118 | value = parse_object_json(value, 
parent=self) 119 | Datum.ensure_uniqueness(value, self.type) 120 | 121 | if self.type != DatumType.OBJECT: 122 | self.value = value 123 | else: 124 | # we have an object type, which means we may need to modify, add, 125 | # or delete ChildDatum rows corresponding to the child data. The 126 | # simplest thing is just to delete them all and re-add them all, 127 | # so do that unless and until it becomes an issue. 128 | self.remove_children(commit=False) 129 | self.add_children(value, commit=False) 130 | self.child_ids_hash = Datum.get_child_ids_hash(value) 131 | 132 | db.session.commit() 133 | 134 | def remove_children(self, commit=True): 135 | for child in self.children: 136 | db.session.delete(child) 137 | if commit: 138 | db.session.commit() 139 | 140 | def add_children(self, value, commit=True): 141 | for name, child_datum in value.items(): 142 | child_datum_obj = ChildDatum(parent=self, name=name, datum=child_datum) 143 | db.session.add(child_datum_obj) 144 | 145 | if commit: 146 | db.session.commit() 147 | 148 | @staticmethod 149 | def find(value, type): 150 | if type == DatumType.OBJECT: 151 | ids_hash = Datum.get_child_ids_hash(value) 152 | return Datum.query.filter_by(child_ids_hash=ids_hash).first() 153 | 154 | return Datum.query.filter_by(value=value, type=type).first() 155 | 156 | @staticmethod 157 | def create(*args, **kwargs): 158 | value = kwargs['value'] 159 | type = kwargs['type'] 160 | if type == DatumType.OBJECT: 161 | value = parse_object_json(value) 162 | Datum.ensure_uniqueness(value, type) 163 | if type == DatumType.OBJECT: 164 | kwargs['value'] = None 165 | kwargs['child_ids_hash'] = Datum.get_child_ids_hash(value) 166 | datum = Datum(**kwargs) 167 | db.session.add(datum) 168 | db.session.flush() 169 | datum.add_children(value, commit=False) 170 | else: 171 | datum = Datum(**kwargs) 172 | db.session.add(datum) 173 | 174 | db.session.commit() 175 | 176 | return datum 177 | 178 | @staticmethod 179 | def get_child_ids_hash(obj_value): 180 | """ 
181 | This method creates a (hopefully unique) hash based on the combination 182 | of child ids. The input to this method is the datum value after it has 183 | passed through parse_object_json() 184 | """ 185 | child_ids = list(map(lambda child: child.id, obj_value.values())) 186 | child_ids.sort() 187 | 188 | ids_str = ",".join(map(lambda i: str(i), child_ids)) 189 | return hashlib.sha1(ids_str.encode('utf-8')).hexdigest() 190 | 191 | @staticmethod 192 | def ensure_uniqueness(value, type): 193 | old_datum = Datum.find(value, type) 194 | if old_datum is not None: 195 | err = ('The combination of value and type must be unique ' 196 | 'and datum with id %s already contains these values' % 197 | old_datum.id) 198 | raise ItemExistsException(err, item_id=old_datum.id) 199 | -------------------------------------------------------------------------------- /api/db/models/ec_token.py: -------------------------------------------------------------------------------- 1 | from secrets import token_urlsafe 2 | from .. 
import db 3 | from ...mail import Email 4 | from .base import BaseModel 5 | 6 | 7 | class EmailConfirmationToken(BaseModel): 8 | __tablename__ = 'ec_tokens' 9 | 10 | # fields 11 | token = db.Column(db.String(32), unique=True, nullable=False) 12 | user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False) 13 | 14 | private_fields = ['email', 'password_hash', 'is_admin', 15 | 'is_email_confirmed'] 16 | public_fields = ['username', 'avatar_url'] 17 | 18 | # relationships 19 | user = db.relationship('User', back_populates='ec_token') 20 | 21 | def __repr__(self): 22 | return ( 23 | "" % 24 | (self.token, self.user.username, self.user.email) 25 | ) 26 | 27 | def generate_token(self): 28 | self.token = token_urlsafe(16) 29 | 30 | def send_email(self, confirmation_url): 31 | Email.send_confirmation_email(self.user.email, confirmation_url, self.token) 32 | 33 | @staticmethod 34 | def confirm(token): 35 | ec_token = EmailConfirmationToken.query.filter_by(token=token).first() 36 | if ec_token is None: 37 | raise Exception('Tried to confirm email with token "%s" but token ' 38 | 'was not found in the database.' 
class ItemExistsException(Exception):
    """Raised when a create/update would duplicate an existing row.

    The message describes the clash; `item_id` carries the id of the
    pre-existing row so handlers can report it to the client.
    """

    def __init__(self, message, item_id):
        super().__init__(message)
        # id of the row that already holds these values
        self.item_id = item_id


class ItemDoesNotExistException(Exception):
    """Raised when a referenced row cannot be found.

    `item_id` carries the id that was looked up but missing, so handlers
    can echo it back to the client.
    """

    def __init__(self, message, item_id):
        super().__init__(message)
        # id that was requested but not found
        self.item_id = item_id
See also the similar behavior in Datum 24 | hashed_data = db.Column(db.String(128), nullable=False) 25 | 26 | private_fields = [] 27 | # data is added manually in to_obj 28 | public_fields = ['name', 'user_id', 'created_at'] 29 | 30 | # relationships 31 | author = db.relationship('User', back_populates='tests', foreign_keys=[user_id]) 32 | data = db.relationship('TestDatum', back_populates='test', uselist=True, foreign_keys=lambda: TestDatum.test_id) 33 | 34 | def __repr__(self): 35 | data_repr = list(map(lambda td: f'{td.label}: {td.datum.__repr__()}', self.data)) 36 | return (f"") 38 | 39 | def to_obj(self, *args, **kwargs): 40 | obj = super().to_obj(*args, **kwargs) 41 | obj['data'] = {} 42 | for td in self.data: 43 | obj['data'][td.label] = td.datum.to_obj(*args, **kwargs) 44 | obj['data'][td.label]['disposition'] = td.disposition.value 45 | return obj 46 | 47 | def add_datum(self, label, disposition, datum): 48 | # TODO see if we want piecemeal addition 49 | pass 50 | 51 | def remove_datum(self, datum): 52 | # TODO see if we want piecemeal deletion 53 | pass 54 | 55 | def update(self, *args, **kwargs): 56 | # TODO share code with create to ensure uniqueness etc 57 | pass 58 | 59 | @staticmethod 60 | def parse_test_data_obj(data_obj): 61 | """ 62 | Take the data object that comes in as JSON and turn it into a more 63 | useful object that actually references datum objects themselves 64 | """ 65 | def to_test_datum(data_obj): 66 | id = data_obj['datum_id'] 67 | datum = Datum.query.filter_by(id=id).first() 68 | if datum is None: 69 | msg = (f'You referred to datum with id "{id}" but it ' 70 | f'does not exist') 71 | raise ItemDoesNotExistException(msg, id) 72 | return { 73 | 'label': data_obj['label'], 74 | 'disposition': Disposition(data_obj['disposition']), 75 | 'datum': datum 76 | } 77 | 78 | return list(map(to_test_datum, data_obj)) 79 | 80 | @staticmethod 81 | def get_data_hash(data): 82 | """ 83 | This method creates a (hopefully unique) hash based on the 
combination 84 | of (a) label, (b) disposition, and (c) test datum id, for each bit 85 | of test data. It expects as input something that has gone through 86 | Test.parse_test_data_obj 87 | """ 88 | data.sort(key=lambda d: d['label']) 89 | 90 | def get_data_str(d): 91 | return f"{d['label']}:{d['disposition'].value}:{d['datum'].id}" 92 | 93 | data_str = ",".join(map(get_data_str, data)) 94 | return hashlib.sha1(data_str.encode('utf-8')).hexdigest() 95 | 96 | @staticmethod 97 | def ensure_uniqueness(data): 98 | hashed_data = Test.get_data_hash(data) 99 | existing_test = Test.query.filter_by(hashed_data=hashed_data).first() 100 | if existing_test is not None: 101 | msg = (f"The combination of test data, labels, and dispositions " 102 | f"used in this test already exists for test with id " 103 | f"{existing_test.id}") 104 | raise ItemExistsException(msg, existing_test.id) 105 | 106 | @staticmethod 107 | def create(*args, **kwargs): 108 | if len(kwargs.get('data', [])) == 0: 109 | raise Exception('Cannot create test without test data') 110 | 111 | data = kwargs['data'] 112 | labels = list(map(lambda d: d['label'], data)) 113 | if len(labels) != len(set(labels)): 114 | raise Exception('You have a duplicate test data label. 
You can only ' 115 | 'use each label once per test') 116 | 117 | data = Test.parse_test_data_obj(data) 118 | 119 | Test.ensure_uniqueness(data) 120 | 121 | del kwargs['data'] 122 | kwargs['hashed_data'] = Test.get_data_hash(data) 123 | test = Test(**kwargs) 124 | db.session.add(test) 125 | db.session.flush() 126 | for td_data in data: 127 | td = TestDatum(test_id=test.id, label=td_data['label'], 128 | disposition=td_data['disposition'], datum=td_data['datum']) 129 | db.session.add(td) 130 | db.session.commit() 131 | return test 132 | -------------------------------------------------------------------------------- /api/db/models/test_datum.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from .. import db 3 | from .base import BaseModel 4 | 5 | 6 | class Disposition(Enum): 7 | NEUTRAL = 'NEUTRAL' 8 | POSITIVE = 'POS' 9 | NEGATIVE = 'NEG' 10 | EDGE = 'EDGE' 11 | DESTRUCTIVE = 'DESTRUCTIVE' 12 | 13 | 14 | class TestDatum(BaseModel): 15 | __tablename__ = 'test_data' 16 | 17 | # fields 18 | test_id = db.Column(db.Integer, db.ForeignKey('tests.id'), nullable=False) 19 | datum_id = db.Column(db.Integer, db.ForeignKey('data.id'), nullable=False) 20 | label = db.Column(db.String(length=80), nullable=False) 21 | disposition = db.Column(db.Enum(Disposition), nullable=False) 22 | 23 | # relationships 24 | test = db.relationship('Test', back_populates='data', foreign_keys=[test_id]) 25 | datum = db.relationship('Datum', back_populates='tests', foreign_keys=[datum_id]) 26 | 27 | @staticmethod 28 | def ensure_uniqueness(test_id, label): 29 | old_td = TestDatum.query.filter_by(test_id=test_id, label=label).first() 30 | if old_td is not None: 31 | err = (f'Each datum within a test must have a unique label, and ' 32 | f'there is already a datum labeled "{label}" within this test') 33 | raise Exception(err) 34 | 35 | @staticmethod 36 | def create(*args, **kwargs): 37 | if kwargs.get('test') is not None: 38 | 
kwargs['test_id'] = kwargs['test'].id 39 | TestDatum.ensure_uniqueness(kwargs['test_id'], kwargs['label']) 40 | td = TestDatum(**kwargs) 41 | db.session.add(td) 42 | db.session.commit() 43 | -------------------------------------------------------------------------------- /api/db/models/user.py: -------------------------------------------------------------------------------- 1 | from werkzeug.security import generate_password_hash, check_password_hash 2 | from google.cloud import storage 3 | from .. import db 4 | from ...config import get_config 5 | from .base import BaseModel 6 | from . import EmailConfirmationToken 7 | from sqlalchemy import exc 8 | from libgravatar import Gravatar 9 | 10 | 11 | # TODO host a default no-avatar image 12 | DEFAULT_AV_URL = 'https://example.com/image.png' 13 | 14 | 15 | class User(BaseModel): 16 | __tablename__ = 'users' 17 | 18 | # fields 19 | username = db.Column(db.String(80), unique=True, nullable=False) 20 | email = db.Column(db.String(120), unique=True, nullable=False) 21 | password_hash = db.Column(db.String(128)) 22 | bio = db.Column(db.String(220), nullable=False, default="") 23 | has_avatar = db.Column(db.Boolean(), default=False) 24 | # TODO if has_avatar is false, maybe we cache the gravatar image url? 
25 | is_admin = db.Column(db.Boolean(), default=False) 26 | is_email_confirmed = db.Column(db.Boolean(), default=False) 27 | 28 | private_fields = ['is_admin', 'is_email_confirmed', 'created_at', 'updated_at'] 29 | public_fields = ['username', 'avatar_url', 'bio'] 30 | 31 | # relationships 32 | ec_token = db.relationship('EmailConfirmationToken', back_populates='user', 33 | uselist=False) 34 | data = db.relationship('Datum', back_populates='author', uselist=True) 35 | actions = db.relationship('Action', back_populates='user', uselist=True) 36 | tests = db.relationship('Test', back_populates='author', uselist=True) 37 | 38 | def __repr__(self): 39 | return '' % self.username 40 | 41 | @property 42 | def avatar_url(self): 43 | if self.has_avatar: 44 | config = get_config() 45 | 46 | if config.AVATAR_STORAGE == 'local': 47 | return f'/users/{self.id}/avatar' 48 | 49 | return self.get_storage_blob().public_url 50 | 51 | # otherwise return a gravatar url 52 | av = Gravatar(self.email) 53 | return av.get_image(size=200, default=DEFAULT_AV_URL, 54 | filetype_extension=True, use_ssl=True) 55 | 56 | @property 57 | def avatar_file(self): 58 | return f'{self.id}.png' 59 | 60 | def set_password(self, password): 61 | self.password_hash = generate_password_hash(password) 62 | 63 | def check_password(self, password): 64 | return check_password_hash(self.password_hash, password) 65 | 66 | def has_access_to_field(self, user, field): 67 | # users always have access to their own data 68 | if user.id == self.id: 69 | return True 70 | 71 | return False 72 | 73 | def to_obj(self, *args, **kwargs): 74 | obj = super().to_obj(*args, **kwargs) 75 | obj['data'] = list(map(lambda d: d.to_obj(*args, **kwargs), self.data)) 76 | return obj 77 | 78 | def update_email(self, email, commit=False): 79 | if self.ec_token: 80 | db.session.delete(self.ec_token) 81 | token = EmailConfirmationToken() 82 | token.generate_token() 83 | self.email = email 84 | self.is_email_confirmed = False 85 | 
self.ec_token = token 86 | if commit: 87 | db.session.commit() 88 | 89 | def get_storage_blob(self): 90 | config = get_config() 91 | client = storage.Client() 92 | bucket = client.get_bucket(config.AVATAR_BUCKET) 93 | blob = bucket.blob(self.avatar_file) 94 | return blob 95 | 96 | @staticmethod 97 | def create(username, email, password, bio, is_admin=False): 98 | # TODO add avatar upload here? 99 | token = EmailConfirmationToken() 100 | token.generate_token() 101 | user = User(username=username, email=email, is_admin=is_admin, 102 | ec_token=token, bio=bio) 103 | user.set_password(password) 104 | db.session.add(user) 105 | db.session.add(token) 106 | try: 107 | db.session.commit() 108 | except exc.IntegrityError as e: 109 | db.session.rollback() 110 | raise e 111 | 112 | # TODO email confirmation process should actually send an e-mail at this point! 113 | return user 114 | -------------------------------------------------------------------------------- /api/handlers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testdotai/opentestdata-api/d88474d59775c77264ba78166b5633937a45b85a/api/handlers/__init__.py -------------------------------------------------------------------------------- /api/handlers/auth.py: -------------------------------------------------------------------------------- 1 | import jwt 2 | from datetime import datetime, timedelta 3 | from werkzeug.exceptions import Unauthorized 4 | from ..db.models import User 5 | from ..config import get_config 6 | 7 | 8 | SECRET = get_config().JWT_SECRET 9 | JWT_ALG = 'HS256' 10 | 11 | 12 | def auth(admin=False): 13 | """Decorator that unwraps JWT token into an auth_user object""" 14 | 15 | def _auth(func): 16 | def wrapper(*args, **kwargs): 17 | if 'user' not in kwargs: 18 | return func(*args, **kwargs) 19 | 20 | auth_user = User.query.filter_by(id=kwargs['user']).first() 21 | 22 | if auth_user is None: 23 | raise Unauthorized('Could 
def login(body):
    """Exchange username+password credentials for a 7-day JWT bearer token.

    Returns {'token', 'user_id'} on success, or an error payload with
    HTTP 401 when the credentials do not match.
    """
    user = User.query.filter_by(username=body.get('username')).first()
    bad_credentials = (
        user is None or user.check_password(body.get('password')) is False
    )
    if bad_credentials:
        return dict(
            error='Could not authenticate you with that username and password'
        ), 401

    claims = {
        'iat': datetime.utcnow(),
        'exp': datetime.utcnow() + timedelta(days=7),
        'sub': user.id,
    }
    token = jwt.encode(claims, SECRET, algorithm=JWT_ALG)

    return {'token': token.decode('utf-8'), 'user_id': user.id}
@auth()
@get_datum()
def update(datum, auth_user, body):
    """Update a datum's name/type/value; only admins or the owner may."""
    may_edit = auth_user.is_admin or auth_user.id == datum.user_id
    if not may_edit:
        return dict(error="You don't have permission to update that datum"), 401

    try:
        datum.update(name=body.get('name'),
                     type=DatumType(body.get('type')),
                     value=body.get('value'))
    except Exception as e:
        return dict(
            error=f'Could not update datum: {e}',
            datum_id=datum.id
        ), 500

    # record the action for the audit trail, then echo the updated datum
    Action.create(ActionType.UPDATE_DATUM, auth_user, datum)
    return datum.to_obj(whos_asking=auth_user)
@auth()
def create(auth_user, body):
    """Create a test owned by auth_user from the POSTed name + data payload.

    Maps the model-layer exceptions to error payloads that include the
    offending id where one is known.
    """
    try:
        test = Test.create(author=auth_user,
                           data=body.get('data'),
                           name=body.get('name'))
    except ItemDoesNotExistException as e:
        # a referenced datum id was unknown
        return dict(error=str(e), datum_id=e.item_id), 500
    except ItemExistsException as e:
        # an identical test already exists
        return dict(error=str(e), test_id=e.item_id), 500
    except Exception as e:
        return dict(error=f'Could not add test: {e}'), 500

    Action.create(ActionType.CREATE_TEST, auth_user, test)

    return test.to_obj(whos_asking=auth_user)
def confirm_email(token):
    """Confirm the e-mail address tied to a confirmation token.

    Returns {'confirmed': True} on success; raises 404 NotFound when the
    token is unknown.
    """
    try:
        user = EmailConfirmationToken.confirm(token)
    except Exception as e:
        # unknown/consumed token -> 404, with the model's message chained
        raise NotFound(str(e)) from e
    # BUG FIX: moved out of the try — a failure while recording the audit
    # action must not masquerade as "token not found"
    Action.create(ActionType.CONFIRM_EMAIL, user)

    return {'confirmed': True}
@auth()
@get_user()
def upload_avatar(user, auth_user, body):
    """Replace `user`'s avatar with the base64 image in the request body.

    The image is re-encoded as a <=400x400 PNG and stored either on local
    disk or in the configured cloud bucket; returns the new avatar URL.
    Raises Unauthorized / BadRequest for permission or payload problems.
    """
    # users may only change their own avatar (no admin override here)
    if user.id != auth_user.id:
        raise Unauthorized("You can't change the avatar of another user")

    mime_type = body.get('type')
    if mime_type not in ['image/png', 'image/gif', 'image/jpeg', 'image/jpg']:
        raise BadRequest(f'Only PNG, GIF, or JPG image types are supported. '
                         f'You tried to upload a {mime_type}')

    try:
        img = Image.open(BytesIO(base64.b64decode(body.get('data'))))
    except Exception:
        raise BadRequest('There was some error decoding your image data')

    # carry PNG metadata through the re-encode for PNG uploads
    png_info = {}
    if mime_type == 'image/png':
        png_info = img.info
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (Image.LANCZOS
    # is the replacement) — confirm the pinned Pillow version still has it.
    img.thumbnail((400, 400), Image.ANTIALIAS)
    new_img = BytesIO()
    img.save(new_img, format='PNG', **png_info)

    config = get_config()
    if config.AVATAR_STORAGE == 'local':
        if not os.path.exists(config.AVATAR_PATH):
            os.makedirs(config.AVATAR_PATH)

        img_path = os.path.join(config.AVATAR_PATH, user.avatar_file)
        with open(img_path, 'wb') as out:
            out.write(new_img.getvalue())
    else:
        # cloud storage: rewind the buffer, upload, make it world-readable
        new_img.seek(0)
        blob = user.get_storage_blob()
        blob.upload_from_file(new_img)
        blob.make_public()

    # first successful upload flips the flag that switches avatar_url away
    # from the gravatar fallback
    if not user.has_avatar:
        user.has_avatar = True
        db.session.commit()

    Action.create(ActionType.UPLOAD_AVATAR, user)

    return user.avatar_url
def get_item(Klass, name='obj', param_name='id', filter_key='id'):
    """
    Abstract decorator that finds an object by a filter and throws if it can't

    - Klass is the model class to use
    - name is the kwarg that the wrapped method will receive this object as
    - param_name is the API url parameter where we find the filter restriction
    - filter_key is the column name to use for the filter

    Example:
    @get_item(User, name='user', param_name='id', filter_key='id')
    def handler_method(user):
        # now we can do things with 'user' knowing it exists
        pass

    This decorator should probably remain abstract; see below for how we use it
    to make a more ergonomic @get_user decorator
    """

    def _get_item(func):
        def wrapper(*args, **kwargs):
            # BUG FIX: the guard used to test `filter_key not in kwargs`, but
            # the value is read from kwargs[param_name]; whenever the two names
            # differed the old check raised spuriously (or the lookup below
            # would KeyError). Guard on the key we actually read.
            if param_name not in kwargs:
                raise Exception('Guarded handler for "%s" but it was not in params' % param_name)

            filter_kwargs = {filter_key: kwargs[param_name]}
            obj = Klass.query.filter_by(**filter_kwargs).first()

            if obj is None:
                raise NotFound('Could not find %s with %s %s' % (name, filter_key, kwargs[param_name]))

            # replace the raw url parameter with the resolved object
            del kwargs[param_name]
            kwargs[name] = obj
            return func(*args, **kwargs)
        return wrapper
    return _get_item
    @staticmethod
    def send_confirmation_email(to_addr, confirmation_url, token):
        """Build and send the signup confirmation e-mail.

        The recipient is asked to visit confirmation_url with the token
        appended as a query parameter; that endpoint flips
        is_email_confirmed for the owning user.
        """
        subject = 'Confirm your e-mail address for opentestdata.org'
        link = f'{confirmation_url}?token={token}'
        body = f'''Hi there,

We're writing because you recently signed up using this e-mail address
at opentestdata.org. Please confirm your address by clicking the link
below!

{link}

Thanks,
The Open Test Data team

(Please do not reply to this e-mail as no one will read it.)
'''
        # Email.send prints locally unless cloud sending is configured
        email = Email(to_addr, subject, body)
        email.send()
| description: Successful ping 13 | content: 14 | application/json: 15 | schema: 16 | type: string 17 | Error: 18 | description: Error 19 | content: 20 | application/json: 21 | schema: 22 | type: object 23 | properties: 24 | error: 25 | type: string 26 | schemas: 27 | Base: 28 | properties: 29 | id: 30 | type: integer 31 | created_at: 32 | type: string 33 | format: date-time 34 | updated_at: 35 | type: string 36 | format: date-time 37 | -------------------------------------------------------------------------------- /api/openapi/data.yaml: -------------------------------------------------------------------------------- 1 | paths: 2 | NewDatum: 3 | post: 4 | summary: Create a new test datum 5 | operationId: api.handlers.data.create 6 | requestBody: 7 | $ref: '#/requests/DatumDetails' 8 | responses: 9 | '200': 10 | $ref: '#/responses/Datum' 11 | '500': 12 | $ref: 'base.yaml#/responses/Error' 13 | security: 14 | - jwt: [] 15 | GetOrUpdateDatum: 16 | get: 17 | summary: Get details of a datum 18 | parameters: 19 | $ref: '#/parameters/DatumId' 20 | operationId: api.handlers.data.detail 21 | responses: 22 | '200': 23 | $ref: '#/responses/Datum' 24 | '500': 25 | $ref: 'base.yaml#/responses/Error' 26 | security: 27 | - jwt: [] 28 | post: 29 | summary: Update a test datum 30 | operationId: api.handlers.data.update 31 | parameters: 32 | $ref: '#/parameters/DatumId' 33 | requestBody: 34 | $ref: '#/requests/DatumDetails' 35 | responses: 36 | '200': 37 | $ref: '#/responses/Datum' 38 | '500': 39 | $ref: '#/responses/DatumUpdateError' 40 | security: 41 | - jwt: [] 42 | 43 | parameters: 44 | DatumId: 45 | - in: path 46 | name: id 47 | required: true 48 | schema: 49 | type: integer 50 | minimum: 1 51 | description: The datum ID 52 | 53 | requests: 54 | DatumDetails: 55 | description: Datum details 56 | required: true 57 | content: 58 | application/json: 59 | schema: 60 | $ref: '#/schemas/DatumBase' 61 | 62 | responses: 63 | Datum: 64 | description: Datum details 65 | content: 66 | 
application/json: 67 | schema: 68 | $ref: '#/schemas/Datum' 69 | DatumUpdateError: 70 | description: Datum Update Error 71 | content: 72 | application/json: 73 | schema: 74 | type: object 75 | properties: 76 | datum_id: 77 | type: number 78 | error: 79 | type: string 80 | 81 | schemas: 82 | DatumBase: 83 | properties: 84 | name: 85 | type: string 86 | maxLength: 100 87 | minLength: 1 88 | type: 89 | type: string 90 | enum: 91 | - 'NULL' 92 | - BOOL 93 | - NUM 94 | - STR 95 | - OBJ 96 | value: 97 | type: string 98 | maxLength: 1024 99 | required: 100 | - name 101 | - type 102 | - value 103 | additionalProperties: false 104 | Datum: 105 | allOf: 106 | - $ref: '#/schemas/DatumBase' 107 | - $ref: 'base.yaml#/schemas/Base' 108 | - properties: 109 | user_id: 110 | type: integer 111 | -------------------------------------------------------------------------------- /api/openapi/search.yaml: -------------------------------------------------------------------------------- 1 | paths: 2 | Search: 3 | post: 4 | summary: Search for tests and data 5 | operationId: api.handlers.search.search 6 | requestBody: 7 | $ref: '#/requests/Search' 8 | responses: 9 | '200': 10 | $ref: '#/responses/SearchResults' 11 | security: 12 | - jwt: [] 13 | 14 | requests: 15 | Search: 16 | description: Search parameters 17 | required: true 18 | content: 19 | application/json: 20 | schema: 21 | $ref: '#/schemas/SearchQuery' 22 | 23 | responses: 24 | SearchResults: 25 | description: Search results 26 | content: 27 | application/json: 28 | schema: 29 | $ref: '#/schemas/SearchResults' 30 | 31 | schemas: 32 | SearchQuery: 33 | properties: 34 | type: 35 | type: string 36 | enum: 37 | - ALL 38 | - TESTS 39 | - DATA 40 | query: 41 | type: string 42 | minLength: 2 43 | SearchResults: 44 | properties: 45 | tests: 46 | type: array 47 | items: 48 | $ref: 'tests.yaml#/schemas/Test' 49 | data: 50 | type: array 51 | items: 52 | $ref: 'data.yaml#/schemas/Datum' 53 | 
-------------------------------------------------------------------------------- /api/openapi/tests.yaml: -------------------------------------------------------------------------------- 1 | paths: 2 | NewTest: 3 | post: 4 | summary: Create a new test 5 | operationId: api.handlers.tests.create 6 | requestBody: 7 | $ref: '#/requests/TestDetails' 8 | responses: 9 | '200': 10 | $ref: '#/responses/TestDetails' 11 | '500': 12 | $ref: 'base.yaml#/responses/Error' 13 | security: 14 | - jwt: [] 15 | GetTest: 16 | get: 17 | summary: Get details of a test 18 | parameters: 19 | $ref: '#/parameters/TestId' 20 | operationId: api.handlers.tests.detail 21 | responses: 22 | '200': 23 | $ref: '#/responses/TestDetails' 24 | '500': 25 | $ref: 'base.yaml#/responses/Error' 26 | security: 27 | - jwt: [] 28 | 29 | parameters: 30 | TestId: 31 | - in: path 32 | name: id 33 | required: true 34 | schema: 35 | type: integer 36 | minimum: 1 37 | description: The test ID 38 | 39 | requests: 40 | TestDetails: 41 | description: Test details 42 | required: true 43 | content: 44 | application/json: 45 | schema: 46 | $ref: '#/schemas/TestBase' 47 | 48 | responses: 49 | TestDetails: 50 | description: Test details 51 | content: 52 | application/json: 53 | schema: 54 | $ref: '#/schemas/Test' 55 | 56 | schemas: 57 | TestBase: 58 | properties: 59 | name: 60 | type: string 61 | maxLength: 160 62 | minLength: 1 63 | data: 64 | type: array 65 | items: 66 | $ref: '#/schemas/TestData' 67 | required: 68 | - name 69 | - data 70 | additionalProperties: false 71 | Test: 72 | allOf: 73 | - $ref: '#/schemas/TestBase' 74 | - $ref: 'base.yaml#/schemas/Base' 75 | - properties: 76 | user_id: 77 | type: integer 78 | TestData: 79 | properties: 80 | label: 81 | type: string 82 | maxLength: 80 83 | minLength: 1 84 | disposition: 85 | type: string 86 | enum: 87 | - NEUTRAL 88 | - POS 89 | - NEG 90 | - EDGE 91 | - DESTRUCTIVE 92 | datum_id: 93 | type: integer 94 | 
-------------------------------------------------------------------------------- /api/openapi/users.yaml: -------------------------------------------------------------------------------- 1 | paths: 2 | SignUp: 3 | post: 4 | summary: Create a new user 5 | operationId: api.handlers.users.signup 6 | requestBody: 7 | $ref: '#/requests/SignUp' 8 | responses: 9 | '200': 10 | $ref: '#/responses/SignUpSuccess' 11 | '500': 12 | $ref: '#/responses/SignUpError' 13 | Login: 14 | post: 15 | summary: Log in and retrieve an access token 16 | operationId: api.handlers.auth.login 17 | requestBody: 18 | $ref: '#/requests/Login' 19 | responses: 20 | '200': 21 | $ref: '#/responses/LoginSuccess' 22 | '500': 23 | $ref: 'base.yaml#/responses/Error' 24 | EmailResend: 25 | post: 26 | summary: Re-send the confirmation e-mail to the address associated with the logged in user 27 | operationId: api.handlers.users.resend_confirmation 28 | requestBody: 29 | $ref: '#/requests/EmailResend' 30 | responses: 31 | '200': 32 | $ref: '#/responses/EmailResendSuccess' 33 | '500': 34 | $ref: 'base.yaml#/responses/Error' 35 | security: 36 | - jwt: [] 37 | EmailConfirm: 38 | get: 39 | summary: Confirm an e-mail address 40 | operationId: api.handlers.users.confirm_email 41 | parameters: 42 | $ref: '#/parameters/EmailConfirm' 43 | responses: 44 | '200': 45 | $ref: '#/responses/EmailConfirmSuccess' 46 | '500': 47 | $ref: 'base.yaml#/responses/Error' 48 | GetOrUpdateUser: 49 | get: 50 | summary: Get details of a user 51 | operationId: api.handlers.users.detail 52 | parameters: 53 | $ref: '#/parameters/UserId' 54 | responses: 55 | '200': 56 | $ref: '#/responses/GetUserSuccess' 57 | '500': 58 | $ref: 'base.yaml#/responses/Error' 59 | security: 60 | - jwt: [] 61 | post: 62 | summary: Update details for a user 63 | operationId: api.handlers.users.update 64 | parameters: 65 | $ref: '#/parameters/UserId' 66 | requestBody: 67 | $ref: '#/requests/UpdateUser' 68 | responses: 69 | '200': 70 | $ref: 
'#/responses/UserDetails' 71 | '500': 72 | $ref: 'base.yaml#/responses/Error' 73 | security: 74 | - jwt: [] 75 | GetOrUpdateAvatar: 76 | get: 77 | summary: Get avatar image data (this API is only available in the development version of the server) 78 | operationId: api.handlers.users.get_avatar 79 | parameters: 80 | $ref: '#/parameters/UserId' 81 | responses: 82 | '200': 83 | $ref: '#/responses/Avatar' 84 | '500': 85 | $ref: 'base.yaml#/responses/Error' 86 | post: 87 | summary: Upload image data to be used as the user avatar 88 | operationId: api.handlers.users.upload_avatar 89 | parameters: 90 | $ref: '#/parameters/UserId' 91 | requestBody: 92 | $ref: '#/requests/UpdateAvatar' 93 | responses: 94 | '200': 95 | $ref: '#/responses/UpdateAvatar' 96 | '500': 97 | $ref: 'base.yaml#/responses/Error' 98 | security: 99 | - jwt: [] 100 | PromoteUser: 101 | post: 102 | summary: Promote a user to an admin (admin authorization required) 103 | operationId: api.handlers.users.promote 104 | parameters: 105 | $ref: '#/parameters/UserId' 106 | responses: 107 | '200': 108 | $ref: '#/responses/UserDetails' 109 | '500': 110 | $ref: 'base.yaml#/responses/Error' 111 | security: 112 | - jwt: [] 113 | 114 | parameters: 115 | EmailConfirm: 116 | - in: path 117 | name: token 118 | required: true 119 | schema: 120 | type: string 121 | description: The e-mail confirmation token 122 | UserId: 123 | - in: path 124 | name: id 125 | required: true 126 | schema: 127 | type: integer 128 | minimum: 1 129 | description: The user ID 130 | 131 | requests: 132 | SignUp: 133 | description: Data needed to create a new User 134 | required: true 135 | content: 136 | application/json: 137 | schema: 138 | $ref: '#/schemas/SignUp' 139 | Login: 140 | description: Login details 141 | required: true 142 | content: 143 | application/json: 144 | schema: 145 | $ref: '#/schemas/Login' 146 | EmailResend: 147 | description: Confirmation url to include in the confirmation e-mail 148 | required: true 149 | content: 150 | 
application/json: 151 | schema: 152 | $ref: '#/schemas/EmailResend' 153 | UpdateUser: 154 | description: User details 155 | required: true 156 | content: 157 | application/json: 158 | schema: 159 | $ref: '#/schemas/UpdateUser' 160 | UpdateAvatar: 161 | description: Image mime type and data 162 | required: true 163 | content: 164 | application/json: 165 | schema: 166 | properties: 167 | type: 168 | type: string 169 | data: 170 | type: string 171 | 172 | 173 | responses: 174 | SignUpSuccess: 175 | description: New User 176 | content: 177 | application/json: 178 | schema: 179 | type: object 180 | $ref: '#/schemas/User' 181 | SignUpError: 182 | description: New User Error 183 | content: 184 | application/json: 185 | schema: 186 | type: object 187 | properties: 188 | username: 189 | type: string 190 | email: 191 | type: string 192 | error: 193 | type: string 194 | LoginSuccess: 195 | description: Auth token 196 | content: 197 | application/json: 198 | schema: 199 | properties: 200 | token: 201 | type: string 202 | user_id: 203 | type: integer 204 | EmailResendSuccess: 205 | description: Successful resend 206 | content: 207 | application/json: 208 | schema: 209 | properties: 210 | success: 211 | type: boolean 212 | EmailConfirmSuccess: 213 | description: Auth token 214 | content: 215 | application/json: 216 | schema: 217 | properties: 218 | confirmed: 219 | type: boolean 220 | GetUserSuccess: 221 | description: User details 222 | content: 223 | application/json: 224 | schema: 225 | $ref: '#/schemas/User' 226 | UserDetails: 227 | description: User details 228 | content: 229 | application/json: 230 | schema: 231 | $ref: '#/schemas/User' 232 | Avatar: 233 | description: The raw avatar image data 234 | content: 235 | image/jpeg: 236 | schema: 237 | type: string 238 | format: binary 239 | UpdateAvatar: 240 | description: The URL for the newly-uploaded image 241 | content: 242 | application/json: 243 | schema: 244 | type: string 245 | 246 | schemas: 247 | SignUp: 248 | 
properties: 249 | username: 250 | type: string 251 | maxLength: 80 252 | minLength: 1 253 | email: 254 | type: string 255 | maxLength: 120 256 | minLength: 5 257 | password: 258 | type: string 259 | minLength: 8 260 | bio: 261 | type: string 262 | maxLength: 240 263 | email_confirmation_url: 264 | type: string 265 | required: 266 | - username 267 | - email 268 | - password 269 | - bio 270 | - email_confirmation_url 271 | additionalProperties: false 272 | Login: 273 | properties: 274 | username: 275 | type: string 276 | password: 277 | type: string 278 | required: 279 | - username 280 | - password 281 | additionalProperties: false 282 | User: 283 | allOf: 284 | - $ref: 'base.yaml#/schemas/Base' 285 | - properties: 286 | username: 287 | type: string 288 | maxLength: 80 289 | minLength: 1 290 | avatar_url: 291 | type: string 292 | bio: 293 | type: string 294 | data: 295 | type: array 296 | items: 297 | $ref: 'data.yaml#/schemas/Datum' 298 | EmailResend: 299 | properties: 300 | email_confirmation_url: 301 | type: string 302 | required: 303 | - email_confirmation_url 304 | additionalProperties: false 305 | UpdateUser: 306 | properties: 307 | email: 308 | type: string 309 | maxLength: 120 310 | minLength: 5 311 | password: 312 | type: string 313 | minLength: 8 314 | additionalProperties: false 315 | -------------------------------------------------------------------------------- /api/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testdotai/opentestdata-api/d88474d59775c77264ba78166b5633937a45b85a/api/tests/__init__.py -------------------------------------------------------------------------------- /api/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import base64 3 | import pytest 4 | import logging 5 | from flask.testing import FlaskClient 6 | from ..app import create_app 7 | from ..config import APP_ROOT, 
get_config 8 | from ..db import db 9 | from .utils import (make_random_username, make_random_email, make_random_password, 10 | make_user, make_datum_dict, make_datum, make_random_bio, 11 | make_object_datum_dict, make_object_datum, make_test_dict, 12 | make_test) 13 | 14 | 15 | os.environ['FLASK_ENV'] = 'testing' 16 | logger = logging.getLogger() 17 | 18 | 19 | class JsonRespClient(FlaskClient): 20 | """More ergonomic test API client 21 | 22 | Basically traps calls to client.get, client.post, client.delete, and 23 | turns their JSON responses into actual python values, so we can assert 24 | based on responses easily. In case of a non-200 response, an exception is 25 | raised. If this behavior is not desired, pass raw=True. 26 | 27 | We achieve this by overriding the base 'open' method and inspecting the 28 | response. 29 | """ 30 | def open(self, *args, **kwargs): 31 | raw = False 32 | if 'raw' in kwargs: 33 | raw = True 34 | del kwargs['raw'] 35 | 36 | resp = super().open(*args, **kwargs) 37 | 38 | if not raw and kwargs['method'] in ['GET', 'POST', 'DELETE']: 39 | if resp.status != '200 OK': 40 | raise Exception('Got unexpected %s response for request. 
' 41 | 'Body was: "%s"' % (resp.status, resp.data)) 42 | 43 | return resp.get_json() 44 | 45 | return resp 46 | 47 | 48 | @pytest.fixture(scope='package') 49 | def client(): 50 | """Fixture to clear and set up the database and init an API client""" 51 | 52 | app = create_app() 53 | app.test_client_class = JsonRespClient 54 | with app.test_client() as c: 55 | with app.app_context(): 56 | logger.info('Dropping all database tables') 57 | db.drop_all() 58 | logger.info('Recreating all database tables') 59 | db.create_all() 60 | yield c 61 | 62 | 63 | @pytest.fixture(scope='function') 64 | def username(): 65 | return make_random_username() 66 | 67 | 68 | @pytest.fixture(scope='function') 69 | def email(): 70 | return make_random_email() 71 | 72 | 73 | @pytest.fixture(scope='function') 74 | def password(): 75 | return make_random_password() 76 | 77 | 78 | @pytest.fixture(scope='function') 79 | def bio(): 80 | return make_random_bio() 81 | 82 | 83 | @pytest.fixture(scope='function') 84 | def user(): 85 | return make_user() 86 | 87 | 88 | @pytest.fixture(scope='function') 89 | def other_user(): 90 | return make_user() 91 | 92 | 93 | @pytest.fixture(scope='function') 94 | def admin_user(): 95 | return make_user(is_admin=True) 96 | 97 | 98 | @pytest.fixture(scope='function') 99 | def datum_dict(): 100 | return make_datum_dict() 101 | 102 | 103 | @pytest.fixture(scope='function') 104 | def datum(): 105 | return make_datum() 106 | 107 | 108 | @pytest.fixture(scope='function') 109 | def object_datum_dict(): 110 | return make_object_datum_dict() 111 | 112 | 113 | @pytest.fixture(scope='function') 114 | def object_datum(): 115 | return make_object_datum() 116 | 117 | 118 | @pytest.fixture(scope='package') 119 | def avatar_png_b64(): 120 | spidey_path = os.path.join(APP_ROOT, 'api', 'tests', 'fixtures', 'spidey.png') 121 | with open(spidey_path, 'rb') as spidey_img: 122 | return base64.b64encode(spidey_img.read()).decode('ascii') 123 | 124 | 125 | @pytest.fixture(scope='package') 
126 | def avatar_post_b64(): 127 | spidey_path = os.path.join(APP_ROOT, 'api', 'tests', 'fixtures', 'spidey_post.png') 128 | with open(spidey_path, 'rb') as spidey_img: 129 | return base64.b64encode(spidey_img.read()).decode('ascii') 130 | 131 | 132 | @pytest.fixture(scope='package') 133 | def config(): 134 | return get_config() 135 | 136 | 137 | @pytest.fixture(scope='function') 138 | def test_dict(): 139 | return make_test_dict() 140 | 141 | 142 | @pytest.fixture(scope='function') 143 | def test(): 144 | return make_test() 145 | -------------------------------------------------------------------------------- /api/tests/fixtures/spidey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testdotai/opentestdata-api/d88474d59775c77264ba78166b5633937a45b85a/api/tests/fixtures/spidey.png -------------------------------------------------------------------------------- /api/tests/fixtures/spidey_post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testdotai/opentestdata-api/d88474d59775c77264ba78166b5633937a45b85a/api/tests/fixtures/spidey_post.png -------------------------------------------------------------------------------- /api/tests/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | log_cli=false 3 | log_level=info 4 | -------------------------------------------------------------------------------- /api/tests/test_auth.py: -------------------------------------------------------------------------------- 1 | import jwt 2 | from datetime import datetime, timedelta 3 | from ..handlers.auth import JWT_ALG 4 | 5 | 6 | def test_no_access_without_token(client, user): 7 | resp = client.get('/users/%s' % user.id, raw=True) 8 | assert resp.get_json()['status'] == 401 9 | assert resp.get_json()['detail'] == 'No authorization token provided' 10 | 11 | 12 | def 
test_no_access_with_bad_token(client, user): 13 | bad_token = 'foo' 14 | resp = client.get('/users/%s' % user.id, raw=True, 15 | headers={'Authorization': 'Bearer %s' % bad_token}) 16 | assert resp.get_json()['status'] == 401 17 | assert 'wrong credentials' in resp.get_json()['detail'] 18 | 19 | 20 | def test_no_access_with_incorrectly_signed_token(client, user): 21 | bad_secret = 'secret' 22 | bad_token = jwt.encode({ 23 | 'iat': datetime.utcnow(), 24 | 'exp': datetime.utcnow() + timedelta(days=7), 25 | 'sub': user.id, 26 | }, bad_secret, algorithm=JWT_ALG) 27 | resp = client.get('/users/%s' % user.id, raw=True, 28 | headers={'Authorization': 'Bearer %s' % bad_token}) 29 | assert resp.get_json()['status'] == 401 30 | assert 'wrong credentials' in resp.get_json()['detail'] 31 | 32 | 33 | def test_login(client, user): 34 | resp = client.post('/users/login', json=dict( 35 | username=user.username, password=user.original_password 36 | )) 37 | 38 | assert 'token' in resp.keys() 39 | assert resp['user_id'] == user.id 40 | 41 | resp = client.get('/users/%s' % user.id, 42 | headers={'Authorization': 'Bearer %s' % resp['token']}) 43 | assert resp['id'] == user.id 44 | 45 | 46 | def test_login_fails_with_bad_username(client, user): 47 | resp = client.post('/users/login', raw=True, json=dict( 48 | username='foo', password=user.original_password 49 | )) 50 | assert resp.status_code == 401 51 | assert 'Could not authenticate' in resp.get_json()['error'] 52 | 53 | 54 | def test_login_fails_with_bad_password(client, user): 55 | resp = client.post('/users/login', raw=True, json=dict( 56 | username=user.username, password='foo' 57 | )) 58 | assert resp.status_code == 401 59 | assert 'Could not authenticate' in resp.get_json()['error'] 60 | -------------------------------------------------------------------------------- /api/tests/test_basic.py: -------------------------------------------------------------------------------- 1 | def test_server_ping(client): 2 | res = 
client.get('/ping') 3 | assert res == 'PONG' 4 | -------------------------------------------------------------------------------- /api/tests/test_data.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from ..db.models import Datum, Action, ActionType 4 | from .utils import make_datum, make_object_datum, make_object_datum_dict 5 | 6 | 7 | def test_can_create_datum(client, user, datum_dict): 8 | assert len(user.data) == 0 9 | resp = client.post('/data', headers=user.auth_headers, json=datum_dict) 10 | resp_data = dict( 11 | **datum_dict, 12 | user_id=user.id 13 | ) 14 | assert 'created_at' in resp 15 | assert 'id' in resp 16 | 17 | action = Action.query.filter_by(type=ActionType.CREATE_DATUM, user_id=user.id, obj1_id=resp['id']).first() 18 | assert action is not None 19 | 20 | del resp['created_at'] 21 | del resp['id'] 22 | assert resp == resp_data 23 | assert len(user.data) == 1 24 | 25 | 26 | def test_can_create_object_datum(client, user): 27 | child_datum_1 = make_datum() 28 | child_datum_2 = make_datum() 29 | child_datum_3 = make_datum() 30 | value = {'foo': child_datum_1.id, 'bar': child_datum_2.id, 'baz': child_datum_3.id} 31 | datum_dict = dict(name='compound datum', type='OBJ', value=json.dumps(value)) 32 | resp = client.post('/data', headers=user.auth_headers, json=datum_dict) 33 | datum = Datum.query.filter_by(id=resp['id']).first() 34 | children = map(lambda cd: cd.datum, datum.children) 35 | assert set(children) == set([child_datum_1, child_datum_2, child_datum_3]) 36 | assert resp['value']['foo'] == child_datum_1.to_obj(whos_asking=user) 37 | assert resp['value']['bar'] == child_datum_2.to_obj(whos_asking=user) 38 | assert resp['value']['baz'] == child_datum_3.to_obj(whos_asking=user) 39 | 40 | 41 | def test_create_datum_requires_login(client, datum_dict): 42 | resp = client.post('/data', json=datum_dict, raw=True) 43 | assert resp.status_code == 401 44 | assert 'authorization' in 
resp.get_json()['detail'] 45 | 46 | 47 | def test_cant_create_datum_without_required_fields(client, user, datum_dict): 48 | req_fields = ['name', 'type', 'value'] 49 | for req_field in req_fields: 50 | bad_input = dict(**datum_dict) 51 | del bad_input[req_field] 52 | resp = client.post('/data', headers=user.auth_headers, json=bad_input, raw=True) 53 | assert resp.status_code == 400 54 | assert req_field in resp.get_json()['detail'] 55 | assert user.data == [] 56 | 57 | 58 | def test_cant_create_datum_with_extra_fields(client, user, datum_dict): 59 | bad_input = dict(**datum_dict) 60 | bad_input['extra'] = 'foo' 61 | resp = client.post('/data', headers=user.auth_headers, json=bad_input, raw=True) 62 | assert resp.status_code == 400 63 | assert 'extra' in resp.get_json()['detail'] 64 | 65 | 66 | def test_cant_create_datum_with_bad_input(client, user, datum_dict): 67 | bad_input = dict(**datum_dict) 68 | bad_input['type'] = 'STRRR' 69 | resp = client.post('/data', headers=user.auth_headers, json=bad_input, raw=True) 70 | assert resp.status_code == 400 71 | assert 'STR' in resp.get_json()['detail'] 72 | 73 | 74 | def test_cant_create_datum_that_already_exists(client, user, datum_dict): 75 | assert len(user.data) == 0 76 | datum = client.post('/data', headers=user.auth_headers, json=datum_dict) 77 | assert len(user.data) == 1 78 | new_dict = dict(**datum_dict) 79 | new_dict['name'] = 'a new name' 80 | resp = client.post('/data', headers=user.auth_headers, json=new_dict, raw=True) 81 | assert resp.status_code == 500 82 | resp = resp.get_json() 83 | assert 'already' in resp['error'] 84 | assert resp['datum_id'] == datum['id'] 85 | assert len(user.data) == 1 86 | 87 | 88 | def test_cant_create_object_datum_that_already_exists(client, user): 89 | # make the first copy 90 | child_datum_1 = make_datum() 91 | child_datum_2 = make_datum() 92 | child_datum_3 = make_datum() 93 | value = {'foo': child_datum_1.id, 'bar': child_datum_2.id, 'baz': child_datum_3.id} 94 | datum_dict = 
dict(name='compound datum', type='OBJ', value=json.dumps(value)) 95 | resp1 = client.post('/data', headers=user.auth_headers, json=datum_dict) 96 | 97 | # make the second copy, with different names but same child data 98 | value = {'oof': child_datum_1.id, 'rab': child_datum_2.id, 'zab': child_datum_3.id} 99 | datum_dict = dict(name='mutad dnuopmoc', type='OBJ', value=json.dumps(value)) 100 | resp2 = client.post('/data', headers=user.auth_headers, json=datum_dict, raw=True) 101 | assert resp2.status_code == 500 102 | assert resp2.get_json()['datum_id'] == resp1['id'] 103 | 104 | 105 | def test_cant_create_object_datum_with_duplicate_children(client, user): 106 | child_datum_1 = make_datum() 107 | child_datum_2 = make_datum() 108 | value = {'foo': child_datum_1.id, 'bar': child_datum_2.id, 'baz': child_datum_2.id} 109 | datum_dict = dict(name='compound datum', type='OBJ', value=json.dumps(value)) 110 | resp = client.post('/data', headers=user.auth_headers, json=datum_dict, raw=True) 111 | assert resp.status_code == 500 112 | assert 'duplicate' in resp.get_json()['error'] 113 | 114 | 115 | def test_can_update_datum(client, datum): 116 | new_datum_dict = dict(name='new datum', type='NUM', value='500') 117 | updated_datum = client.post(f'/data/{datum.id}', headers=datum.author.auth_headers, 118 | json=new_datum_dict) 119 | new_datum_dict['id'] = datum.id 120 | new_datum_dict['user_id'] = datum.author.id 121 | del updated_datum['created_at'] 122 | assert updated_datum == new_datum_dict 123 | 124 | action = Action.query.filter_by(type=ActionType.UPDATE_DATUM, user_id=datum.author.id, obj1_id=datum.id).first() 125 | assert action is not None 126 | 127 | 128 | def test_cant_update_datum_thats_not_yours(client, datum, user, datum_dict): 129 | assert datum.name != datum_dict['name'] 130 | resp = client.post(f'/data/{datum.id}', headers=user.auth_headers, 131 | json=datum_dict, raw=True) 132 | assert resp.status_code == 401 133 | resp = resp.get_json() 134 | assert 'name' not 
in resp 135 | 136 | 137 | def test_cant_update_datum_with_bad_input(client, datum): 138 | new_datum_dict = dict(name='new datum', type='STRRR', value='500') 139 | resp = client.post(f'/data/{datum.id}', headers=datum.author.auth_headers, json=new_datum_dict, raw=True) 140 | assert resp.status_code == 400 141 | assert 'STR' in resp.get_json()['detail'] 142 | 143 | 144 | def test_can_update_datum_thats_not_yours_if_admin(client, datum, admin_user, datum_dict): 145 | new_datum_dict = dict(name='new datum from admin', type='NUM', value='501') 146 | updated_datum = client.post(f'/data/{datum.id}', headers=admin_user.auth_headers, 147 | json=new_datum_dict) 148 | new_datum_dict['id'] = datum.id 149 | new_datum_dict['user_id'] = datum.author.id 150 | del updated_datum['created_at'] 151 | assert updated_datum == new_datum_dict 152 | 153 | 154 | def test_can_update_object_datum(client, user, object_datum_dict): 155 | object_datum = make_object_datum(author=user) 156 | updated_datum = client.post(f'/data/{object_datum.id}', headers=user.auth_headers, 157 | json=object_datum_dict) 158 | updated_datum['value'].keys() == object_datum_dict.keys() 159 | updated_datum['value'].keys() == object_datum.object_value().keys() 160 | 161 | 162 | def _update_obj_datum_dict(odd, new_child_id): 163 | odd['value'] = json.loads(odd['value']) 164 | first_key = list(odd['value'].keys())[0] 165 | odd['value'][first_key] = new_child_id 166 | odd['value'] = json.dumps(odd['value']) 167 | 168 | 169 | def test_cant_update_datum_with_cyclical_children(client, user, object_datum_dict): 170 | object_datum = make_object_datum(author=user, object_datum_dict=object_datum_dict) 171 | 172 | # first test a 1-level cycle by setting one of the children in our update 173 | # dict to the same id as the updating object datum 174 | _update_obj_datum_dict(object_datum_dict, object_datum.id) 175 | resp = client.post(f'/data/{object_datum.id}', headers=user.auth_headers, 176 | json=object_datum_dict, raw=True) 177 | 
assert resp.status_code == 500 178 | assert 'cyclical' in resp.get_json()['error'] 179 | 180 | # now test a multi-level cycle by creating a new object datum which 181 | # makes a child out of the first. then try to update the first with the new 182 | # datum as its own child 183 | child_dict = make_object_datum_dict() 184 | _update_obj_datum_dict(child_dict, object_datum.id) 185 | child_datum = make_object_datum(author=user, object_datum_dict=child_dict) 186 | _update_obj_datum_dict(object_datum_dict, child_datum.id) 187 | resp = client.post(f'/data/{object_datum.id}', headers=user.auth_headers, 188 | json=object_datum_dict, raw=True) 189 | assert resp.status_code == 500 190 | assert 'cyclical' in resp.get_json()['error'] 191 | 192 | 193 | def test_cant_update_datum_with_cyclical_parent(client, user, object_datum_dict): 194 | object_datum = make_object_datum(author=user, object_datum_dict=object_datum_dict) 195 | parent_dict = make_object_datum_dict() 196 | _update_obj_datum_dict(parent_dict, object_datum.id) 197 | parent_datum = make_object_datum(author=user, object_datum_dict=parent_dict) 198 | _update_obj_datum_dict(object_datum_dict, parent_datum.id) 199 | 200 | resp = client.post(f'/data/{object_datum.id}', headers=user.auth_headers, 201 | json=object_datum_dict, raw=True) 202 | assert resp.status_code == 500 203 | assert 'cyclical' in resp.get_json()['error'] 204 | 205 | 206 | def test_can_get_datum_details(client, datum, user): 207 | resp = client.get(f'/data/{datum.id}', headers=user.auth_headers) 208 | expected = datum.to_obj(whos_asking=user) 209 | del expected['created_at'] 210 | del resp['created_at'] 211 | assert resp == expected 212 | 213 | action = Action.query.filter_by(type=ActionType.LIST_DATUM, user_id=user.id, obj1_id=datum.id).first() 214 | assert action is not None 215 | 216 | 217 | def test_cant_get_datum_that_doesnt_exist(client, user): 218 | resp = client.get(f'/data/999999', headers=user.auth_headers, raw=True) 219 | assert 
resp.status_code == 404 220 | -------------------------------------------------------------------------------- /api/tests/test_search.py: -------------------------------------------------------------------------------- 1 | from ..db import db 2 | from secrets import token_urlsafe 3 | from .utils import make_datum, make_object_datum, make_test 4 | 5 | 6 | def test_basic_search_of_all_objects(client, user): 7 | query = token_urlsafe(6) 8 | 9 | # find a datum with the query in its name 10 | d1 = make_datum() 11 | d1.name = f'{query} world' 12 | 13 | # or in its value 14 | d2 = make_datum() 15 | d2.value = f'asdf{query}asdf' 16 | 17 | # or in the name of one of its children 18 | d3 = make_object_datum() 19 | d3.children[0].name = f'{query} child datum' 20 | 21 | # and make one datum _not_ to find 22 | d4 = make_datum() 23 | 24 | # find a test with the query in its name 25 | t1 = make_test() 26 | t1.name = f'a basic {query} world test' 27 | 28 | # or in the label of one of its data 29 | t2 = make_test() 30 | t2.data[0].label = f'the {query} item' 31 | 32 | # and a test _not_ to find 33 | t3 = make_test() 34 | 35 | db.session.commit() 36 | 37 | resp = client.post('/search', headers=user.auth_headers, json=dict( 38 | type='ALL', query=query 39 | )) 40 | 41 | datum_ids = list(map(lambda d: d['id'], resp['data'])) 42 | test_ids = list(map(lambda t: t['id'], resp['tests'])) 43 | 44 | assert set(datum_ids) == set([d1.id, d2.id, d3.id]) 45 | assert set(test_ids) == set([t1.id, t2.id]) 46 | assert d4.id not in datum_ids 47 | assert t3.id not in test_ids 48 | 49 | 50 | def test_can_search_for_only_tests(client, user): 51 | query = token_urlsafe(6) 52 | d1 = make_datum() 53 | d1.name = f'{query} world' 54 | t1 = make_test() 55 | t1.name = f'a basic {query} world test' 56 | db.session.commit() 57 | resp = client.post('/search', headers=user.auth_headers, json=dict( 58 | type='TESTS', query=query 59 | )) 60 | test_ids = list(map(lambda t: t['id'], resp['tests'])) 61 | 62 | assert 
resp['data'] == [] 63 | assert set(test_ids) == set([t1.id]) 64 | 65 | 66 | def test_can_search_for_only_data(client, user): 67 | query = token_urlsafe(6) 68 | d1 = make_datum() 69 | d1.name = f'{query} world' 70 | t1 = make_test() 71 | t1.name = f'a basic {query} world test' 72 | db.session.commit() 73 | resp = client.post('/search', headers=user.auth_headers, json=dict( 74 | type='DATA', query=query 75 | )) 76 | datum_ids = list(map(lambda d: d['id'], resp['data'])) 77 | 78 | assert resp['tests'] == [] 79 | assert set(datum_ids) == set([d1.id]) 80 | 81 | 82 | def test_results_have_no_duplicate_objects(client, user): 83 | query = token_urlsafe(6) 84 | 85 | # or in the name of one of its children 86 | d1 = make_object_datum() 87 | d1.name = f'yet another {query}' 88 | d1.value = f'a {query} value' 89 | d1.children[0].name = f'{query} child datum' 90 | 91 | # find a test with the query in its name and a label 92 | t1 = make_test() 93 | t1.name = f'a basic {query} world test' 94 | t1.data[0].label = f'the {query} item' 95 | 96 | db.session.commit() 97 | 98 | resp = client.post('/search', headers=user.auth_headers, json=dict( 99 | type='ALL', query=query 100 | )) 101 | 102 | datum_ids = list(map(lambda d: d['id'], resp['data'])) 103 | test_ids = list(map(lambda t: t['id'], resp['tests'])) 104 | 105 | assert datum_ids == [d1.id] 106 | assert test_ids == [t1.id] 107 | 108 | 109 | def test_search_escapes_sql_match_chars(client, user): 110 | rand_str = token_urlsafe(6) 111 | query = f'{rand_str}%foo' 112 | no_match = f'{rand_str}.foo' 113 | d1 = make_datum() 114 | d1.name = f'{query} world' 115 | d2 = make_datum() 116 | d2.name = f'{no_match} world' 117 | db.session.commit() 118 | resp = client.post('/search', headers=user.auth_headers, json=dict( 119 | type='DATA', query=query 120 | )) 121 | datum_ids = list(map(lambda d: d['id'], resp['data'])) 122 | 123 | assert set(datum_ids) == set([d1.id]) 124 | 125 | 126 | def test_search_requires_search_string(client, user): 127 | 
query = '' 128 | resp = client.post('/search', headers=user.auth_headers, json=dict( 129 | type='ALL', query=query 130 | ), raw=True) 131 | assert resp.status_code == 400 132 | assert 'is too short' in resp.get_json()['detail'] 133 | 134 | # single char queries are also not valid 135 | query = 'a' 136 | resp = client.post('/search', headers=user.auth_headers, json=dict( 137 | type='ALL', query=query 138 | ), raw=True) 139 | assert resp.status_code == 400 140 | assert 'is too short' in resp.get_json()['detail'] 141 | 142 | 143 | def test_cant_search_for_nonexisting_types(client, user): 144 | query = token_urlsafe(6) 145 | resp = client.post('/search', headers=user.auth_headers, json=dict( 146 | type='FOO', query=query 147 | ), raw=True) 148 | assert resp.status_code == 400 149 | assert 'is not one of' in resp.get_json()['detail'] 150 | -------------------------------------------------------------------------------- /api/tests/test_tests.py: -------------------------------------------------------------------------------- 1 | from ..db.models import Datum, TestDatum, Action, ActionType 2 | 3 | 4 | def test_can_create_test(client, user, test_dict): 5 | assert len(user.tests) == 0 6 | resp = client.post('/tests', headers=user.auth_headers, json=test_dict) 7 | assert 'created_at' in resp 8 | assert 'id' in resp 9 | assert set(resp['data'].keys()) == set(map(lambda d: d['label'], test_dict['data'])) 10 | for key in resp['data'].keys(): 11 | datum_id = resp['data'][key]['id'] 12 | datum = Datum.query.filter_by(id=datum_id).first() 13 | test_datum = TestDatum.query.filter_by(test_id=resp['id'], datum_id=datum_id).first() 14 | assert datum.name == resp['data'][key]['name'] 15 | assert datum.type.value == resp['data'][key]['type'] 16 | assert datum.value == resp['data'][key]['value'] 17 | assert test_datum.disposition.value == resp['data'][key]['disposition'] 18 | assert len(user.tests) == 1 19 | 20 | action = Action.query.filter_by(type=ActionType.CREATE_TEST, 
user_id=user.id, obj1_id=resp['id']).first() 21 | assert action is not None 22 | 23 | 24 | def test_cant_create_test_with_duplicate_labels(client, user, test_dict): 25 | label_to_duplicate = list(map(lambda d: d['label'], test_dict['data']))[0] 26 | test_dict['data'].append({'label': label_to_duplicate}) 27 | resp = client.post('/tests', headers=user.auth_headers, json=test_dict, raw=True) 28 | assert resp.status_code == 500 29 | assert 'duplicate test data label' in resp.get_json()['error'] 30 | 31 | 32 | def test_cant_create_test_with_datum_that_doesnt_exist(client, user, test_dict): 33 | test_dict['data'].append({'label': 'foo', 'datum_id': 0}) 34 | resp = client.post('/tests', headers=user.auth_headers, json=test_dict, raw=True) 35 | assert resp.status_code == 500 36 | resp = resp.get_json() 37 | assert 'it does not exist' in resp['error'] 38 | assert resp['datum_id'] == 0 39 | 40 | 41 | def test_cant_create_duplicate_test(client, user, test_dict): 42 | client.post('/tests', headers=user.auth_headers, json=test_dict) 43 | resp = client.post('/tests', headers=user.auth_headers, json=test_dict, raw=True) 44 | assert resp.status_code == 500 45 | resp = resp.get_json() 46 | assert 'already exists' in resp['error'] 47 | assert resp['test_id'] is not None 48 | 49 | 50 | def test_can_get_test_details(client, user, test): 51 | resp = client.get(f'/tests/{test.id}', headers=user.auth_headers) 52 | assert resp['id'] == test.id 53 | 54 | action = Action.query.filter_by(type=ActionType.LIST_TEST, user_id=user.id, obj1_id=test.id).first() 55 | assert action is not None 56 | -------------------------------------------------------------------------------- /api/tests/test_users.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import base64 3 | import os 4 | from secrets import token_urlsafe 5 | from werkzeug.security import check_password_hash 6 | from datetime import datetime 7 | from ..db.models import User, Action, 
ActionType 8 | from .utils import make_random_email 9 | 10 | 11 | FAKE_CONFIRMATION_URL = 'https://fake.com/confirm' 12 | 13 | 14 | def test_signup(client, username, email, password, bio): 15 | cur_time = datetime.now() 16 | resp = client.post('/users', json=dict( 17 | username=username, email=email, password=password, bio=bio, 18 | email_confirmation_url=FAKE_CONFIRMATION_URL 19 | )) 20 | assert resp['id'] > 0 21 | 22 | # convert the time response and assert that the user was just created 23 | created_time = datetime.strptime(resp['created_at'], '%Y-%m-%dT%H:%M:%S') 24 | assert (created_time - cur_time).total_seconds() < 5 25 | assert resp['updated_at'] == resp['created_at'] 26 | assert resp['username'] == username 27 | assert resp['bio'] == bio 28 | assert resp['avatar_url'].startswith('https://secure.gravatar') 29 | assert resp['data'] == [] 30 | 31 | # assert that certain fields never show up 32 | assert 'password' not in resp.keys() 33 | assert 'email' not in resp.keys() 34 | assert 'password_hash' not in resp.keys() 35 | 36 | # but since we have DB access, actually verify the password was saved 37 | # correctly, and that no 'password' field exists on the user 38 | user = User.query.filter_by(id=resp['id']).first() 39 | assert pytest.raises(AttributeError, lambda: user.password) 40 | assert check_password_hash(user.password_hash, password) 41 | assert user.email == email 42 | assert user.bio == bio 43 | assert user.ec_token != "" 44 | assert user.is_email_confirmed is False 45 | assert user.is_admin is False 46 | 47 | # verify action was added 48 | action = Action.query.filter_by(type=ActionType.SIGNUP, user_id=user.id).first() 49 | assert action is not None 50 | 51 | # user can not sign up twice 52 | resp = client.post('/users', raw=True, json=dict( 53 | username=username, email=email, password=password, bio=bio, 54 | email_confirmation_url=FAKE_CONFIRMATION_URL 55 | )) 56 | assert resp.status_code == 500 57 | resp = resp.get_json() 58 | assert 'exists' in 
resp['error'] 59 | 60 | 61 | def test_cant_signup_with_existing_username(client, user, email, password, bio): 62 | resp = client.post('/users', raw=True, json=dict( 63 | username=user.username, 64 | email=email, 65 | password=password, 66 | bio=bio, 67 | email_confirmation_url=FAKE_CONFIRMATION_URL 68 | )) 69 | assert resp.status_code == 500 70 | resp = resp.get_json() 71 | assert 'already exists' in resp['error'] 72 | assert user.username == resp['username'] 73 | 74 | 75 | def test_cant_signup_with_existing_email(client, user, username, password, bio): 76 | resp = client.post('/users', raw=True, json=dict( 77 | username=username, 78 | email=user.email, 79 | password=password, 80 | bio=bio, 81 | email_confirmation_url=FAKE_CONFIRMATION_URL 82 | )) 83 | assert resp.status_code == 500 84 | resp = resp.get_json() 85 | assert 'already exists' in resp['error'] 86 | assert user.email == resp['email'] 87 | 88 | 89 | def test_signup_missing_fields(client, username, email, password): 90 | resp = client.post('/users', raw=True, json=dict()).get_json() 91 | assert resp['status'] == 400 92 | assert 'username' in resp['detail'] 93 | 94 | resp = client.post('/users', raw=True, json=dict( 95 | username=username 96 | )).get_json() 97 | assert resp['status'] == 400 98 | assert 'email' in resp['detail'] 99 | 100 | resp = client.post('/users', raw=True, json=dict( 101 | username=username, email=email 102 | )).get_json() 103 | assert resp['status'] == 400 104 | assert 'password' in resp['detail'] 105 | 106 | resp = client.post('/users', raw=True, json=dict( 107 | username=username, email=email, password=password 108 | )).get_json() 109 | assert resp['status'] == 400 110 | assert 'bio' in resp['detail'] 111 | 112 | 113 | def test_signup_long_username(client, email, password): 114 | username = token_urlsafe(100) 115 | resp = client.post('/users', raw=True, json=dict( 116 | username=username, email=email, password=password 117 | )).get_json() 118 | assert resp['status'] == 400 119 | 
assert 'too long' in resp['detail'] 120 | 121 | 122 | @pytest.mark.skip(reason="TODO") 123 | def test_signup_sql_injection_attack(client): 124 | pass 125 | 126 | 127 | def test_user_detail(client, user): 128 | resp = client.get('/users/%s' % user.id, headers=user.auth_headers) 129 | 130 | assert 'password' not in resp.keys() 131 | assert 'email' not in resp.keys() 132 | assert 'password_hash' not in resp.keys() 133 | 134 | assert resp['id'] == user.id 135 | assert resp['username'] == user.username 136 | 137 | # verify action was added 138 | action = Action.query.filter_by(type=ActionType.LIST_USER, user_id=user.id, obj1_id=user.id).first() 139 | assert action is not None 140 | 141 | 142 | def test_can_get_user_detail_for_other_user(client, user, other_user): 143 | resp = client.get('/users/%s' % user.id, headers=other_user.auth_headers) 144 | 145 | assert 'password' not in resp.keys() 146 | assert 'email' not in resp.keys() 147 | assert 'password_hash' not in resp.keys() 148 | 149 | assert resp['id'] == user.id 150 | assert resp['username'] == user.username 151 | 152 | # verify action was added 153 | action = Action.query.filter_by(type=ActionType.LIST_USER, user_id=other_user.id, obj1_id=user.id).first() 154 | assert action is not None 155 | 156 | 157 | def test_cant_get_user_that_doesnt_exist(client, user): 158 | resp = client.get('/users/10000000', headers=user.auth_headers, raw=True).get_json() 159 | assert resp['status'] == 404 160 | 161 | 162 | def test_user_confirm_email(client, user): 163 | assert user.is_email_confirmed is False 164 | resp = client.get('/users/confirm_email/%s' % user.ec_token.token) 165 | assert resp['confirmed'] 166 | assert user.is_email_confirmed is True 167 | assert user.ec_token is None 168 | 169 | # verify action was added 170 | action = Action.query.filter_by(type=ActionType.CONFIRM_EMAIL, user_id=user.id).first() 171 | assert action is not None 172 | 173 | 174 | def test_user_confirm_email_fails_bad_token(client, user): 175 | 
assert user.is_email_confirmed is False 176 | resp = client.get('/users/confirm_email/%s' % 'foobar', raw=True).get_json() 177 | assert resp['status'] == 404 178 | assert 'token' in resp['detail'] 179 | assert user.is_email_confirmed is False 180 | assert user.ec_token.token is not None 181 | 182 | 183 | def test_user_can_update_details(client, user): 184 | new_email = make_random_email() 185 | new_password = 'foobarbaz' 186 | 187 | user.is_email_confirmed = True 188 | assert user.email != new_email 189 | assert user.check_password(new_password) is False 190 | 191 | client.post('/users/%s' % user.id, headers=user.auth_headers, json=dict( 192 | email=new_email, password=new_password 193 | )) 194 | 195 | user = User.query.filter_by(id=user.id).first() 196 | assert user.email == new_email 197 | assert user.is_email_confirmed is False 198 | assert user.check_password(new_password) 199 | 200 | # verify action was added 201 | action = Action.query.filter_by(type=ActionType.UPDATE_USER, user_id=user.id, obj1_id=user.id).first() 202 | assert action is not None 203 | 204 | 205 | def test_user_cannot_update_details_for_other_user(client, user, other_user): 206 | new_email = make_random_email() 207 | new_password = 'foobarbaz' 208 | 209 | user.is_email_confirmed = True 210 | 211 | resp = client.post('/users/%s' % other_user.id, headers=user.auth_headers, raw=True, json=dict( 212 | email=new_email, password=new_password 213 | )).get_json() 214 | 215 | assert resp['status'] == 401 216 | assert 'permission' in resp['detail'] 217 | 218 | other_user = User.query.filter_by(id=other_user.id).first() 219 | assert user.email != new_email 220 | assert not user.check_password(new_password) 221 | 222 | # verify action was not added 223 | action = Action.query.filter_by(type=ActionType.UPDATE_USER, user_id=user.id, obj1_id=other_user.id).first() 224 | assert action is None 225 | 226 | 227 | def test_admin_can_update_details_for_other_user(client, admin_user, other_user): 228 | new_email = 
make_random_email() 229 | new_password = 'foobarbaz' 230 | 231 | other_user.is_email_confirmed = True 232 | 233 | client.post('/users/%s' % other_user.id, headers=admin_user.auth_headers, raw=True, json=dict( 234 | email=new_email, password=new_password 235 | )) 236 | 237 | user = User.query.filter_by(id=other_user.id).first() 238 | assert user.email == new_email 239 | assert user.is_email_confirmed is False 240 | assert user.check_password(new_password) 241 | 242 | # verify action was added 243 | action = Action.query.filter_by(type=ActionType.UPDATE_USER, user_id=admin_user.id, obj1_id=other_user.id).first() 244 | assert action is not None 245 | 246 | 247 | @pytest.mark.skip(reason="TODO") 248 | def test_user_can_update_one_field_at_a_time(): 249 | pass 250 | 251 | 252 | @pytest.mark.skip(reason="TODO") 253 | def test_user_cant_update_certain_fields(): 254 | # TODO like username etc 255 | pass 256 | 257 | 258 | def test_admin_can_promote_user(client, user, admin_user): 259 | assert not user.is_admin 260 | client.post('/users/%s/promote' % user.id, headers=admin_user.auth_headers) 261 | assert User.query.filter_by(id=user.id).first().is_admin 262 | 263 | # verify action was added 264 | action = Action.query.filter_by(type=ActionType.PROMOTE_USER, user_id=admin_user.id, obj1_id=user.id).first() 265 | assert action is not None 266 | 267 | 268 | def test_non_admin_cant_promote_user(client, user, other_user): 269 | assert not user.is_admin 270 | resp = client.post('/users/%s/promote' % user.id, headers=other_user.auth_headers, raw=True).get_json() 271 | assert resp['status'] == 401 272 | assert 'admin' in resp['detail'] 273 | assert not User.query.filter_by(id=user.id).first().is_admin 274 | 275 | 276 | def test_user_cant_promote_self(client, user): 277 | assert not user.is_admin 278 | resp = client.post('/users/%s/promote' % user.id, headers=user.auth_headers, raw=True).get_json() 279 | assert resp['status'] == 401 280 | assert 'admin' in resp['detail'] 281 | assert 
not User.query.filter_by(id=user.id).first().is_admin 282 | 283 | 284 | def test_upload_avatar(client, user, avatar_png_b64, avatar_post_b64, config): 285 | assert user.has_avatar is False 286 | resp = client.post('/users/%s/avatar' % user.id, headers=user.auth_headers, json=dict( 287 | data=avatar_png_b64, 288 | type='image/png' 289 | )) 290 | assert user.has_avatar is True 291 | assert resp == user.avatar_url 292 | img_on_disk = os.path.join(config.AVATAR_PATH, user.avatar_file) 293 | assert os.path.exists(img_on_disk) 294 | with open(img_on_disk, 'rb') as img: 295 | img_b64 = base64.b64encode(img.read()).decode('ascii') 296 | assert img_b64 == avatar_post_b64 297 | 298 | # verify action was added 299 | action = Action.query.filter_by(type=ActionType.UPLOAD_AVATAR, user_id=user.id).first() 300 | assert action is not None 301 | 302 | 303 | def test_cant_upload_avatar_for_other_user(client, user, other_user, avatar_png_b64, config): 304 | assert user.has_avatar is False 305 | resp = client.post('/users/%s/avatar' % user.id, headers=other_user.auth_headers, json=dict( 306 | data=avatar_png_b64, 307 | type='image/png' 308 | ), raw=True).get_json() 309 | assert resp['status'] == 401 310 | assert user.has_avatar is False 311 | 312 | 313 | def test_get_avatar_404s_when_no_avatar(client, user): 314 | resp = client.get('users/%s/avatar' % user.id, raw=True).get_json() 315 | assert resp['status'] == 404 316 | 317 | 318 | def test_get_avatar(client, user, avatar_png_b64, avatar_post_b64): 319 | assert user.has_avatar is False 320 | client.post('/users/%s/avatar' % user.id, headers=user.auth_headers, json=dict( 321 | data=avatar_png_b64, 322 | type='image/png' 323 | )) 324 | resp = client.get('/users/%s/avatar' % user.id, raw=True) 325 | assert resp.headers['Content-Type'] == 'image/png' 326 | img_b64 = base64.b64encode(resp.data).decode('ascii') 327 | assert img_b64 == avatar_post_b64 328 | 329 | 330 | def test_resend_confirmation_email(client, user): 331 | resp = 
client.post('users/resend_confirmation_email', json=dict( 332 | email_confirmation_url=FAKE_CONFIRMATION_URL 333 | ), headers=user.auth_headers) 334 | assert resp['success'] is True 335 | 336 | # verify action was added 337 | action = Action.query.filter_by(type=ActionType.RESEND_EMAIL_CONFIRM, user_id=user.id).first() 338 | assert action is not None 339 | -------------------------------------------------------------------------------- /api/tests/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import random 3 | 4 | from secrets import token_urlsafe 5 | from ..db.models import User, Datum, DatumType, Test 6 | from ..handlers.auth import login 7 | 8 | 9 | def make_random_username(): 10 | return f'test_user_{token_urlsafe(16)}' 11 | 12 | 13 | def make_random_password(): 14 | return token_urlsafe(16) 15 | 16 | 17 | def make_random_email(): 18 | return f'email_{token_urlsafe(16)}@test.com' 19 | 20 | 21 | def make_random_bio(): 22 | return f'I am a {token_urlsafe(8)} and work for {token_urlsafe(8)}' 23 | 24 | 25 | def make_user(username=None, email=None, password=None, bio=None, is_admin=False): 26 | if username is None: 27 | username = make_random_username() 28 | if email is None: 29 | email = make_random_email() 30 | if password is None: 31 | password = make_random_password() 32 | if bio is None: 33 | bio = make_random_bio() 34 | 35 | u = User.create(username, email, password, bio, is_admin) 36 | u.jwt_token = login({ 37 | 'username': u.username, 38 | 'password': password 39 | })['token'] 40 | u.auth_headers = {'Authorization': 'Bearer %s' % u.jwt_token} 41 | u.original_password = password 42 | return u 43 | 44 | 45 | def make_datum_dict(name=None, type=None, value=None): 46 | if type is None: 47 | type = 'STR' 48 | if value is None: 49 | value = token_urlsafe(8) 50 | if name is None: 51 | name = f'Random datum {value}' 52 | return dict(name=name, type=type, value=value) 53 | 54 | 55 | def 
make_datum(author=None, datum_dict=None): 56 | if datum_dict is None: 57 | datum_dict = make_datum_dict() 58 | if author is None: 59 | author = make_user() 60 | datum = Datum.create(author=author, 61 | value=datum_dict['value'], 62 | name=datum_dict['name'], 63 | type=DatumType(datum_dict['type'])) 64 | # repopulate auth fields on the author object 65 | datum.author = author 66 | return datum 67 | 68 | 69 | def make_object_datum_dict(author=None): 70 | key1 = token_urlsafe(8) 71 | key2 = token_urlsafe(8) 72 | key3 = token_urlsafe(8) 73 | child_datum_1 = make_datum(author=author) 74 | child_datum_2 = make_datum(author=author) 75 | child_datum_3 = make_datum(author=author) 76 | value = {} 77 | value[key1] = child_datum_1.id 78 | value[key2] = child_datum_2.id 79 | value[key3] = child_datum_3.id 80 | return dict(name=f'Random datum {token_urlsafe(8)}', 81 | type='OBJ', 82 | value=json.dumps(value)) 83 | 84 | 85 | def make_object_datum(author=None, object_datum_dict=None): 86 | if object_datum_dict is None: 87 | object_datum_dict = make_object_datum_dict() 88 | return make_datum(author=author, datum_dict=object_datum_dict) 89 | 90 | 91 | def make_test_dict(): 92 | dispositions = ['POS', 'NEG', 'EDGE', 'DESTRUCTIVE', 'NEUTRAL'] 93 | data = [ 94 | { 95 | 'label': token_urlsafe(8), 96 | 'datum_id': make_datum().id, 97 | 'disposition': random.choice(dispositions) 98 | }, 99 | { 100 | 'label': token_urlsafe(8), 101 | 'datum_id': make_datum().id, 102 | 'disposition': random.choice(dispositions) 103 | } 104 | ] 105 | return dict(name=f'Test {token_urlsafe(8)}', 106 | data=data) 107 | 108 | 109 | def make_test(author=None): 110 | if author is None: 111 | author = make_user() 112 | data_dict = make_test_dict() 113 | return Test.create(author=author, name=data_dict['name'], data=data_dict['data']) 114 | -------------------------------------------------------------------------------- /app.yaml: -------------------------------------------------------------------------------- 1 | 
runtime: python37 2 | entrypoint: gunicorn -b :$PORT api.main:app 3 | service: api 4 | includes: 5 | - app_secrets.yaml 6 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # This requirements file has been automatically generated from `Pipfile` with 3 | # `pipenv-to-requirements` 4 | # 5 | # 6 | # This has been done to maintain backward compatibility with tools and services 7 | # that do not support `Pipfile` yet. 8 | # 9 | # Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and 10 | # `Pipfile.lock` and then regenerate `requirements*.txt`. 11 | ################################################################################ 12 | 13 | atomicwrites==1.3.0 14 | attrs==19.1.0 15 | certifi==2019.6.16 16 | entrypoints==0.3 17 | flake8==3.7.8 18 | importlib-metadata==0.19 19 | mccabe==0.6.1 20 | more-itertools==7.2.0 21 | packaging==19.1 22 | pbr==5.4.1 23 | pipenv-to-requirements==0.8.1 24 | pipenv==2018.11.26 25 | pluggy==0.12.0 26 | py==1.8.0 27 | pycodestyle==2.5.0 28 | pyflakes==2.1.1 29 | pyparsing==2.4.2 30 | pytest==5.0.1 31 | pyyaml==5.1.2 32 | six==1.12.0 33 | virtualenv-clone==0.5.3 34 | virtualenv==16.7.2 35 | wcwidth==0.1.7 36 | zipp==0.5.2 37 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # This requirements file has been automatically generated from `Pipfile` with 3 | # `pipenv-to-requirements` 4 | # 5 | # 6 | # This has been done to maintain backward compatibility with tools and services 7 | # that do not support `Pipfile` yet. 
8 | # 9 | # Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and 10 | # `Pipfile.lock` and then regenerate `requirements*.txt`. 11 | ################################################################################ 12 | 13 | alembic==1.0.11 14 | cachetools==3.1.1 15 | certifi==2019.6.16 16 | chardet==3.0.4 17 | click==7.0 18 | clickclick==1.2.2 19 | connexion==2.3.0 20 | flask-cors==3.0.8 21 | flask-migrate==2.5.2 22 | flask-sqlalchemy==2.4.0 23 | flask==1.1.1 24 | google-api-core==1.14.2 25 | google-auth==1.6.3 26 | google-cloud-core==1.0.3 27 | google-cloud-storage==1.17.0 28 | google-resumable-media==0.3.2 29 | googleapis-common-protos==1.6.0 30 | gunicorn==19.9.0 31 | idna==2.8 32 | inflection==0.3.1 33 | itsdangerous==1.1.0 34 | jinja2==2.10.1 35 | jsonschema==2.6.0 36 | libgravatar==0.2.3 37 | mako==1.0.14 38 | markupsafe==1.1.1 39 | mysqlclient==1.4.2.post1 40 | openapi-spec-validator==0.2.8 41 | pillow==6.1.0 42 | protobuf==3.9.0 43 | pyasn1-modules==0.2.5 44 | pyasn1==0.4.6 45 | pyjwt==1.7.1 46 | pymysql==0.9.3 47 | python-dateutil==2.8.0 48 | python-dotenv==0.10.3 49 | python-editor==1.0.4 50 | pytz==2019.2 51 | pyyaml==5.1.2 52 | requests==2.22.0 53 | rsa==4.0 54 | six==1.12.0 55 | sqlalchemy==1.3.6 56 | swagger-ui-bundle==0.0.5 57 | urllib3==1.25.3 58 | werkzeug==0.15.5 59 | --------------------------------------------------------------------------------