├── .gitignore ├── LICENSE ├── Makefile ├── Pipfile ├── README.md ├── docker-compose.yml ├── example.env ├── get_local_pool_access_token.sh ├── make-deploy-preflight ├── make-run-preflight ├── manage.py ├── package.json ├── preflight.py ├── pytest.ini ├── scheduled_commands.py ├── serverless.yml └── supportal ├── __init__.py ├── app ├── __init__.py ├── admin.py ├── apps.py ├── authentication_backend.py ├── common │ ├── __init__.py │ └── enums.py ├── migrations │ ├── 0001_initial.py │ └── __init__.py ├── models │ ├── __init__.py │ ├── api_key.py │ ├── base_model_mixin.py │ ├── email.py │ ├── person.py │ ├── user.py │ └── vol_prospect_models.py ├── permissions.py ├── serializers │ ├── __init__.py │ ├── person_serializer.py │ ├── user_serializers.py │ ├── vol_prospect_assignment_serializer.py │ └── vol_prospect_contact_event_serializer.py └── views │ ├── __init__.py │ ├── email_views.py │ ├── invite_views.py │ ├── pagination.py │ ├── person_views.py │ ├── user_views.py │ └── vol_prospect_views.py ├── conftest.py ├── docs.py ├── services ├── email_service.py ├── google_sheets_service.py └── mobilize_america.py ├── settings.py ├── shifter ├── __init__.py ├── apps.py ├── common │ ├── __init__.py │ └── error_codes.py ├── event_recommendation_strategies.py ├── management │ └── commands │ │ ├── import_mobilize_america_events.py │ │ ├── import_us_zip5s.py │ │ ├── move_zip5s_to_s3.py │ │ ├── retry_ma_events.py │ │ ├── update_prioritization.py │ │ └── update_prioritization_meta.py ├── migrations │ └── __init__.py ├── mobilize_america_helpers.py ├── models.py ├── serializers.py └── views.py ├── tests ├── __init__.py ├── app │ ├── management │ │ └── commands │ │ │ ├── test_email_users_with_expiring_assignments.py │ │ │ ├── test_expire_assignments.py │ │ │ ├── test_normalize_emails.py │ │ │ └── test_unskip_prospects.py │ ├── models │ │ ├── test_person.py │ │ ├── test_user.py │ │ └── test_vol_prospect_models.py │ ├── test_authentication_backend.py │ └── views │ │ ├── 
test_email_views.py │ │ ├── test_invite_views.py │ │ ├── test_person_views.py │ │ ├── test_user_views.py │ │ └── test_vol_prospect_views.py ├── services │ ├── test_mobilize_america.py │ └── test_sheets_service.py ├── shifter │ ├── management │ │ └── commands │ │ │ ├── test_import_mobilize_america_events.py │ │ │ ├── test_import_us_zip5s.py │ │ │ ├── test_retry_ma_events.py │ │ │ ├── test_update_prioritization.py │ │ │ ├── test_update_prioritization_meta.py │ │ │ └── us_10_test_zip5s.csv.gz │ ├── test_models.py │ └── test_views.py ├── test_throttles.py ├── test_urls.py └── utils.py ├── throttles.py ├── urls.py └── wsgi.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Other project settings 2 | node_modules 3 | .serverless 4 | 5 | # Mac 6 | .DS_Store 7 | 8 | # Byte-compiled / optimized / DLL files 9 | __pycache__/ 10 | *.py[cod] 11 | *$py.class 12 | 13 | # C extensions 14 | # *.so 15 | 16 | # Distribution / packaging 17 | .Python 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | 66 | # Flask stuff: 67 | instance/ 68 | .webassets-cache 69 | 70 | # Scrapy stuff: 71 | .scrapy 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # VS Code 83 | .vscode 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # celery beat schedule file 89 | celerybeat-schedule 90 | 91 | # SageMath parsed files 92 | *.sage.py 93 | 94 | # Environments 95 | .env 96 | .venv 97 | env/ 98 | venv/ 99 | ENV/ 100 | env.bak/ 101 | venv.bak/ 102 | 103 | # Spyder project settings 104 | .spyderproject 105 | .spyproject 106 | 107 | # Rope project settings 108 | .ropeproject 109 | 110 | # mkdocs documentation 111 | /site 112 | 113 | # mypy 114 | .mypy_cache/ 115 | 116 | .idea 117 | *.iml 118 | 119 | db.sqlite3 120 | 121 | /supportal/dump.rdb 122 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Elizabeth-Warren 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice 
and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Make Supportal 2 | 3 | STAGE?=dev 4 | INFRASTRUCTURE?=dev 5 | 6 | install: 7 | pipenv install 8 | 9 | install-gdal-ubuntu: 10 | sudo apt-get update -y 11 | sudo apt-get install -y libgdal-dev 12 | 13 | install-dev: 14 | pipenv install -d 15 | 16 | install-dev-ubuntu: install-gdal-ubuntu 17 | pipenv install -d 18 | 19 | test: install-dev 20 | pipenv run test 21 | 22 | install-deploy-dependencies: 23 | npm install 24 | 25 | create-domain: install-deploy-dependencies 26 | sls create_domain -s $(STAGE) --infrastructure $(INFRASTRUCTURE) 27 | 28 | deploy-preflight: install-deploy-dependencies 29 | (export STAGE=$(STAGE) && export INFRASTRUCTURE=$(INFRASTRUCTURE) && ./make-deploy-preflight) 30 | 31 | run-preflight: deploy-preflight 32 | (export STAGE=$(STAGE) && export INFRASTRUCTURE=$(INFRASTRUCTURE) && ./make-run-preflight) 33 | 34 | deploy: install-deploy-dependencies create-domain 35 | sls deploy -s $(STAGE) --infrastructure $(INFRASTRUCTURE) 36 | 37 | deploy-with-preflight: install-deploy-dependencies create-domain run-preflight 38 | sls deploy -s $(STAGE) --infrastructure $(INFRASTRUCTURE) 39 | # Run the migrations again. 
Because migrations are idempotent it shouldn't be 40 | # a problem to just run them again after a deploy. This "re-run" ensures that if 41 | # this is your first time standing up the edge stage, the database does get 42 | # migrated 43 | sls wsgi manage -c "migrate" -s $(STAGE) --infrastructure $(INFRASTRUCTURE) 44 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | ipython = "*" 8 | isort = "*" 9 | model-bakery = "*" 10 | nplusone = "*" 11 | pytest-django = "*" 12 | pytest-mock = "*" 13 | responses = "*" 14 | pylint = "*" 15 | freezegun = "==0.3.12" 16 | zappa = "==0.48.2" 17 | moto="*" 18 | pytest-env = "*" 19 | 20 | [packages] 21 | boto3 = "==1.9.177" 22 | cryptography = "==2.8" 23 | django = "==2.2.18" 24 | django-cors-headers = "==3.1.1" 25 | django-enumfields = "==1.0.0" 26 | django-filter = "==2.2.0" 27 | django-localflavor = "==2.2" 28 | django-phonenumber-field = "==3.0.1" 29 | django-s3-storage = "==0.12.5" 30 | djangorestframework = "==3.10.3" 31 | # drf-yasg could be replaced with drf's internal openapi schema generator 32 | # when we bump to the next version. 
We are currently seeing this issue: 33 | # https://github.com/encode/django-rest-framework/issues/6941 34 | drf-yasg = "==1.17.0" 35 | google-auth-oauthlib = "*" 36 | psycopg2-binary = "==2.8.4" 37 | pygsheets = "*" 38 | pyjwt = "==1.7.1" 39 | pytz = "==2019.3" 40 | werkzeug = "==0.16.0" 41 | django-redis = "==4.11.0" 42 | zipcodes = "*" 43 | 44 | [requires] 45 | python_version = "3.7" 46 | 47 | [scripts] 48 | migrate="python manage.py migrate" 49 | server="python manage.py runserver" 50 | shell="python manage.py shell -i ipython" 51 | test="pytest" 52 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Open Source Notes 2 | This is our code for a supporter portal (aka supportal)-- we stored a product called Switchboard and an internal shifting API in this code base. We have removed some of the migrations, the tests and many of the scheduled commands in order to open source this project. This code is not intended to be cloned and re-reun ouot of the box(as it would require a little work-- we rely on some shared code libraries that are not included). I've left the set up steps, and SLS documentation for posterity in hopes that they might be useful! 3 | 4 | If you wanted to set things up you would need to implement a few things that we implemented in common code: 5 | 6 | - an EmailService that sends email details of the required class are in the file services/email_service.py 7 | - Telemetry/Metric (an optional logging service) 8 | - get_env_var to get the environment variables 9 | - geocode to take an address and return a lat/long 10 | - extract_phone_number_e164 to take a phone number and get a properly formatted one 11 | - extract_postal_code to take a zip5 string and get the right postal code 12 | 13 | 14 | ### Set up 15 | Pipenv automatically loads env vars from a .env file. 
This is ignored by git to allow 16 | you to put secrets in it. For now, we don't require any secrets for local dev, so just 17 | copy the example file: 18 | 19 | ```bash 20 | cp example.env .env 21 | ``` 22 | 23 | Add security credentials to your AWS account 24 | by going to the AWS console's [security credentials section](https://console.aws.amazon.com/iam/home?region=us-east-1#/security_credentials) and include the following in your .env file: 25 | ```bash 26 | - AWS_ACCESS_KEY_ID=dummy-access-key 27 | - AWS_SECRET_ACCESS_KEY=dummy-access-key-secret 28 | - AWS_DEFAULT_REGION=us-east-1 29 | ``` 30 | 31 | You will need to also install postgres, openssl, GDAL and pipenv to run: 32 | 33 | ```bash 34 | brew install psycopg2 35 | brew install openssl 36 | pip3 install pipenv 37 | brew install GDAL 38 | ``` 39 | 40 | In addition you will probably need to include openssl cofigurations: 41 | ```bash 42 | export LDFLAGS="-L/usr/local/opt/openssl/lib" 43 | export CPPFLAGS="-I/usr/local/opt/openssl/include" 44 | ``` 45 | 46 | ### Running Locally 47 | 48 | Start mysql: 49 | ```bash 50 | docker-compose up -d 51 | ``` 52 | 53 | You can run any `manage` command using pipenv: `pipenv run python manage.py ` 54 | For convenience, we define pipenv scripts for the most common operations: 55 | 56 | ```bash 57 | pipenv run migrate 58 | pipenv run server 59 | pipenv run test 60 | ``` 61 | 62 | ## Deploying with SLS 63 | Most folks shouldn't really need to do this after we get CI/CD working, 64 | 65 | ### Installation 66 | 67 | 1. Install nodejs 68 | 69 | ➜ supportal git:(master) ✗ node --version 70 | v12.13.1 71 | 72 | 2. Install [serverless](https://serverless.com/) globally 73 | 74 | ➜ supportal git:(master) ✗ npm i -g serverless 75 | 76 | ➜ supportal git:(master) ✗ sls --version 77 | Framework Core: 1.58.0 78 | Plugin: 3.2.5 79 | SDK: 2.2.1 80 | Components Core: 1.1.2 81 | Components CLI: 1.4.0 82 | 83 | 3. 
Install our `sls` dependencies 84 | 85 | ➜ supportal git:(master) ✗ pwd 86 | /Users/peterstein/source/tc/supportal 87 | 88 | ➜ supportal git:(master) ✗ npm i 89 | 90 | ### Doing Deeds 91 | 92 | To run a deploy 93 | 94 | ➜ supportal git:(master) ✗ sls deploy --stage dev --infrastructure dev 95 | 96 | That command will deploy the supportal application stack in our dev environment. The supportal stack in `sls` consists of: 97 | 98 | 1. _The Server Lambda_. This is the one that actually runs the API 99 | 2. _The "Expire Assignments" Lambda_. This one runs once an hour to expire assignments 100 | 3. _The Preflight Lambda_. This one actually doesn't respond to any events. Instead we can deploy separately and invoke any commands that we need to run before deploying the application. Right now, it runs migrations. In general, only our CI/CD infrastructure will care about it. 101 | 102 | To deploy to prod 103 | 104 | ➜ supportal git:(master) ✗ sls deploy --stage prod --infrastructure prod 105 | 106 | To invoke a function remotely 107 | 108 | ➜ supportal git:(master) ✗ sls invoke -f expire -s dev 109 | 110 | To invoke a django management command remotely 111 | 112 | ➜ supportal git:(master) ✗ sls wsgi -s dev manage -c "check --list-tags" 113 | admin 114 | caches 115 | compatibility 116 | database 117 | models 118 | staticfiles 119 | templates 120 | translation 121 | urls 122 | 123 | To invoke arbitrary python remotely 124 | 125 | sls wsgi -s dev exec -c "from supportal.app.models.person import Person; print(Person.objects.count())" 126 | 166595 127 | You'll need to create a superuser for your account in the shell and make sure the user 128 | has a corresponding APIKey. 
129 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.1' 2 | 3 | services: 4 | 5 | db: 6 | # Use 10.7 for compatibility with Aurora Serverless Postgres 7 | image: mdillon/postgis:10 8 | restart: always 9 | ports: 10 | - 5432:5432 11 | environment: 12 | POSTGRES_USER: django 13 | POSTGRES_PASSWORD: password 14 | POSTGRES_DB: supportal 15 | volumes: 16 | - pgdata:/var/lib/postgresql/data 17 | redis: 18 | image: redis:5.0.5 19 | ports: 20 | - 6379:6379 21 | 22 | volumes: 23 | pgdata: {} 24 | -------------------------------------------------------------------------------- /example.env: -------------------------------------------------------------------------------- 1 | POSTGRES_HOST=127.0.0.1 2 | POSTGRES_USER=django 3 | POSTGRES_PASSWORD=password 4 | DJANGO_DEBUG=1 5 | DJANGO_SECRET_KEY=1234suchsecretwow 6 | DJANGO_ADMIN_ENABLED=1 7 | DJANGO_ADMIN_ONLY=0 8 | COGNITO_USER_LOGIN_CLIENT_ID= 9 | COGNITO_USER_POOL= 10 | # Go to AWS to get the full secret 11 | COGNITO_LOCAL_USER_POOL_CLIENT_SECRET=1ivfu 12 | SHARED_REDIS_HOST=127.0.0.1 13 | TELEMETRY_DISABLE=1 14 | 15 | # To run shifter locally against our staging instance of MA: 16 | MOBILIZE_AMERICA_BASE_URL=https://staging-api.mobilize.us/v1 17 | MOBILIZE_AMERICA_ORG_ID= 18 | MOBILIZE_AMERICA_DEFAULT_VISIBILITY=PRIVATE 19 | MOBILIZE_AMERICA_API_KEY=ssm:/shared/mobilizeamerica/api_key_org_id_95 20 | -------------------------------------------------------------------------------- /get_local_pool_access_token.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | source .env 5 | curl -XPOST -H "Content-Type: application/x-www-form-urlencoded" \ 6 | -u :${COGNITO_LOCAL_USER_POOL_CLIENT_SECRET} \ 7 | https://ew-auth-local.auth.us-east-1.amazoncognito.com/oauth2/token \ 8 | --data-urlencode 
grant_type=client_credentials \ 9 | --data-urlencode client_id= 10 | -------------------------------------------------------------------------------- /make-deploy-preflight: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -uo pipefail 3 | 4 | if ! sls deploy list functions -f preflight -s $STAGE --infrastructure $INFRASTRUCTURE; then 5 | echo "Preflight is not available, so we cannot replace the function. Skipping..." 6 | else 7 | sls deploy function -f preflight -s $STAGE --infrastructure $INFRASTRUCTURE 8 | fi 9 | # DO NOT ADD ANYTHING TO THIS SCRIPT 10 | -------------------------------------------------------------------------------- /make-run-preflight: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -uo pipefail 3 | 4 | if ! sls deploy list functions -f preflight -s $STAGE --infrastructure $INFRASTRUCTURE; then 5 | echo "Preflight is not available, so we cannot replace the function. Skipping..." 6 | else 7 | sls invoke -f preflight -s $STAGE --infrastructure $INFRASTRUCTURE 8 | fi 9 | # DO NOT ADD ANYTHING TO THIS SCRIPT 10 | -------------------------------------------------------------------------------- /manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Django's command-line utility for administrative tasks.""" 3 | import os 4 | import sys 5 | 6 | 7 | def main(): 8 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "supportal.settings") 9 | try: 10 | from django.core.management import execute_from_command_line 11 | except ImportError as exc: 12 | raise ImportError( 13 | "Couldn't import Django. Are you sure it's installed and " 14 | "available on your PYTHONPATH environment variable? Did you " 15 | "forget to activate a virtual environment?" 
16 | ) from exc 17 | execute_from_command_line(sys.argv) 18 | 19 | 20 | if __name__ == "__main__": 21 | main() 22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "supportal-sls", 3 | "description": "", 4 | "version": "0.1.0", 5 | "dependencies": {}, 6 | "devDependencies": { 7 | "serverless": "^1.60.5", 8 | "serverless-associate-waf": "^1.1.1", 9 | "serverless-domain-manager": "^3.3.0", 10 | "serverless-plugin-aws-alerts": "^1.4.0", 11 | "serverless-python-requirements": "^5.0.1", 12 | "serverless-wsgi": "^1.7.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /preflight.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # pylint: skip-file 4 | # 5 | # We use this to run any preflight logic 6 | # 7 | 8 | import supportal.wsgi # isort:skip 9 | from django.core import management # isort:skip 10 | 11 | 12 | def handle(event, context): 13 | management.call_command("migrate") 14 | 15 | 16 | if __name__ == "__main__": 17 | handle(None, None) 18 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | env = 3 | CLIENT_ID_USER_LOGIN_CLIENT_ID=1234authcodeflow 4 | COGNITO_USER_LOGIN_CLIENT_ID=notarealclientid 5 | COGNITO_USER_POOL=fake_pool 6 | COGNITO_USER_POOL_URL=http://localhost:8000/fake_pool 7 | DJANGO_DEBUG=1 8 | DJANGO_SECRET_KEY=testsecret 9 | POSTGRES_HOST=127.0.0.1 10 | POSTGRES_PASSWORD=password 11 | POSTGRES_USER=django 12 | SHARED_REDIS_HOST=127.0.0.1 13 | SUPPORTAL_BASE_URL=https://localhost:8000 14 | MOBILIZE_AMERICA_BASE_URL=https://localhost:8000/mobilize/v1/ 15 | MOBILIZE_AMERICA_ORG_ID=1 16 | MOBILIZE_AMERICA_API_KEY=fakefakefake 17 | 
MOBILIZE_AMERICA_DEFAULT_VISIBILITY=PUBLIC 18 | TELEMETRY_DISABLE=1 19 | SUPPORTAL_VERIFY_USERNAME=username 20 | SUPPORTAL_VERIFY_PASSWORD=password 21 | DJANGO_SETTINGS_MODULE=supportal.settings 22 | filterwarnings = 23 | ignore::DeprecationWarning 24 | ignore::PendingDeprecationWarning 25 | -------------------------------------------------------------------------------- /scheduled_commands.py: -------------------------------------------------------------------------------- 1 | # pylint: skip-file 2 | # 3 | # We use this to run the expire handler 4 | # 5 | 6 | import supportal.wsgi # isort:skip 7 | from django.core import management # isort:skip 8 | 9 | # from ew_common.telemetry import telemetry # isort:skip 10 | 11 | 12 | # @telemetry.timed 13 | # @telemetry.report_exceptions(raise_exception=False) # suppress retries 14 | def import_mobilize_america_events(*args, **kwargs): 15 | management.call_command("import_mobilize_america_events", **kwargs) 16 | 17 | # @telemetry.timed 18 | # @telemetry.report_exceptions # allow this to retry 19 | def expire_assignments(event, context): 20 | management.call_command("expire_assignments") 21 | -------------------------------------------------------------------------------- /serverless.yml: -------------------------------------------------------------------------------- 1 | # supportal 2 | 3 | service: supportal 4 | 5 | frameworkVersion: ">=1.1.0 <2.0.0" 6 | 7 | custom: 8 | stage: ${opt:stage, env:STAGE, "dev"} 9 | infrastructure: ${opt:infrastructure, env:INFRASTRUCTURE, "dev"} 10 | projectRoot: ${env:GITHUB_WORKSPACE, "${env:PWD}/../"} 11 | 12 | wsgi: 13 | app: supportal.wsgi.application 14 | 15 | pythonRequirements: 16 | dockerizePip: true 17 | slim: true 18 | useDownloadCache: false 19 | useStaticCache: false 20 | # We created these by hand 21 | cognito: 22 | dev: 23 | arn: xxxx 24 | name: xxxx 25 | clientId: xxxx 26 | prod: 27 | arn: xxxx 28 | name: xxxx 29 | clientId: xxxx 30 | baseUrls: 31 | dev: 
"https://staging--ew-switchboard.netlify.com/" 32 | prod: "https://switchboard.elizabethwarren.com" 33 | 34 | # This public layer is maintained by development seed 35 | layers: 36 | - arn:aws:lambda:us-east-1:552188055668:layer:geolambda:1 37 | 38 | deploy: 39 | # Users can define these parameters in SSM (they will be looked up at deploy time) 40 | # or in the deploy environment or inline 41 | bucket_name: ${ssm:${BUCKET NAME~true} 42 | bucket_arn: ${ssm:${BUCKET ARN~true} 43 | 44 | vpcConfig: 45 | subnetIds: 46 | # Users can define these parameters in SSM (they will be looked up at deploy time) 47 | # or in the deploy environment or inline 48 | - "${ssm:${SUBNET ID}~true}" 49 | ... 50 | securityGroupIds: 51 | # Users can define this parameter in SSM (it will be looked up at deploy time) 52 | # or in the deploy environment or inline 53 | - "${ssm:${SECURITY GROUP ID}~true}" 54 | 55 | customDomain: 56 | domainName: DOMAIN NAME 57 | stage: ${self:custom.stage} 58 | certificateName: "*.elizabethwarren.codes" 59 | createRoute53Record: true 60 | securityPolicy: tls_1_2 # current top tier 61 | 62 | associateWaf: 63 | # OPTIONAL. A WAF is not strictly required, but it is generally useful to have 64 | # one to handle IP-level rate limiting 65 | name: WAF NAME 66 | 67 | googleDocsPrio: 68 | # These Google sheets are used for override behavior. Users need to make their 69 | # own and secure machine credentials to access those sheets. 
70 | dev: 71 | url: "https://docs.google.com/spreadsheets/d/GOOGLE SPREADSHEET ID" 72 | prod: 73 | url: "https://docs.google.com/spreadsheets/d/GOOGLE SPREADSHEET ID" 74 | 75 | mobilizeAmerica: 76 | # Users can define these parameters in SSM (they will be looked up at deploy time) 77 | # or in the deploy environment or inline 78 | dev: 79 | baseUrl: "https://staging-api.mobilize.us/v1" 80 | orgId: 95 81 | apiKey: "${ssm:${STAGING API KEY}~true}" 82 | defaultVisibility: "PUBLIC" 83 | prod: 84 | baseUrl: "https://api.mobilize.us/v1" 85 | orgId: 1316 86 | apiKey: "${ssm:${PRODUCTION API KEY}~true}" 87 | defaultVisibility: "PUBLIC" 88 | 89 | alerts: 90 | nameTemplate: $[functionName]-$[metricName]-Alarm 91 | topics: 92 | alarm: 93 | topic: ${SNS TOPIC ARN} 94 | definitions: 95 | functionInvocations: 96 | threshold: 5000 97 | functionDuration: 98 | threshold: 10000 99 | alarms: 100 | - functionThrottles 101 | - functionErrors 102 | - functionInvocations 103 | - functionDuration 104 | 105 | provider: 106 | name: aws 107 | runtime: python3.7 108 | region: ${opt:region, env:REGION, "us-east-1"} 109 | stage: ${self:custom.stage} 110 | deploymentBucket: 111 | name: ${self:custom.deploy.bucket_name} 112 | blockPublicAccess: true 113 | iamRoleStatements: 114 | - Effect: Allow 115 | Action: 116 | - "ssm:GetParameter" 117 | Resource: "*" 118 | - Effect: Allow 119 | Action: 120 | - "cognito-idp:AdminCreateUser" 121 | - "cognito-idp:AdminGetUser" 122 | - "cognito-idp:AdminUpdateUserAttributes" 123 | Resource: "${self:custom.cognito.${self:custom.infrastructure}.arn}" 124 | - Effect: Allow 125 | Action: 126 | - cloudwatch:PutMetricData 127 | Resource: "*" 128 | - Effect: Allow 129 | Action: events:PutEvents 130 | Resource: "*" 131 | - Effect: Allow 132 | Action: 133 | - "ses:CreateTemplate" 134 | - "ses:SendEmail" 135 | - "ses:SendTemplatedEmail" 136 | - "ses:SendBulkTemplatedEmail" 137 | - "ses:UpdateTemplate" 138 | Resource: "*" 139 | 140 | environment: 141 | STAGE: 
${self:custom.stage} 142 | INFRASTRUCTURE: ${self:custom.infrastructure} 143 | SUPPORTAL_BASE_URL: "${self:custom.baseUrls.${self:custom.infrastructure}}" 144 | PRIORITIZATIONS_META: "${self:custom.googleDocsPrio.${self:custom.infrastructure}.url}" 145 | GDAL_LIBRARY_PATH: "/opt/lib/libgdal.so" 146 | MOBILIZE_AMERICA_BASE_URL: "${self:custom.mobilizeAmerica.${self:custom.infrastructure}.baseUrl}" 147 | MOBILIZE_AMERICA_ORG_ID: "${self:custom.mobilizeAmerica.${self:custom.infrastructure}.orgId}" 148 | MOBILIZE_AMERICA_API_KEY: "${self:custom.mobilizeAmerica.${self:custom.infrastructure}.apiKey}" 149 | MOBILIZE_AMERICA_DEFAULT_VISIBILITY: "${self:custom.mobilizeAmerica.${self:custom.infrastructure}.defaultVisibility}" 150 | COGNITO_USER_POOL: "${self:custom.cognito.${self:custom.infrastructure}.name}" 151 | COGNITO_USER_LOGIN_CLIENT_ID: "${self:custom.cognito.${self:custom.infrastructure}.clientId}" 152 | 153 | CONFIGURATION_SET_NAME: SES CONFIGURATION SET NAME 154 | 155 | # Users can define these parameters in SSM (they will be looked up at deploy time) 156 | # or in the deploy environment or inline. Secrets like this are best maintained 157 | # in something like SSM, but careful use of environment variables works here too! 
158 | DJANGO_SECRET_KEY: "${ssm:${DJANGO_SECRET_KEY}}" 159 | GOOGLE_MAPS_API_KEY: "${ssm:${GOOGLE_MAPS_API_KEY}}" 160 | POSTGRES_HOST: "${ssm:${POSTGRES_HOST}}" 161 | POSTGRES_PASSWORD: "${ssm:${POSTGRES_PASSWORD}}" 162 | POSTGRES_USER: "${ssm:${POSTGRES_USER}}" 163 | SHARED_REDIS_HOST: "${ssm:${SHARED_REDIS_HOST}}" 164 | GOOGLE_DOCS_CREDENTIALS: "${ssm:${GOOGLE_DOCS_CREDENTIALS}}" 165 | 166 | 167 | # Packaging individually is slower, but we _have_ to do it so that we can sneakily 168 | # deploy the preflight lambda before we deploy the entire stack 169 | package: 170 | excludeDevDependencies: true 171 | individually: true 172 | exclude: 173 | - ".pytest_cache/**" 174 | - "node_modules/**" 175 | - ".vscode/**" 176 | 177 | functions: 178 | server: 179 | name: ${self:custom.stage}-supportal-server 180 | handler: wsgi_handler.handler 181 | events: 182 | - http: ANY / 183 | - http: ANY {proxy+} 184 | - schedule: 185 | rate: rate(4 minutes) 186 | timeout: 30 187 | layers: ${self:custom.layers} 188 | vpc: ${self:custom.vpcConfig} 189 | expire: 190 | name: ${self:custom.stage}-supportal-expire-assignments 191 | handler: scheduled_commands.expire_assignments 192 | layers: ${self:custom.layers} 193 | vpc: ${self:custom.vpcConfig} 194 | events: 195 | - schedule: 196 | rate: rate(1 hour) 197 | timeout: 60 198 | alarms: 199 | - name: functionDuration 200 | threshold: 60000 201 | import_mobilize_america_events: 202 | name: ${self:custom.stage}-import-mobilize-america-events 203 | handler: scheduled_commands.import_mobilize_america_events 204 | layers: ${self:custom.layers} 205 | vpc: ${self:custom.vpcConfig} 206 | events: 207 | - schedule: 208 | rate: rate(10 minutes) 209 | timeout: 600 210 | alarms: 211 | - name: functionDuration 212 | threshold: 600000 213 | update_prioritization: 214 | name: ${self:custom.stage}-update-prioritization 215 | handler: scheduled_commands.update_prioritization 216 | layers: ${self:custom.layers} 217 | vpc: ${self:custom.vpcConfig} 218 | events: 
219 | - schedule: 220 | rate: rate(20 minutes) 221 | timeout: 600 222 | alarms: 223 | - name: functionDuration 224 | threshold: 600000 225 | update_prioritization_meta: 226 | name: ${self:custom.stage}-update-prioritization-meta 227 | handler: scheduled_commands.update_prioritization_meta 228 | layers: ${self:custom.layers} 229 | vpc: ${self:custom.vpcConfig} 230 | events: 231 | - schedule: 232 | rate: rate(20 minutes) 233 | timeout: 600 234 | alarms: 235 | - name: functionDuration 236 | threshold: 600000 237 | email_users_with_expiring_assignments: 238 | name: ${self:custom.stage}-supportal-email-expiring 239 | handler: scheduled_commands.email_users_with_expiring_assignments 240 | layers: ${self:custom.layers} 241 | vpc: ${self:custom.vpcConfig} 242 | events: 243 | - schedule: cron(0 14 * * ? *) 244 | timeout: 60 245 | alarms: 246 | - name: functionDuration 247 | threshold: 60000 248 | email_inactive_users: 249 | name: ${self:custom.stage}-supportal-email-inactive-users 250 | handler: scheduled_commands.email_inactive_users 251 | layers: ${self:custom.layers} 252 | vpc: ${self:custom.vpcConfig} 253 | events: 254 | - schedule: cron(0 14 * * ? 
*) 255 | timeout: 60 256 | alarms: 257 | - name: functionDuration 258 | threshold: 60000 259 | preflight: 260 | name: ${self:custom.stage}-supportal-preflight 261 | handler: preflight.handle 262 | layers: ${self:custom.layers} 263 | vpc: ${self:custom.vpcConfig} 264 | timeout: 60 265 | alarms: 266 | - name: functionDuration 267 | threshold: 60000 268 | 269 | plugins: 270 | - serverless-wsgi 271 | - serverless-python-requirements 272 | - serverless-domain-manager 273 | - serverless-associate-waf 274 | - serverless-plugin-aws-alerts 275 | -------------------------------------------------------------------------------- /supportal/__init__.py: -------------------------------------------------------------------------------- 1 | # In a lambda environment, AWS python does some strangeness to the log handlers 2 | # Instead, we blow away their root handler and set our own 3 | import logging 4 | 5 | # from ew_common.telemetry import Telemetry 6 | 7 | root = logging.getLogger() 8 | 9 | if root.handlers: 10 | for handler in root.handlers: 11 | root.removeHandler(handler) 12 | 13 | logging.basicConfig( 14 | level=logging.INFO, 15 | format="[%(asctime)s] [%(name)s] [%(levelname)s] [%(funcName)s] [line: %(lineno)s] - %(message)s", 16 | ) 17 | 18 | # t = Telemetry.default("supportal").as_global() 19 | -------------------------------------------------------------------------------- /supportal/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Elizabeth-Warren/supportal-backend/e55b0e8fd154730bab1708f27386b2adcb18cfbc/supportal/app/__init__.py -------------------------------------------------------------------------------- /supportal/app/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | from django.contrib.auth import get_user_model 3 | from django.contrib.auth.admin import UserAdmin 4 | from django.contrib.gis import 
admin as gis_admin 5 | 6 | from supportal.app.models import APIKey, Person 7 | 8 | User = get_user_model() 9 | 10 | 11 | class CustomUserAdmin(UserAdmin, gis_admin.GeoModelAdmin): 12 | model = User 13 | list_display = [ 14 | "username", 15 | "email", 16 | "is_staff", 17 | "is_superuser", 18 | "date_joined", 19 | "last_login", 20 | ] 21 | ordering = ["username"] 22 | fieldsets = ( 23 | (None, {"fields": ("username", "password")}), 24 | ("Personal info", {"fields": ("first_name", "last_name", "email", "phone")}), 25 | ("Address", {"fields": ("address", "city", "state", "zip5", "coordinates")}), 26 | ( 27 | "Permissions", 28 | { 29 | "fields": ( 30 | "is_active", 31 | "is_staff", 32 | "is_superuser", 33 | "groups", 34 | "user_permissions", 35 | ) 36 | }, 37 | ), 38 | ("Important dates", {"fields": ("last_login", "date_joined")}), 39 | ) 40 | add_fieldsets = ( 41 | ( 42 | None, 43 | { 44 | "classes": ["wide"], 45 | "fields": [ 46 | "email", 47 | "first_name", 48 | "last_name", 49 | "password1", 50 | "password2", 51 | "is_staff", 52 | "is_superuser", 53 | ], 54 | }, 55 | ), 56 | ) 57 | 58 | 59 | class CustomPersonAdmin(admin.ModelAdmin): 60 | model = Person 61 | list_display = [ 62 | "ngp_id", 63 | "first_name", 64 | "last_name", 65 | "state", 66 | "is_vol_prospect", 67 | "vol_yes_at", 68 | "is_vol_leader", 69 | "created_at", 70 | "updated_at", 71 | ] 72 | ordering = ["-updated_at"] 73 | 74 | 75 | admin.site.register(User, CustomUserAdmin) 76 | admin.site.register(APIKey, admin.ModelAdmin) 77 | admin.site.register(Person, CustomPersonAdmin) 78 | -------------------------------------------------------------------------------- /supportal/app/apps.py: -------------------------------------------------------------------------------- 1 | from django.apps import AppConfig 2 | 3 | 4 | class AppConfig(AppConfig): 5 | name = "app" 6 | -------------------------------------------------------------------------------- /supportal/app/authentication_backend.py: 
    def authenticate(self, request):
        """Authenticate requests with JWTs from AWS Cognito

        We accept two different types of tokens:

        'id' tokens contain user-identifying information and originate from the
        authorization_code or implicit OAuth2 grants. In this case we return
        the User associated with the token. If the User doesn't exist, we create
        one since we trust Cognito as the source of truth for users in our system.

        'access' tokens do not contain user information and are generated by the
        client_credentials OAuth2 grant. These are not tied to a user but rather
        have 'scopes' that determine what the token can do. We don't support any
        scopes at this point, so we grant this token the privileges of the user
        associated with the API key in the database.
        """
        # No "Authorization: Bearer ..." header: return None so DRF can try
        # other configured authenticators instead of rejecting outright.
        token = _get_bearer_token(request)
        if not token:
            return None

        # Signature, expiry, iat and issuer are verified here; 'aud' is checked
        # manually below because only 'id' tokens carry that claim.
        token_data = validate_jwt(token)
        token_use = token_data.get("token_use")
        if token_use == "id":
            _validate_id_token_data(token_data)
            username = token_data.get("cognito:username")
            email = token_data.get("email")
            email_verified = token_data.get("email_verified")
            if not email_verified or not email or not username:
                raise exceptions.AuthenticationFailed("Invalid user state")
            try:
                user = User.objects.get_by_natural_key(username)
            except User.DoesNotExist:
                # NOTE(review): the docstring above says a missing user is
                # created, but this branch rejects instead — confirm which
                # behavior is intended.
                raise exceptions.AuthenticationFailed("User does not exist")
        elif token_use == "access":
            client_id = token_data.get("client_id")

            if not client_id:
                raise exceptions.AuthenticationFailed("Invalid access token")
            try:
                # Access tokens inherit the privileges of the API key's user.
                key = APIKey.objects.get(pk=client_id)
                user = key.user
            except APIKey.DoesNotExist:
                raise exceptions.AuthenticationFailed("Invalid access token")
        else:
            raise exceptions.AuthenticationFailed(f"Unknown token_use: {token_use}")

        if not user.is_active:
            raise exceptions.AuthenticationFailed("User deactivated")

        # Only allow admins to impersonate users
        if (
            user.is_admin
            and user.impersonated_user is not None
            and user.id != user.impersonated_user.id
        ):
            # Intentionally don't fire the user_logged_in signal when impersonating
            return user.impersonated_user, token_data

        user_logged_in.send(sender=user.__class__, request=request, user=user)
        return user, token_data
99 | """ 100 | return "Bearer: realm=api" 101 | 102 | 103 | def get_jwks(): 104 | global __COGNITO_USER_POOL_JWKS 105 | if not __COGNITO_USER_POOL_JWKS: 106 | cached_val = cache.get("cognito_user_pool_jwks") 107 | if cached_val is not None: 108 | __COGNITO_USER_POOL_JWKS = cached_val 109 | else: 110 | res = requests.get( 111 | f"{settings.COGNITO_USER_POOL_URL}/.well-known/jwks.json" 112 | ) 113 | res.raise_for_status() 114 | payload = res.json() 115 | cache.set("cognito_user_pool_jwks", payload) 116 | __COGNITO_USER_POOL_JWKS = payload 117 | if not __COGNITO_USER_POOL_JWKS: 118 | raise Exception("We did not get any JWKs from Cognito.") 119 | return __COGNITO_USER_POOL_JWKS 120 | 121 | 122 | def validate_jwt(token): 123 | """Validate the signature of the JWT token from Cognito""" 124 | jwks = get_jwks() 125 | try: 126 | return jwt.decode( 127 | token, 128 | _get_public_key(token, jwks), 129 | issuer=settings.COGNITO_USER_POOL_URL, 130 | algorithms=["RS256"], 131 | options=JWT_VERIFY_OPTS, 132 | ) 133 | except jwt.exceptions.PyJWTError: 134 | # return a generic error message and log the exception, as this might 135 | # mean that someone is tampering with tokens 136 | logging.exception("Error decoding JWT token") 137 | raise exceptions.AuthenticationFailed("Invalid token") 138 | 139 | 140 | def _get_bearer_token(request: Request): 141 | """Extract the Bearer token from the 'Authentication' header""" 142 | return _get_auth_token(request, b"bearer") 143 | 144 | 145 | def _get_auth_token(request: Request, auth_type): 146 | """Extract the Bearer token from the 'Authentication' header""" 147 | header = get_authorization_header(request) 148 | split = header.split() 149 | if len(split) == 0: 150 | return None 151 | elif len(split) != 2 or split[0].lower() != auth_type: 152 | raise exceptions.AuthenticationFailed( 153 | f"Invalid auth header. 
def _get_public_key(token, jwks):
    """Select the JWK whose 'kid' matches the token header and build an RSA key.

    Rejects tokens that lack a 'kid' or that are not signed with RS256 before
    doing any key lookup.
    """
    unverified_header = jwt.get_unverified_header(token)
    key_id = unverified_header.get("kid")
    algorithm = unverified_header.get("alg")

    if not key_id:
        raise exceptions.AuthenticationFailed("Invalid token missing 'kid' header")
    if algorithm != "RS256":
        raise exceptions.AuthenticationFailed(f"Unsupported 'alg' header {algorithm}")

    candidates = [jwk for jwk in jwks["keys"] if jwk["kid"] == key_id]
    try:
        matching_key = candidates[0]
    except IndexError:
        # An empty candidate list likely means the token was minted against a
        # different backend/user pool, so its kid is absent from our JWKS.
        logging.exception("Header key id not present in passed jwks keys")
        raise exceptions.AuthenticationFailed("Forbidden")

    return algorithms.RSAAlgorithm.from_jwk(json.dumps(matching_key))
def enum_from_name(enum_cls, name):
    """Resolve an enum member of `enum_cls` from its string name.

    Raises a DRF ValidationError (rather than a bare lookup error) so API
    callers get a 400 with a readable message when the name is unknown.
    """
    for member in enum_cls:
        if member.name == name:
            return member
    raise ValidationError(f"Unknown enum name: {name}")
cls.CONTACTED_SUCCESSFUL 77 | elif latest_event_result_category == CanvassResultCategory.UNAVAILABLE: 78 | return cls.CONTACTED_UNAVAILABLE 79 | else: 80 | return cls.CONTACTED_UNREACHABLE 81 | 82 | @classmethod 83 | def from_name(cls, name): 84 | return enum_from_name(cls, name) 85 | -------------------------------------------------------------------------------- /supportal/app/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | # Generated by Django 2.2.6 on 2019-10-21 21:51 2 | import django.contrib.auth.validators 3 | import django.db.models.deletion 4 | import django.utils.timezone 5 | from django.conf import settings 6 | from django.db import migrations, models 7 | 8 | import supportal.app.models.user 9 | 10 | 11 | class Migration(migrations.Migration): 12 | 13 | initial = True 14 | 15 | dependencies = [("auth", "0011_update_proxy_permissions")] 16 | 17 | operations = [ 18 | migrations.CreateModel( 19 | name="User", 20 | fields=[ 21 | ( 22 | "id", 23 | models.AutoField( 24 | auto_created=True, 25 | primary_key=True, 26 | serialize=False, 27 | verbose_name="ID", 28 | ), 29 | ), 30 | ("password", models.CharField(max_length=128, verbose_name="password")), 31 | ( 32 | "last_login", 33 | models.DateTimeField( 34 | blank=True, null=True, verbose_name="last login" 35 | ), 36 | ), 37 | ( 38 | "is_superuser", 39 | models.BooleanField( 40 | default=False, 41 | help_text="Designates that this user has all permissions without explicitly assigning them.", 42 | verbose_name="superuser status", 43 | ), 44 | ), 45 | ( 46 | "username", 47 | models.CharField( 48 | error_messages={ 49 | "unique": "A user with that username already exists." 50 | }, 51 | help_text="Required. 150 characters or fewer. 
Letters, digits and @/./+/-/_ only.", 52 | max_length=150, 53 | unique=True, 54 | validators=[ 55 | django.contrib.auth.validators.UnicodeUsernameValidator() 56 | ], 57 | verbose_name="username", 58 | ), 59 | ), 60 | ( 61 | "first_name", 62 | models.CharField( 63 | blank=True, max_length=30, verbose_name="first name" 64 | ), 65 | ), 66 | ( 67 | "last_name", 68 | models.CharField( 69 | blank=True, max_length=150, verbose_name="last name" 70 | ), 71 | ), 72 | ( 73 | "is_staff", 74 | models.BooleanField( 75 | default=False, 76 | help_text="Designates whether the user can log into this admin site.", 77 | verbose_name="staff status", 78 | ), 79 | ), 80 | ( 81 | "is_active", 82 | models.BooleanField( 83 | default=True, 84 | help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.", 85 | verbose_name="active", 86 | ), 87 | ), 88 | ( 89 | "date_joined", 90 | models.DateTimeField( 91 | default=django.utils.timezone.now, verbose_name="date joined" 92 | ), 93 | ), 94 | ( 95 | "email", 96 | models.EmailField( 97 | max_length=254, unique=True, verbose_name="email address" 98 | ), 99 | ), 100 | ( 101 | "groups", 102 | models.ManyToManyField( 103 | blank=True, 104 | help_text="The groups this user belongs to. 
A user will get all permissions granted to each of their groups.", 105 | related_name="user_set", 106 | related_query_name="user", 107 | to="auth.Group", 108 | verbose_name="groups", 109 | ), 110 | ), 111 | ( 112 | "user_permissions", 113 | models.ManyToManyField( 114 | blank=True, 115 | help_text="Specific permissions for this user.", 116 | related_name="user_set", 117 | related_query_name="user", 118 | to="auth.Permission", 119 | verbose_name="user permissions", 120 | ), 121 | ), 122 | ], 123 | options={ 124 | "verbose_name": "user", 125 | "verbose_name_plural": "users", 126 | "abstract": False, 127 | }, 128 | managers=[("objects", supportal.app.models.user.UserManager())], 129 | ), 130 | migrations.CreateModel( 131 | name="APIKey", 132 | fields=[ 133 | ( 134 | "client_id", 135 | models.CharField(max_length=100, primary_key=True, serialize=False), 136 | ), 137 | ("created_at", models.DateTimeField(auto_now_add=True)), 138 | ( 139 | "user", 140 | models.ForeignKey( 141 | on_delete=django.db.models.deletion.CASCADE, 142 | to=settings.AUTH_USER_MODEL, 143 | ), 144 | ), 145 | ], 146 | ), 147 | ] 148 | -------------------------------------------------------------------------------- /supportal/app/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Elizabeth-Warren/supportal-backend/e55b0e8fd154730bab1708f27386b2adcb18cfbc/supportal/app/migrations/__init__.py -------------------------------------------------------------------------------- /supportal/app/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .api_key import APIKey 2 | from .email import EmailSend 3 | from .person import Person 4 | from .user import User 5 | from .vol_prospect_models import ( 6 | MobilizeAmericaEventSignupExcpetion, 7 | VolProspectAssignment, 8 | VolProspectContactEvent, 9 | ) 10 | 
from django.db import models


class BaseModelMixin(models.Model):
    """Abstract model base adding indexed created_at/updated_at timestamps."""

    # Set once when the row is first inserted.
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    # Refreshed on every Model.save(); note auto_now does not fire for
    # queryset-level .update() calls.
    updated_at = models.DateTimeField(auto_now=True, db_index=True)

    class Meta:
        abstract = True
"expiring_contacts_email" 10 | INACTIVE_USER_EMAIL = "switchboard_inactive_user_email" 11 | BLAST_EMAIL = "switchboard_blast_send" 12 | VERIFIED_EMAIL = "switchboard_verified_email" 13 | 14 | EMAIL_CHOICES = [ 15 | (INVITE_EMAIL, "Invite Email"), 16 | (EXPIRING_PROSPECTS, "Expiring Prospects"), 17 | (INACTIVE_USER_EMAIL, "Invite Inactive Users"), 18 | (BLAST_EMAIL, "Blast Email"), 19 | (VERIFIED_EMAIL, "User Verified"), 20 | ] 21 | user = models.ForeignKey( 22 | "app.User", on_delete=models.CASCADE, related_name="email_sends" 23 | ) 24 | template_name = models.CharField( 25 | choices=EMAIL_CHOICES, db_index=True, max_length=250 26 | ) 27 | payload = JSONField(null=True) 28 | -------------------------------------------------------------------------------- /supportal/app/models/person.py: -------------------------------------------------------------------------------- 1 | from django.contrib.gis.db import models as gis_models 2 | from django.contrib.gis.db.models.functions import Distance 3 | from django.contrib.gis.measure import D 4 | from django.db import models 5 | from django.utils import timezone 6 | from localflavor.us.models import USStateField, USZipCodeField 7 | from localflavor.us.us_states import STATE_CHOICES 8 | from phonenumber_field.modelfields import PhoneNumberField 9 | 10 | from supportal.app.models.base_model_mixin import BaseModelMixin 11 | 12 | 13 | class PersonQuerySet(models.QuerySet): 14 | def from_reference(self, coordinates, radius_mi): 15 | """Returns queryset with all people ordered by proximity to reference.coordinates.""" 16 | return ( 17 | self.filter(coordinates__distance_lte=(coordinates, D(mi=radius_mi))) 18 | .annotate(distance=Distance("coordinates", coordinates)) 19 | .order_by("distance") 20 | ) 21 | 22 | def get_queryset(self): 23 | return self.filter(is_demo=False) 24 | 25 | def get_demo_queryset(self): 26 | return self.filter(is_demo=True) 27 | 28 | 29 | class Person(BaseModelMixin): 30 | objects = PersonQuerySet.as_manager() 
    def suppress(self):
        """Idempotently mark this person as suppressed.

        Sets suppressed_at to now and persists only that column; a person who
        is already suppressed keeps their original timestamp.
        """
        if not self.suppressed_at:
            self.suppressed_at = timezone.now()
            self.save(update_fields=["suppressed_at"])
69 | return "" 70 | 71 | def get_has_email(self): 72 | return self.email and self.email != "" 73 | 74 | @property 75 | def full_name(self): 76 | name_parts = [self.first_name] 77 | if self.middle_name: 78 | name_parts.append(self.middle_name) 79 | name_parts.append(self.last_name) 80 | if self.suffix: 81 | name_parts.append(self.suffix) 82 | return " ".join(name_parts) 83 | -------------------------------------------------------------------------------- /supportal/app/permissions.py: -------------------------------------------------------------------------------- 1 | from rest_framework.permissions import BasePermission 2 | 3 | 4 | def check_user_attribute(user, attribute): 5 | return hasattr(user, attribute) and bool(getattr(user, attribute)) 6 | 7 | 8 | class IsSuperuser(BasePermission): 9 | def has_permission(self, request, view): 10 | return bool(request.user and check_user_attribute(request.user, "is_superuser")) 11 | 12 | 13 | class IsSupportalAdminUser(BasePermission): 14 | def has_permission(self, request, view): 15 | return bool( 16 | request.user 17 | and ( 18 | check_user_attribute(request.user, "is_admin") 19 | or check_user_attribute(request.user, "is_staff") 20 | ) 21 | ) 22 | 23 | 24 | class HasInvite(BasePermission): 25 | def has_permission(self, request, view): 26 | return bool(request.user and check_user_attribute(request.user, "has_invite")) 27 | -------------------------------------------------------------------------------- /supportal/app/serializers/__init__.py: -------------------------------------------------------------------------------- 1 | from .person_serializer import FullPersonSerializer, LimitedPersonSerializer 2 | from .user_serializers import FullUserSerializer, MeSerializer 3 | from .vol_prospect_assignment_serializer import ( 4 | VolProspectAssignmentSerializer, 5 | VolProspectContactEventSerializer, 6 | ) 7 | -------------------------------------------------------------------------------- 
class FullPersonSerializer(serializers.ModelSerializer):
    """Read-write Person serializer exposing the full contact record.

    POST behaves as an upsert keyed on ngp_id (see create()).
    """

    class Meta:
        model = Person
        fields = [
            "id",
            "created_at",
            "updated_at",
            "myc_state_and_id",
            "ngp_id",
            "first_name",
            "middle_name",
            "last_name",
            "suffix",
            "email",
            "phone",
            "address",
            "city",
            "state",
            "zip5",
            "coordinates",
            "is_vol_prospect",
            "vol_yes_at",
            "is_vol_leader",
        ]

        # By default, ngp_id will have a uniqueness validator; we don't want to
        # validate uniqueness on ngp_id, because we allow upsert.
        # 'address' is accepted on write but never serialized back out.
        extra_kwargs = {"ngp_id": {"validators": []}, "address": {"write_only": True}}

    def create(self, validated_data):
        """Create or update based on ngp_id.

        This results in POST behaving as an upsert based on ngp_id.
        """
        ngp_id = validated_data.get("ngp_id", None)
        if ngp_id:
            # update_or_create returns (instance, created); the flag is unused.
            person, created = Person.objects.update_or_create(
                ngp_id=ngp_id, defaults=validated_data
            )
            return person
        # No ngp_id supplied: fall back to a plain create.
        return super().create(validated_data)
superuser access""" 29 | 30 | ngp_id = CharField(source="person.ngp_id", required=False) 31 | activity_status = serializers.SerializerMethodField() 32 | added_by = LimitedUserSerializer(required=False, read_only=True) 33 | is_mobilize_america_signup = serializers.BooleanField(required=False) 34 | should_send_invite_email = serializers.BooleanField(required=False) 35 | 36 | def get_activity_status(self, obj): 37 | """Currently activity status follows rules below: 38 | ACTIVE: last_login within in in past week 39 | INACTIVE: last_login > two weeks 40 | CHURNING: last_login > one week and last_week: 52 | return ActivityStatus.ACTIVE 53 | else: 54 | return ActivityStatus.CHURNING 55 | else: 56 | return ActivityStatus.NEW 57 | 58 | class Meta: 59 | model = User 60 | fields = [ 61 | "id", 62 | "is_admin", 63 | "created_at", 64 | "updated_at", 65 | "person", 66 | "first_name", 67 | "last_name", 68 | "email", 69 | "phone", 70 | "address", 71 | "city", 72 | "state", 73 | "zip5", 74 | "coordinates", 75 | "ngp_id", 76 | "last_login", 77 | "activity_status", 78 | "added_by", 79 | "self_reported_team_name", 80 | "is_mobilize_america_signup", 81 | "should_send_invite_email", 82 | ] 83 | # would want to update ngp_id on the person and not here 84 | read_only_fields = ["id", "created_at", "updated_at", "ngp_id", "is_admin"] 85 | # By default, email will have a uniqueness validator; we don't want to 86 | # validate uniqueness on email, because we allow upsert. 87 | extra_kwargs = {"email": {"validators": []}} 88 | 89 | def update(self, instance, validated_data): 90 | if "email" in validated_data: 91 | raise ValidationError("Changing email is not allowed through this endpoint") 92 | return super().update(instance, validated_data) 93 | 94 | def create(self, validated_data): 95 | """Create or update based on email. 96 | 97 | This results in POST behaving as an upsert based on email. 
98 | """ 99 | email = validated_data.pop("email") 100 | is_mobilize_america_signup = validated_data.pop( 101 | "is_mobilize_america_signup", None 102 | ) 103 | should_send_invite_email = validated_data.pop("should_send_invite_email", False) 104 | request = self.context.get("request") 105 | if not request or not request.user: 106 | raise ValidationError("Cannot create outside of a request context") 107 | 108 | validated_data.update(added_by=request.user) 109 | user = None 110 | try: 111 | existing_user = User.objects.get_user_by_email(email) 112 | for k, v in validated_data.items(): 113 | setattr(existing_user, k, v) 114 | existing_user.save() 115 | user = existing_user 116 | except User.DoesNotExist: 117 | user = User.objects.create_user( 118 | None, email, should_send_invite_email, **validated_data 119 | ) 120 | if not is_mobilize_america_signup and user.verified_at is None: 121 | # we pass in is_demo for the mobilize america sync only 122 | VolProspectAssignment.objects.delete_demo_assignments(user) 123 | user.verified_at = timezone.now() 124 | user.save() 125 | return user 126 | 127 | 128 | class MeSerializer(serializers.ModelSerializer): 129 | """Limited read-write User serializer for users to access their own data""" 130 | 131 | class Meta: 132 | model = User 133 | fields = [ 134 | "id", 135 | "email", 136 | "first_name", 137 | "last_name", 138 | "phone", 139 | "address", 140 | "city", 141 | "state", 142 | "zip5", 143 | "coordinates", 144 | "self_reported_team_name", 145 | "is_admin", 146 | "created_at", 147 | "updated_at", 148 | ] 149 | # Note: Users must not be allowed to change their email address 150 | read_only_fields = ["id", "email", "created_at", "updated_at", "is_admin"] 151 | 152 | # We do our own phone validation. 
153 | extra_kwargs = {"phone": {"validators": []}} 154 | 155 | def validate_phone(self, value): 156 | # TODO: format number here 157 | # canonical_phone = extract_phone_number_e164(value) 158 | canonical_phone= value 159 | if not canonical_phone: 160 | raise serializers.ValidationError("Phone number invalid format: {value}") 161 | return canonical_phone 162 | 163 | def validate(self, data): 164 | zip5 = data.get("zip5") 165 | if zip5: 166 | pieces = [data.get("address"), data.get("city"), data.get("state"), zip5] 167 | full_address = ", ".join((filter(None, pieces))) 168 | location = full_address 169 | # note will need to geocode aka get lat/long from address 170 | # location = geocode(full_address, settings.GOOGLE_MAPS_API_KEY) 171 | if location: 172 | data["coordinates"] = Point(location["lng"], location["lat"], srid=4326) 173 | else: 174 | raise serializers.ValidationError( 175 | "No location from address geocode: {data}" 176 | ) 177 | 178 | return super().validate(data) 179 | -------------------------------------------------------------------------------- /supportal/app/serializers/vol_prospect_assignment_serializer.py: -------------------------------------------------------------------------------- 1 | from django.core.serializers import serialize 2 | from rest_framework import serializers 3 | 4 | from supportal.app.common.enums import VolProspectAssignmentStatus 5 | from supportal.app.models import VolProspectAssignment 6 | from supportal.app.serializers import LimitedPersonSerializer 7 | from supportal.app.serializers.vol_prospect_contact_event_serializer import ( 8 | VolProspectContactEventSerializer, 9 | ) 10 | 11 | 12 | class LatLang(serializers.Serializer): 13 | latitude = serializers.IntegerField() 14 | longitude = serializers.IntegerField() 15 | 16 | 17 | class VolProspectAssignmentSerializer(serializers.ModelSerializer): 18 | person = LimitedPersonSerializer(read_only=True) 19 | location = LatLang(required=False, write_only=True) 20 | 
class VolProspectContactEventSerializer(serializers.ModelSerializer):
    """Serializer for VolProspectContactEvent.

    Converts the CanvassResult / CanvassResultCategory enums to and from
    their string names on the wire.
    """

    # TODO: drf-yasg doesn't understand our Enum -> String conversion without
    # a little help. Do something like this to get strings rather than numbers
    # in the docs: https://github.com/axnsan12/drf-yasg/issues/478

    class Meta:
        model = VolProspectContactEvent
        fields = [
            "id",
            "vol_prospect_assignment",
            "ma_event_id",
            "ma_timeslot_ids",
            "result_category",
            "result",
            "metadata",
            "created_at",
            "updated_at",
            "note",
        ]
        read_only_fields = [
            "id",
            "user",
            "result_category",  # gets set automatically from result
            "created_at",
            "updated_at",
        ]
        # NOTE(review): "required_fields" is not a standard ModelSerializer
        # Meta option -- DRF ignores it. Kept as documentation of intent.
        required_fields = ["vol_prospect_assignment", "result"]

    def to_internal_value(self, data):
        """Translate enum names in the incoming payload into enum values.

        Works on a shallow copy so the caller's payload (which may be an
        immutable QueryDict) is never mutated, and reports a missing
        "result" as a ValidationError (HTTP 400) instead of letting the
        KeyError escape as an HTTP 500.
        """
        data = data.copy()
        if "result" not in data:
            raise ValidationError({"result": ["This field is required."]})
        data["result"] = CanvassResult.from_name(data["result"])
        result_cat = data.get("result_category")
        if result_cat:
            data["result_category"] = CanvassResultCategory.from_name(result_cat)
        return super().to_internal_value(data)

    def to_representation(self, instance):
        """Render enum fields by name; tolerate a null result_category."""
        ret = super().to_representation(instance)
        ret["result"] = instance.result.name
        ret["result_category"] = (
            instance.result_category.name if instance.result_category else None
        )
        return ret

    def create(self, validated_data):
        """Create the contact event through its assignment.

        Raises NotFound (rather than a 403) when the assignment belongs to
        another user -- presumably to avoid leaking that it exists.
        """
        request = self.context.get("request")
        if not request or not request.user:
            raise ValidationError("Cannot create outside of a request context")
        assignment = validated_data["vol_prospect_assignment"]
        if assignment.user != request.user:
            raise NotFound()
        return assignment.create_contact_event(**validated_data)
class UnsubscribeView(GenericAPIView):
    """Allow a user to unsubscribe from supportal emails.

    Only requires an email address, which is used to look up the user and
    stamp their ``unsubscribed_at`` field.
    """

    permission_classes = [AllowAny]
    authentication_classes = []

    def post(self, request, *args, **kwargs):
        """Mark the user with the given email as unsubscribed.

        Returns 400 when the email is missing or unknown; 200 on success.
        """
        email = request.data.get("email")
        if not email:
            # A missing "email" key used to raise KeyError -> HTTP 500;
            # report a client error instead.
            return Response(
                {"message": "An email address is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            user = User.objects.get_user_by_email(email=email)
        except User.DoesNotExist:
            return Response(
                {"message": "No User with that email"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        user.unsubscribed_at = timezone.now()
        user.save()
        return Response(None, status=status.HTTP_200_OK)
class VerifyView(GenericAPIView):
    """APIView for verifying a user (admin-only)."""

    permission_classes = [IsSupportalAdminUser]

    def _email_verified_user(self, email):
        """Send the 'you are verified' transactional email."""
        payload = {"email": email, "transactional": True}
        get_email_service().send_email(
            template_name=EmailSend.VERIFIED_EMAIL,
            from_email=settings.FROM_EMAIL,
            recipient=email,
            reply_to_email=settings.REPLY_TO_EMAIL,
            configuration_set_name=settings.CONFIGURATION_SET_NAME,
            payload=payload,
            application_name="supportal",
        )

    def _verify_single_user(self, email):
        """Verify an existing user (creating one if needed) and notify them."""
        try:
            user = User.objects.get_user_by_email(email=email)
            VolProspectAssignment.objects.delete_demo_assignments(user)
        except User.DoesNotExist:
            user = User.objects.create_user(None, email)
            # telemetry.metric(Metric("UsersCreatedViaVerify", 1, unit="Count"))
        if user.verified_at is None:
            user.verified_at = timezone.now()
            user.save()
            self._email_verified_user(email)

    def _verify_users(self, email_list):
        for email in email_list:
            self._verify_single_user(email)

    def post(self, request, *args, **kwargs):
        """Verify users given as "emails" (list) and/or "email" (single).

        Copies the incoming list so the request payload is never mutated
        in place.
        """
        emails = list(request.data.get("emails", []))
        email = request.data.get("email")
        if email:
            emails.append(email)
        self._verify_users(emails)
        return Response(None, status=status.HTTP_200_OK)


class InviteViewSet(viewsets.ViewSet):
    """
    Invite a user via email. Each User gets an invite after they have
    talked to 10 contacts. The person they invite must talk to 10
    prospective users before the inviter gets another invite.
    """

    permission_classes = [IsSupportalAdminUser | HasInvite]

    def _create_user_from_email(self, email, request_user):
        """
        Create a user for the given email address.

        Returns the new User, or None when a user with that email already
        exists (or, defensively, when multiple users match).
        """
        try:
            User.objects.get_user_by_email(email=email)
        except User.DoesNotExist:
            return User.objects.create_user(
                email,
                email,
                should_send_invite_email=True,
                added_by=request_user,
                verified_at=timezone.now(),
            )
        except User.MultipleObjectsReturned:
            pass
        return None

    @action(detail=False, permission_classes=[IsAuthenticated])
    def available(self, request, *args, **kwargs):
        """Report whether the user can send an invite, plus invite progress."""
        user = request.user
        latest_invite = user.latest_invite
        latest_invite_object = {}

        if latest_invite:
            latest_invite_object = {
                "email": latest_invite.email,
                "remaining_contacts_count": latest_invite.remaining_contacts_count,
            }
        response_data = {
            "has_invite": user.has_invite,
            "remaining_contacts_count": user.remaining_contacts_count,
            "latest_invite": latest_invite_object,
        }

        return Response(response_data, status=status.HTTP_200_OK)

    def create(self, request, *args, **kwargs):
        """Send an invite to the given email; 400 on missing/invalid address."""
        email = request.data.get("email", "").strip()
        if not email:
            return Response(
                {"message": "Must include an email address"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            validator = EmailValidator()
            validator(email)
        except ValidationError:
            return Response(
                {"message": "Invalid email address given"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        created_user = self._create_user_from_email(email, request.user)

        if created_user:
            return Response(status=status.HTTP_201_CREATED)

        # If the user tried to send an invite to a user who already
        # existed: no-op (204).
        return Response(status=status.HTTP_204_NO_CONTENT)
class StandardResultsSetPagination(PageNumberPagination):
    """Default page-number pagination: 100 per page, client-tunable via
    the ``page_size`` query parameter, capped at 1000."""

    page_size = 100
    page_size_query_param = "page_size"
    max_page_size = 1000


class PersonViewSet(viewsets.ModelViewSet):
    """API endpoint that allows people to be viewed or edited."""

    permission_classes = [IsAdminUser]
    queryset = Person.objects.all().order_by("-vol_yes_at")
    serializer_class = FullPersonSerializer

    def get_serializer(self, *args, **kwargs):
        """Enable bulk writes: use many=True whenever a list is posted."""
        incoming = kwargs.get("data", {})
        if isinstance(incoming, list):
            kwargs["many"] = True
        return super().get_serializer(*args, **kwargs)
class FullUserViewSet(viewsets.ModelViewSet):
    """Full CRUD User API for superusers (supportal admins)."""

    queryset = User.objects.all().filter(is_active=True).order_by("-created_at")
    serializer_class = FullUserSerializer
    pagination_class = StandardResultsSetPagination
    permission_classes = [IsSupportalAdminUser]
    filter_backends = [DjangoFilterBackend, filters.OrderingFilter]
    filterset_fields = ["state"]
    ordering_fields = ["state", "city", "email"]

    def _bulk_create(self, request):
        """Create many users from a list payload.

        Invalid entries do not abort the batch; each failure is reported
        in-place in the response list alongside the successes.
        """
        response = []

        for user in request.data:
            serializer = self.get_serializer(data=user, context={"request": request})

            if serializer.is_valid(raise_exception=False):
                self.perform_create(serializer)
                response.append(serializer.data)
            else:
                response.append(
                    {"error": "Invalid user creation", "email": user.get("email")}
                )
        # Use the named constant for consistency with the rest of the file
        # (was a bare literal 201).
        return Response(response, status=status.HTTP_201_CREATED)

    def create(self, request, *args, **kwargs):
        """Wrapping this to allow the request object to be sent to the
        user serializer. The requesting user gets set as added_by."""
        if isinstance(request.data, list):
            return self._bulk_create(request)
        else:
            serializer = self.serializer_class(
                data=request.data, context={"request": request}
            )
            serializer.is_valid(raise_exception=True)
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)

    def destroy(self, request, *args, **kwargs):
        """Soft-delete: deactivate the user instead of deleting the row."""
        instance = self.get_object()
        instance.is_active = False
        instance.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    @action(detail=False, methods=["get"])
    def meta(self, *args, **kwargs):
        """Aggregate counts of active users, overall and per state."""
        data = (
            User.objects.filter(is_active=True)
            .values("state")
            .annotate(count=Count("state"))
            .order_by("state")
        )
        response_data = {
            "all": {"count": User.objects.all().filter(is_active=True).count()},
            "states": list(data),
        }
        return Response(response_data, status=status.HTTP_200_OK)


class MeView(GenericAPIView):
    """User API for normal users to read and update their own information"""

    serializer_class = MeSerializer

    def get(self, request, *args, **kwargs):
        s = self.serializer_class(request.user)
        return Response(s.data)

    def patch(self, request, *args, **kwargs):
        s = self.serializer_class(request.user, data=request.data, partial=True)
        if s.is_valid():
            s.save()
            # NOTE(review): 201 on a PATCH is unconventional (200 would be
            # standard); kept as-is since clients may rely on it.
            return Response(s.data, status=status.HTTP_201_CREATED)
        return Response(s.errors, status=status.HTTP_400_BAD_REQUEST)
class VolProspectAssignmentViewSet(
    mixins.RetrieveModelMixin,
    mixins.UpdateModelMixin,
    mixins.ListModelMixin,
    viewsets.GenericViewSet,
):
    """Read/update endpoints for the requesting user's prospect assignments."""

    serializer_class = VolProspectAssignmentSerializer
    filter_backends = [filters.OrderingFilter]
    ordering_fields = ["created_at"]
    base_throttle_scope = "vol_prospect_assignments"

    def get_throttles(self):
        """Custom action-level throttling using ScopedRateThrottle

        Based on:
        https://www.pedaldrivenprogramming.com/2017/05/throttling-django-rest-framwork-viewsets/
        """
        if self.action == "assign":
            self.throttle_scope = f"{self.base_throttle_scope}.assign"
        else:
            self.throttle_scope = self.base_throttle_scope
        return super().get_throttles()

    def get_queryset(self):
        """Unexpired assignments for the user, optionally filtered by status.

        Unverified users only ever see the demo queryset.
        """
        user = self.request.user

        if not user.verified_at:
            return VolProspectAssignment.objects.get_demo_queryset().filter(user=user)

        queryset = VolProspectAssignment.objects.filter(
            user=user, expired_at__isnull=True, person__is_demo=False
        )
        status_param = self.request.query_params.get("status", None)
        if status_param:
            vpa_status = VolProspectAssignmentStatus.from_name(status_param)
            if vpa_status.result_category:
                queryset = queryset.filter(
                    vol_prospect_contact_events__result_category=vpa_status.result_category,
                    suppressed_at__isnull=not vpa_status.suppressed,
                )
            else:
                queryset = queryset.filter(
                    vol_prospect_contact_events=None,
                    suppressed_at__isnull=not vpa_status.suppressed,
                )
        return queryset

    def update(self, request, *args, **kwargs):
        """Full updates are not supported; clients must PATCH."""
        raise MethodNotAllowed("PUT")

    # TODO: override swagger documentation
    @action(detail=False, methods=["post"])
    def assign(self, request, format=None, *args, **kwargs):
        """
        Assign 10 vol prospects to the authenticated user. Note that if the user
        is not verified, the assignments are made with demo people.
        """
        if VolProspectAssignment.objects.has_outstanding_assignments(request.user):
            return Response(
                {"error": "User has outstanding assignments"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not request.user.coordinates:
            return Response(
                {"error": "User missing coordinates"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        location = request.data.get("location", None)
        if location:
            try:
                location = Point(
                    float(location["longitude"]),
                    float(location["latitude"]),
                    srid=4326,
                )
            except (KeyError, TypeError, ValueError):
                # A malformed location payload used to raise straight through
                # as an HTTP 500; surface it as a client error instead.
                raise ValidationError(
                    {"location": ["location requires numeric longitude and latitude"]}
                )
        VolProspectAssignment.objects.assign(request.user, location=location)
        return Response(None, status=status.HTTP_201_CREATED)

    def partial_update(self, request, *args, **kwargs):
        """PATCH status (only SKIPPED has an effect: suppress) and/or note."""
        s = request.data.get("status")
        note = request.data.get("note")
        vpa = self.get_object()

        # get_object 404s if the request.user and vpa.user don't match;
        # keeping this check as extra protection.
        if vpa.user != request.user:
            return Response(None, status=status.HTTP_403_FORBIDDEN)

        if (
            s
            and VolProspectAssignmentStatus.from_name(s)
            == VolProspectAssignmentStatus.SKIPPED
        ):
            vpa.suppress()

        if note is not None:
            vpa.note = note
            vpa.save()

        return Response(None, status=status.HTTP_204_NO_CONTENT)
class VolProspectContactEventViewSet(
    mixins.CreateModelMixin,
    mixins.ListModelMixin,
    mixins.UpdateModelMixin,
    mixins.RetrieveModelMixin,
    viewsets.GenericViewSet,
):
    """Create/read/update endpoints for the requesting user's contact events."""

    serializer_class = VolProspectContactEventSerializer
    filter_backends = [DjangoFilterBackend, filters.OrderingFilter]
    filterset_fields = ["vol_prospect_assignment"]
    ordering_fields = ["created_at"]
    ordering = ["-created_at"]
    base_throttle_scope = "vol_prospect_contact_events"

    def get_throttles(self):
        """Throttle creations only; reads are unthrottled (and slated for
        removal anyway)."""
        self.throttle_scope = (
            f"{self.base_throttle_scope}.create" if self.action == "create" else None
        )
        return super().get_throttles()

    def get_queryset(self):
        """Contact events attached to the user's unexpired assignments."""
        return VolProspectContactEvent.objects.filter(
            vol_prospect_assignment__user=self.request.user,
            vol_prospect_assignment__expired_at__isnull=True,
        )

    def create(self, request, *args, **kwargs):
        """Create a contact event, translating Mobilize America signup
        failures into 400 responses."""
        serializer = self.serializer_class(
            data=request.data, context={"request": request}
        )
        serializer.is_valid(raise_exception=True)
        try:
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        except MobilizeAmericaEventSignupExcpetion as e:
            return Response(e.message, status=status.HTTP_400_BAD_REQUEST)
with open(os.path.join(BASE_DIR, "supportal", "tests", "jwk_rsa_pub.json")) as f:
    JWK_PUBLIC_KEY = json.load(f)

COGNITO_USER_POOL_JWKS = {"keys": [JWK_PUBLIC_KEY]}

# Avoid cache collisions by giving each test run a unique prefix
settings.CACHES["default"]["KEY_PREFIX"] = f"{int(time.time())}-test-supportal"
settings.SHIFTER_IP_RATE_LIMIT = "100/min"


@pytest.fixture(autouse=True)
def jwks_setup(mocker):
    """Stub out the Cognito JWKS fetch with the test RSA public key."""
    mocker.patch(
        "supportal.app.authentication_backend.get_jwks",
        return_value=COGNITO_USER_POOL_JWKS,
    )


@pytest.fixture(autouse=True)
def no_nplusone():
    """Raise an exception on any nplusone query in any test."""
    with profiler.Profiler():
        yield


def _user(**extra_fields):
    """Build a plain user without touching Cognito."""
    return User.objects.create_user(
        "testuser", "fake@fake.com", skip_cognito=True, **extra_fields
    )


def _superuser():
    """Build a superuser without touching Cognito."""
    return User.objects.create_superuser(
        "super", "superfake@fake.com", "password2", skip_cognito=True
    )


def _recipe(name):
    """Shorthand for baking a named recipe from supportal.tests."""
    return baker.make_recipe(f"supportal.tests.{name}")


@pytest.fixture()
def user():
    return _user()


@pytest.fixture()
def supportal_admin_user():
    return _user(is_admin=True)


@pytest.fixture
def superuser():
    return _superuser()


@pytest.fixture()
def auth(user):
    return utils.id_auth(user)


@pytest.fixture()
def auth_supportal_admin_user(supportal_admin_user):
    return utils.id_auth(supportal_admin_user)


@pytest.fixture()
def auth_superuser(superuser):
    return utils.id_auth(superuser)


@pytest.fixture()
def api_client():
    return APIClient()


# --- Leader / prospect recipe fixtures --------------------------------------


@pytest.fixture
def mattapan_leader():
    return _recipe("mattapan_leader")


@pytest.fixture
def roslindale_leader():
    return _recipe("roslindale_leader")


@pytest.fixture
def norwood_prospect():
    return _recipe("norwood_prospect")


@pytest.fixture
def roslindale_prospect():
    return _recipe("roslindale_prospect")


@pytest.fixture
def jamaica_plain_prospect():
    return _recipe("jamaica_plain_prospect")


@pytest.fixture
def west_roxbury_prospect():
    return _recipe("west_roxbury_prospect")


@pytest.fixture
def cambridge_leader():
    return _recipe("cambridge_leader")


@pytest.fixture
def cambridge_prospect():
    return _recipe("cambridge_prospect")


@pytest.fixture
def somerville_prospect():
    return _recipe("somerville_prospect")


@pytest.fixture
def medford_prospect():
    return _recipe("medford_prospect")


@pytest.fixture
def malden_prospect():
    return _recipe("malden_prospect")


@pytest.fixture
def malden_prospect_suppressed():
    return _recipe("malden_prospect_suppressed")


@pytest.fixture
def cambridge_prospect_assignment():
    return _recipe("cambridge_prospect_assignment")


@pytest.fixture
def roslindale_prospect_assignment():
    return _recipe("roslindale_prospect_assignment")


@pytest.fixture
def cambridge_prospect_unreachable_event(cambridge_prospect_assignment):
    return cambridge_prospect_assignment.create_contact_event(
        result=CanvassResult.UNREACHABLE_MOVED, metadata={"moved_to": "CA"}, note="test"
    )


@pytest.fixture
def california_prospect():
    return _recipe("california_prospect")


@pytest.fixture()
def mattapan_leader_user():
    return _recipe("mattapan_leader_user")


@pytest.fixture()
def roslindale_leader_user():
    return _recipe("roslindale_leader_user")


@pytest.fixture()
def hayes_valley_leader_user():
    return _recipe("hayes_valley_leader_user")


@pytest.fixture()
def cambridge_leader_user():
    return _recipe("cambridge_leader_user")


@pytest.fixture()
def cambridge_event_signup():
    return _recipe("cambridge_event_signup")


@pytest.fixture()
def cambridge_event():
    return _recipe("cambridge_event")


@pytest.fixture()
def virtual_phone_bank():
    return set_mobilize_america_event_raw(_recipe("virtual_phone_bank"))


@pytest.fixture()
def high_pri_virtual_phone_bank():
    return set_mobilize_america_event_raw(_recipe("high_pri_virtual_phone_bank"))


@pytest.fixture()
def iowa_state():
    return _recipe("iowa_state")


@pytest.fixture()
def ia_zip5():
    return USZip5.objects.create(
        zip5="52240", state="IA", coordinates=Point(-91.5016, 41.6355, srid=4326)
    )
srid=4326) 228 | ) 229 | 230 | 231 | @pytest.fixture() 232 | def nh_zip5(): 233 | return USZip5.objects.create( 234 | zip5="03037", state="NH", coordinates=Point(-71.2513, 43.1378, srid=4326) 235 | ) 236 | 237 | 238 | @pytest.fixture() 239 | def nv_zip5(): 240 | return USZip5.objects.create( 241 | zip5="89006", state="NV", coordinates=Point(-114.9721, 35.9279, srid=4326) 242 | ) 243 | 244 | 245 | @pytest.fixture() 246 | def sc_zip5(): 247 | return USZip5.objects.create( 248 | zip5="29409", state="SC", coordinates=Point(-79.9605, 32.7961, srid=4326) 249 | ) 250 | 251 | 252 | @pytest.fixture() 253 | def ca_zip5(): 254 | return USZip5.objects.create( 255 | zip5="94102", state="CA", coordinates=Point(-124.4167, 37.7813, srid=4326) 256 | ) 257 | 258 | 259 | @pytest.fixture() 260 | def ma_zip5(): 261 | return USZip5.objects.create( 262 | zip5="02130", state="MA", coordinates=Point(-71.113845, 42.312759, srid=4326) 263 | ) 264 | -------------------------------------------------------------------------------- /supportal/docs.py: -------------------------------------------------------------------------------- 1 | from django.conf.urls import url 2 | from drf_yasg import openapi 3 | from drf_yasg.views import get_schema_view 4 | from rest_framework import permissions 5 | 6 | # TODO: Password-protect the docs. AWS Lambda remaps WWW-Authenticate, so we can't 7 | # use this approach: 8 | # class DocsBasicAuth(BasicAuthentication): 9 | # """Basic Auth used to provide simple password protection for doc pages 10 | # 11 | # Using the shared password displays docs for regular users. To see the full 12 | # API documentation pass your superuser name and password to the browser 13 | # basic auth prompt. 
schema_view = get_schema_view(
    openapi.Info(
        title="Supportal API",
        default_version="v1",
        description="API Docs for the Supportal!",
    ),
    public=True,
    permission_classes=(permissions.AllowAny,),
)

# URL patterns for the swagger/redoc documentation views. The named group
# "format" is required by drf-yasg's without_ui view so that /swagger.json
# and /swagger.yaml resolve (the group name had been lost from the pattern).
documentation_urls = [
    url(
        r"^swagger(?P<format>\.json|\.yaml)$",
        schema_view.without_ui(cache_timeout=0),
        name="schema-json",
    ),
    url(
        r"^swagger/$",
        schema_view.with_ui("swagger", cache_timeout=0),
        name="schema-swagger-ui",
    ),
    url(
        r"^redoc/$", schema_view.with_ui("redoc", cache_timeout=0), name="schema-redoc"
    ),
]
class GoogleSheetsClient:
    """Thin wrapper around pygsheets for reading worksheet rows."""

    # Class-level placeholder; replaced with a real pygsheets client in __init__.
    client = None

    def __init__(self, credentials):
        """credentials: JSON string of authorized-user credentials."""
        self.client = self.__get_sheets_client(credentials)

    def __get_sheets_client(self, json_credentials):
        """Build an authorized pygsheets client from a JSON credentials blob."""
        credentials = Credentials.from_authorized_user_info(
            json.loads(json_credentials), scopes=SCOPES
        )
        return pygsheets.client.Client(credentials)

    def __get_worksheet(self, url, tab_name):
        """Open the spreadsheet at `url` and return the tab named `tab_name`."""
        spreadsheet = self.client.open_by_url(url)
        worksheet = spreadsheet.worksheet_by_title(tab_name)
        return worksheet

    def get_values_from_sheet(self, url, tab_name, columns=()):
        """Get the values from a sheet at the given url/tab name.

        Can specify the columns to filter and get back. Returns a list of
        dicts like [{"column1": value, "column2": value}, {"column1": ...}].

        Note: the default for `columns` used to be a mutable list ([]) --
        a shared-default hazard; an empty tuple behaves identically.
        """
        record_dictionaries = self.__get_worksheet(url, tab_name).get_all_records()
        if not columns:
            return record_dictionaries
        wanted = set(columns)  # O(1) membership tests per cell
        return [
            {k: v for k, v in row_dictionary.items() if k in wanted}
            for row_dictionary in record_dictionaries
        ]
def get_error_code_and_status(ma_response):
    """Build the error payload we send back to the client, plus the HTTP
    status code taken from the original Mobilize America response.

    Bug fix: the error dict is classified *before* "status_code" is popped
    off of it. Previously the pop ran first, so ErrorCodes.from_error()
    could never see the status code and the MA_500 mapping was unreachable
    through this function.
    """
    ma_error = ma_response.get("error", {})
    error_enum = ErrorCodes.from_error(ma_error)
    # Pop after classification so the status code is not echoed back inside
    # the "detail" payload (matching the original response shape).
    status_code = ma_error.pop("status_code", None)
    if status_code is None:
        status_code = status.HTTP_400_BAD_REQUEST
    error_response = generate_error_for_code(error_enum.name, ma_error)
    return error_response, status_code


def format_zip_error():
    """Format a zip error response to imitate Mobilize America's zip error --
    this ensures consistency to the frontend.
    """
    return {"zip5": ["The zip entered is not valid."]}


def generate_error_for_code(code_name, full_error):
    """Generate an error response object from the code and error message.

    Used to pass through the Mobilize America error so that the front end
    has access to it. Invalid-zip errors are normalized to a VALIDATION
    code with a synthetic zip5 detail.
    """
    detail = full_error.get("detail")
    if code_name == ErrorCodes.ZIP_INVALID.name:
        return {"code": ErrorCodes.VALIDATION.name, "detail": format_zip_error()}
    return {"code": code_name, "detail": detail or full_error}


class ErrorCodes(IntEnum):
    """Error codes surfaced to the client.

    The enum does not cover every possible error; most names mirror the HTTP
    status constants documented at
    https://www.django-rest-framework.org/api-guide/status-codes/ with the
    "HTTP_XYZ_" prefix stripped, plus Mobilize-America-specific cases.
    """

    TIMESLOT_FULL = 1
    TIMESLOT_NOT_FOUND = 2
    TIMESLOT_NOT_ASSOCIATED_WITH_EVENT = 3
    ZIP_INVALID = 4
    TIMESLOT_IN_THE_PAST = 5
    UNKNOWN = 6
    GENERIC_PERSON = 7
    NOT_FOUND = 8
    GENERIC_TIMESLOT = 9
    MA_500 = 10
    INVALID_EVENT_ID = 11
    VALIDATION = 12
    BAD_REQUEST = 13
    UNAUTHORIZED = 14
    FORBIDDEN = 15
    METHOD_NOT_ALLOWED = 17
    REQUEST_TIMEOUT = 18
    TOO_MANY_REQUESTS = 19

    @classmethod
    def _map_error_to_code(cls, error_type, error_string):
        """Map one MA error message (and its section) to an ErrorCodes member.

        Exact-string matches come first; `error_type` is only consulted as a
        fallback for unrecognized messages in a known section.
        """
        if error_string == "Cannot create an attendance for a timeslot in the past.":
            return cls.TIMESLOT_IN_THE_PAST
        elif error_string == "Timeslot does not exist.":
            return cls.TIMESLOT_NOT_FOUND
        elif error_string == "Timeslot is full.":
            return cls.TIMESLOT_FULL
        elif error_string == "Timeslot is not associated with event.":
            return cls.TIMESLOT_NOT_ASSOCIATED_WITH_EVENT
        elif error_string == "Please enter a valid 5-digit US zipcode.":
            return cls.ZIP_INVALID
        elif error_string.endswith("does not appear to be a valid U.S. zipcode."):
            return cls.ZIP_INVALID
        elif error_string == "Not found.":
            return cls.NOT_FOUND
        elif error_type == "person":
            return cls.GENERIC_PERSON
        elif error_type == "timeslots":
            return cls.GENERIC_TIMESLOT
        else:
            return cls.UNKNOWN

    @classmethod
    def from_error(cls, error_object):
        """Classify a Mobilize America error dict into an ErrorCodes member.

        Later checks win when multiple sections are present; a 5xx
        "status_code" (when still present on the dict) trumps everything,
        and anything unrecognized falls back to UNKNOWN.
        """
        error_code = None
        postal_code_error = error_object.get("person", {}).get("postal_code", [])
        if len(postal_code_error) > 0:
            error_code = cls._map_error_to_code("person", postal_code_error[0])

        timeslot_error = error_object.get("timeslots", [])
        if len(timeslot_error) > 0:
            error_code = cls._map_error_to_code("timeslots", timeslot_error[0])

        toplevel_zipcode_error = error_object.get("zipcode", [])
        if len(toplevel_zipcode_error) > 0:
            error_code = cls._map_error_to_code("zipcode", toplevel_zipcode_error[0])

        event_not_found = error_object.get("detail")
        if event_not_found:
            error_code = cls._map_error_to_code("", event_not_found)

        ma_500_error = error_object.get("status_code", None)

        if ma_500_error and ma_500_error >= 500:
            error_code = cls.MA_500

        if error_code is None:
            error_code = cls.UNKNOWN
        return error_code
class BaseRecommendationStrategy(ABC):
    """
    Base class for recommendation strategies

    Implementations must override the find_events method, which is a classmethod
    for now because we don't expect strategies to be stateful.
    """

    @classmethod
    @abstractmethod
    def find_events(
        cls,
        limit: int,
        zip5: Optional[str] = None,
        max_dist: Optional[int] = None,
        tag_ids: Optional[List[int]] = None,
        timeslot_start: Optional[datetime] = None,
        timeslot_end: Optional[datetime] = None,
        event_types: Optional[List[str]] = None,
        is_virtual: bool = False,
        states: Optional[List[str]] = None,
    ) -> List[Dict[str, Any]]:
        """Return up to `limit` raw Mobilize-America-shaped event dicts."""
        pass


class MobilizeAmericaAPIRecommendationStrategy(BaseRecommendationStrategy):
    """Recommend events by querying the Mobilize America events API live."""

    @classmethod
    def find_events(
        cls,
        limit,
        zip5=None,
        max_dist=None,
        tag_ids=None,
        timeslot_start=None,
        timeslot_end=None,
        event_types=None,
        is_virtual=False,
        states=None,
    ) -> List[Dict[str, Any]]:
        # MA expects timeslot bounds as "gte_<unix-ts>" / "lte_<unix-ts>";
        # with no start given, only future timeslots are requested.
        if timeslot_start:
            timeslot_start_param = f"gte_{int(timeslot_start.timestamp())}"
        else:
            timeslot_start_param = "gte_now"
        if timeslot_end:
            timeslot_end_param = f"lte_{int(timeslot_end.timestamp())}"
        else:
            timeslot_end_param = None

        if states:
            # The MA list API has no state filter; log and ignore the param.
            logging.warning(
                "Cannot pass states param when using Mobilize America Recomendation Strategy"
            )

        params = {
            "timeslot_start": timeslot_start_param,
            "tag_id": tag_ids,
            "event_types": event_types,
        }
        if timeslot_end_param:
            params["timeslot_end"] = timeslot_end_param

        if is_virtual:
            params["is_virtual"] = True
        else:
            # In-person searches are zip-centered with an optional radius.
            params["zipcode"] = zip5
            params["max_dist"] = max_dist

        # Only the first page of API results is consulted.
        res = get_global_client().list_organization_events(params)
        return next(res)["data"][0:limit]


class DBRecommendationStrategy(BaseRecommendationStrategy):
    """Recommend events from locally mirrored MobilizeAmericaEvent rows."""

    @classmethod
    def _should_use_doc_prio(cls, states) -> bool:
        # True when at least one requested state has a prioritization doc
        # enabled and configured.
        if states:
            states_with_doc_prio = cls._filter_to_states_with_prio(states)
            return len(states_with_doc_prio) > 0
        return False

    @classmethod
    def _filter_to_states_with_prio(cls, state_codes):
        """Of `state_codes`, return those opted in to doc prioritization with a non-empty doc URL."""
        return (
            State.objects.filter(state_code__in=state_codes)
            .filter(use_prioritization_doc=True)
            .exclude(prioritization_doc="")
            .values("state_code")
        )

    @classmethod
    def find_events(
        cls,
        limit,
        zip5=None,
        max_dist=None,
        tag_ids=None,
        timeslot_start=None,
        timeslot_end=None,
        event_types=None,
        is_virtual=False,
        states=None,
    ):
        filter_args = {
            "is_virtual": is_virtual,
            "visibility": settings.MOBILIZE_AMERICA_DEFAULT_VISIBILITY,
            "is_active": True,
        }
        if tag_ids:
            # Any overlap between the event's tags and the requested tags.
            filter_args["tag_ids__overlap"] = tag_ids

        # Default to events with timeslots starting from "now".
        filter_args["timeslots__start_date__gte"] = timeslot_start or datetime.now(
            tz=timezone.utc
        )
        if timeslot_end:
            filter_args["timeslots__end_date__lte"] = timeslot_end
        if event_types:
            filter_args["event_type__in"] = event_types

        if is_virtual:
            # Virtual events have no coordinates: order by priority flag,
            # then soonest timeslot.
            events = list(
                MobilizeAmericaEvent.objects.filter(**filter_args)
                .annotate(earliest_timeslot=Min("timeslots__start_date"))
                .order_by("-high_priority", "earliest_timeslot")
                .all()[0:limit]
            )
        else:
            # Raises USZip5.DoesNotExist for an unknown zip — callers handle it.
            coordinates = USZip5.objects.get(zip5=zip5).coordinates
            if max_dist:
                filter_args["coordinates__distance_lte"] = (
                    coordinates,
                    D(mi=int(max_dist)),
                )

            order_by_list = ["distance", "earliest_timeslot"]

            if cls._should_use_doc_prio(states):
                # If the event is a canvas and the states are in prio mode
                filter_args["state__state_code__in"] = cls._filter_to_states_with_prio(
                    states
                )
                # Sheet-driven priority outranks distance for these states.
                order_by_list = ["state_prioritization", *order_by_list]
            else:
                if states:
                    filter_args["state__state_code__in"] = states

            events = list(
                MobilizeAmericaEvent.objects.filter(**filter_args)
                .annotate(
                    distance=Distance("coordinates", coordinates),
                    earliest_timeslot=Min("timeslots__start_date"),
                )
                .order_by(*order_by_list)
                .all()[0:limit]
            )
        # Serve the raw MA payloads, not model instances.
        return [e.raw for e in events]
class Command(BaseCommand):
    """Import zipcode data from a gzipped csv. See tc/data"""

    def add_arguments(self, parser):
        fallback_path = os.path.join(
            settings.BASE_DIR, "..", "datasets", "us_zip5s.csv.gz"
        )
        parser.add_argument(
            "--file",
            nargs="?",
            default=fallback_path,
            help="Full path to the zipcode file",
        )
        parser.add_argument(
            "--expect_at_least",
            nargs="?",
            default=MIN_EXPECTED_ZIPS,
            help="Minimum number of expected zips, for validation",
        )

    @transaction.atomic
    def handle(self, *args, **options):
        """Atomically replace the USZip5 table from the csv; roll back if the
        resulting row count is below the validation threshold."""
        threshold = int(options["expect_at_least"])
        source_path = options["file"]
        rows_seen = 0
        with gzip.open(source_path, "rt") as fh:
            reader = csv.DictReader(fh)
            # Full refresh: drop everything before re-importing.
            USZip5.objects.all().delete()
            for row in reader:
                rows_seen += 1
                # Coarse progress indicator for long imports.
                if rows_seen % 25 == 0:
                    print(rows_seen)
                point = None
                if row["latitude"] and row["longitude"]:
                    # GEOS points are (x=lng, y=lat); WGS84.
                    point = Point(float(row["longitude"]), float(row["latitude"]), srid=4326)
                USZip5.objects.create(
                    zip5=row["zip5"],
                    city=row["city"],
                    state=row["state"],
                    county=row["county"],
                    county_fips=int(row["county_fips"]) if row["county_fips"] else None,
                    accuracy=int(row["accuracy"]) if row["accuracy"] else None,
                    coordinates=point,
                )
        total = USZip5.objects.all().count()
        if total < threshold:
            raise Exception(f"Wrote fewer zips than expected ({total}), rolling back")
        return f"Wrote {total} zips"
class Command(BaseCommand):
    """Export USZip5 rows as per-zip JSON files for the website.

    pipenv run python manage.py move_zip5s_to_s3
    """

    _s3 = None

    def add_arguments(self, parser):
        parser.add_argument(
            "--limit",
            nargs="?",
            type=int,
            default=1,
            help="limit of zip files to produce",
        )
        parser.add_argument(
            "--path_to_files",
            nargs="?",
            default="zip5",
            help="path to location of the files",
        )
        parser.add_argument("--use_s3", action="store_true", help="Should send to s3")

    def _get_or_create_s3(self):
        """Lazily create and memoize the boto3 S3 resource."""
        if self._s3:
            return self._s3
        self._s3 = boto3.resource("s3")
        return self._s3

    def send_to_s3(self, data_to_write, file_path):
        """Upload one serialized zip record to the CDN bucket."""
        s3 = self._get_or_create_s3()
        s3.Bucket("cdn.elizabethwarren.com").put_object(
            Key=file_path, Body=data_to_write
        )

    def write_to_file(self, data_to_write, file_path):
        """Write one serialized zip record to local disk (non-S3 mode)."""
        with open(file_path, "w+") as zip_file:
            zip_file.write(data_to_write)

    @transaction.atomic
    def handle(self, *args, **options):
        """Serialize up to --limit zips and ship them to S3 or local files."""
        limit = options["limit"]
        base_path = options["path_to_files"]
        should_use_s3 = options["use_s3"]
        # Fix: previously a fresh serializer was built every iteration and
        # to_representation() was called twice per row with the first result
        # discarded. Serialize each row exactly once.
        zip5ser = USZip5Serializer()
        for zip_object in USZip5.objects.all()[:limit]:
            file_path = f"{base_path}/{zip_object.zip5}"
            data_to_write = json.dumps(zip5ser.to_representation(zip_object))
            if should_use_s3:
                self.send_to_s3(data_to_write, file_path)
            else:
                self.write_to_file(data_to_write, file_path)
class Command(BaseCommand):
    help = "If MA goes down retry the sends that failed"

    def add_arguments(self, parser):
        parser.add_argument(
            "--limit", type=int, nargs="?", help="Number of events to retry"
        )
        parser.add_argument(
            "--days", type=int, nargs="?", help="How many days back should we look"
        )
        parser.add_argument(
            "--no_response",
            action="store_true",
            help="only resignup people who weren't sent to mobilize",
        )

    def handle(self, *args, **options):
        """Re-send failed EventSignups to Mobilize America.

        Selects signups with ma_creation_successful=False, optionally
        restricted to the last --days days and/or to signups that never got
        any MA response at all, then retries up to --limit of them. Each
        attempted signup has retried_at stamped and is saved regardless of
        outcome.
        """
        logging.info(f"Starting to retry sends")
        limit = options.get("limit", None)
        days = options.get("days", None)
        not_sent = options.get("no_response")
        failed_events = EventSignup.objects.filter(ma_creation_successful=False)

        if days is not None:
            # Only retry signups created within the lookback window.
            create_at_cutoff = timezone.now() - timedelta(days=days)
            failed_events = failed_events.filter(created_at__gte=create_at_cutoff)

        if not_sent:
            # ma_response is NULL when the original request never reached MA.
            failed_events = failed_events.filter(ma_response__isnull=True)

        if limit:
            # NOTE(review): a --limit of 0 is treated the same as "no limit"
            # here — confirm that is intended.
            failed_events = failed_events[:limit]

        events_successfull_resyncd_count = 0
        for event in failed_events:
            success, _ = event.sync_to_mobilize_america()
            if success:
                logging.info(f"Successfully sent signup with id {event.id} to MA")
                events_successfull_resyncd_count += 1
            else:
                logging.info(f"Still unable to send signup {event.id} to MA")
            # Record the retry attempt whether or not it succeeded.
            event.retried_at = timezone.now()
            event.save()

        return f"Resent {events_successfull_resyncd_count} signups"
class Command(BaseCommand):
    help = "Get the prioritizations from the state sheets"

    def handle(self, *args, **options):
        """Pull per-event prioritization values from each opted-in state's
        Google Sheet and write them onto the matching MobilizeAmericaEvent
        rows.
        """
        logging.info("Starting to update prioritizations")

        # Fix: prioritization_doc is a CharField with blank=True, so "unset"
        # is the empty string, not NULL — isnull=False alone matched every
        # state. Exclude empty doc URLs, mirroring
        # DBRecommendationStrategy._filter_to_states_with_prio.
        states_with_prioritization = State.objects.filter(
            use_prioritization_doc=True, prioritization_doc__isnull=False
        ).exclude(prioritization_doc="")

        google_sheets_client = GoogleSheetsClient(settings.GOOGLE_DOCS_CREDENTIALS)
        for state in states_with_prioritization:
            prioritizations = google_sheets_client.get_values_from_sheet(
                url=state.prioritization_doc,
                tab_name=PRIORITIZATIONS_TAB,
                columns=[MA_EVENT_ID_COLUMN, PRIORITIZATION_COLUMN],
            )

            for prioritization in prioritizations:
                event_id = prioritization[MA_EVENT_ID_COLUMN]
                state_prioritization_value = prioritization[PRIORITIZATION_COLUMN]

                # Rows without an integer event id are sheet noise; skip them.
                if not isinstance(event_id, int):
                    continue
                # Fix: any non-integer cell (blank or free text) or an
                # out-of-range value demotes the event to the lowest
                # priority. Previously a non-blank string raised TypeError
                # on the `> 10` comparison.
                if (
                    not isinstance(state_prioritization_value, int)
                    or state_prioritization_value > 10
                ):
                    state_prioritization_value = MAX_INTEGER_SIZE
                event = MobilizeAmericaEvent.objects.filter(id=event_id).first()
                if event:
                    event.state_prioritization = state_prioritization_value
                    event.save()

        return f"Prioritized {states_with_prioritization.count()} states"
class Command(BaseCommand):
    help = "Get the prioritization meta data from the national doc"

    def handle(self, *args, **options):
        """Sync each state's prioritization-doc flag and URL from the
        national Google Sheet into the State table."""
        logging.info("Starting to update prioritizations")

        sheets = GoogleSheetsClient(settings.GOOGLE_DOCS_CREDENTIALS)
        state_metas = sheets.get_values_from_sheet(
            url=settings.PRIORITIZATION_META,
            tab_name=PRIORITIZATIONS_TAB,
            columns=[
                STATE_CODE_COLUMN_NAME,
                USE_PRIORITIZE_DOC_COLUMN_NAME,
                PRIORITIZATION_DOC_URL_COLUMN_NAME,
            ],
        )
        for row in state_metas:
            code = row[STATE_CODE_COLUMN_NAME]
            doc_url = row[PRIORITIZATION_DOC_URL_COLUMN_NAME]
            # The sheet stores booleans as the literal strings TRUE/FALSE.
            enabled = row[USE_PRIORITIZE_DOC_COLUMN_NAME] == "TRUE"
            # Rows missing either the state code or a doc URL are ignored.
            if code and doc_url:
                State.objects.filter(state_code=code).update(
                    use_prioritization_doc=enabled, prioritization_doc=doc_url
                )

        return f"Updated {len(state_metas)} metas"
# Fields of the MA event payload that are safe to expose to clients, plus
# fields our own pipeline adds.
__MA_FIELD_WHITELIST = {
    "browser_url",
    "description",
    "event_type",
    "high_priority",
    "id",
    "location",
    "tags",
    "timeslots",
    "timezone",
    "title",
    # our added fields:
    "formatted_time",
    "local_start_time",
    "times_synopsis",
}


def sanitize_event_payload(payload):
    """
    Remove potentially sensitive fields from the Mobilize America API event
    response.

    This method is required because we fetch events using the authenticated GET
    /events API, which can return private fields that we don't want to serve.

    Fix: the payload is now read rather than mutated — previously "location"
    and "address_visibility" were pop()ed off the caller's dict as a side
    effect. Output is unchanged.
    """
    location = payload.get("location")
    address_vis = payload.get("address_visibility")
    # "location" is whitelisted but handled specially below, so it is
    # explicitly excluded from the bulk copy.
    sanitized = {
        k: v
        for k, v in payload.items()
        if k in __MA_FIELD_WHITELIST and k != "location"
    }
    if address_vis == PUBLIC_VISIBILITY:
        # Public addresses may be served verbatim.
        sanitized["location"] = location
    elif location and location.get("postal_code"):
        # Otherwise expose only the zip-code centroid, never the address.
        matching_codes = zipcodes.matching(location.get("postal_code"))
        if len(matching_codes) > 0:
            zip_code_data = matching_codes[0]
            sanitized["location"] = {
                "location": {
                    "longitude": float(zip_code_data["long"]),
                    "latitude": float(zip_code_data["lat"]),
                }
            }

    return sanitized


def filter_timeslots_for_time(event, timeslot_start_after_utc, timeslot_end_before_utc):
    """Return a deep copy of `event` keeping only timeslots that start after
    `timeslot_start_after_utc` and/or end before `timeslot_end_before_utc`
    (either bound may be None to skip that check)."""
    event_dupe = deepcopy(event)
    tz = pytz.timezone(event["timezone"])

    timeslots_within_range = []
    for timeslot in event_dupe["timeslots"]:
        timeslot_start = __timestamp_to_datetime_in_zone(timeslot["start_date"], tz)
        timeslot_end = __timestamp_to_datetime_in_zone(timeslot["end_date"], tz)
        is_valid_timeslot = True

        if timeslot_start_after_utc:
            is_valid_timeslot = timeslot_start > timeslot_start_after_utc
        if timeslot_end_before_utc:
            is_valid_timeslot = is_valid_timeslot and (
                timeslot_end < timeslot_end_before_utc
            )

        if is_valid_timeslot:
            timeslots_within_range.append(timeslot)

    event_dupe["timeslots"] = timeslots_within_range
    return event_dupe


def remove_full_timeslots(event):
    """Return a deep copy of `event` with full timeslots dropped; the input
    event is left untouched."""
    event_dupe = deepcopy(event)
    event_dupe["timeslots"] = [
        timeslot for timeslot in event_dupe["timeslots"] if not timeslot["is_full"]
    ]
    return event_dupe
def add_extras_for_mdata(event, utm_source):
    """Given raw Mobilize America event dict, adds extra useful fields.

    - Adds "formatted_time" to each timeslot.
    - Adds "times_synopsis" to the top-level event synopsizing all timeslots.
    - Adds utm_source to browser URL.

    Note: mutates and returns the same `event` dict.
    """
    tz = pytz.timezone(event["timezone"])
    for timeslot in event["timeslots"]:
        local_timestamp = __timestamp_to_datetime_in_zone(timeslot["start_date"], tz)
        timeslot["formatted_time"] = __format_event_start_date_and_time(local_timestamp)
        timeslot["local_start_time"] = local_timestamp.strftime("%Y-%m-%dT%H:%M:%S")
    event["times_synopsis"] = __format_event_times_synopsis(
        event["timeslots"], pytz.timezone(event["timezone"])
    )
    event["browser_url"] = __add_utm_source(event["browser_url"], utm_source)
    return event


def __timestamp_to_datetime_in_zone(timestamp, tz):
    """Convert a Unix timestamp to an aware datetime in timezone `tz`."""
    return pytz.utc.localize(datetime.utcfromtimestamp(timestamp)).astimezone(tz)


def __format_event_times_synopsis(timeslots, tz):
    """Given an event's timeslots and timezone, returns string summarizing dates and times."""
    # Assumes timeslots arrive ordered by start_date so groupby yields one
    # group per date — TODO confirm against the MA payload ordering.
    timeslots_by_date = itertools.groupby(
        timeslots, lambda x: __timestamp_to_datetime_in_zone(x["start_date"], tz).date()
    )
    date_synopses = []
    times_summaries = []
    formatted_dates = []
    for d, day_timeslots in timeslots_by_date:
        times = []
        for timeslot in day_timeslots:
            timeslot_start = __timestamp_to_datetime_in_zone(timeslot["start_date"], tz)
            times.append(__format_event_start_time(timeslot_start))
        times_summary = __join_with_or(times)
        formatted_date = __format_date(d)
        date_synopses.append(f"{times_summary} on {formatted_date}")

        times_summaries.append(times_summary)
        formatted_dates.append(formatted_date)

    # If every day has the same set of times, collapse to one times summary
    # over a list of dates; otherwise spell out each day separately.
    if len(times_summaries) > 1 and times_summaries[1:] == times_summaries[:-1]:
        dates_summary = __join_with_or(formatted_dates)
        summary = f"{times_summaries[0]} on {dates_summary}"
    else:
        summary = "; or ".join(date_synopses)

    return summary


def __join_with_or(times):
    """Returns 'a', 'a or b', or 'a, b, or c'."""
    if not times:
        return ""
    if len(times) == 1:
        return times[0]
    if len(times) == 2:
        return " or ".join(times)
    return ", or ".join([", ".join(times[:-1]), times[-1]])


def __format_event_start_time(t):
    """Formats datetime into e.g. 5PM"""
    # %-I (no leading zero) is a glibc extension; ":00" is dropped for
    # on-the-hour times.
    strftime_format = "%-I:%M%p"
    return t.strftime(strftime_format).replace(":00", "")


def __format_event_start_date_and_time(t):
    """Formats datetime into e.g. Tue Jul 30 at 5PM"""
    strftime_format = "%a %b %-d at %-I:%M %p"
    return t.strftime(strftime_format)


def __format_date(d):
    """Return e.g. 'Sun 9 Sep'."""
    return d.strftime("%a %b %-d")


def __add_utm_source(browser_url, utm_source):
    """Adds utm_source to browser_url.

    This is the URL parameter Mobilize America uses for attribution tracking.
    """
    if not utm_source:
        return browser_url

    # Appending a second query string would corrupt the URL, so bail out if
    # MA already returned one.
    if "?" in browser_url:
        logging.warning(
            f"Mobilize America unexpectedly returned query params in event URL: {browser_url}"
        )
        return browser_url

    return f"{browser_url}?utm_source={urllib.parse.quote(utm_source)}"
class EventSignup(BaseModelMixin):
    """A volunteer's shift signup, recorded locally and mirrored to
    Mobilize America (MA)."""

    email = models.EmailField(blank=True)
    family_name = models.CharField(blank=False, max_length=255)
    given_name = models.CharField(blank=False, max_length=255)
    # True once MA has accepted the attendance for this signup.
    ma_creation_successful = models.BooleanField(default=False)
    ma_event_id = models.IntegerField(null=False)
    # Raw MA API response (success payload or error body); NULL when the
    # request never reached MA at all.
    ma_response = JSONField(null=True)
    ma_timeslot_ids = ArrayField(models.IntegerField(null=False), null=False)
    metadata = JSONField(null=True)
    phone = PhoneNumberField(blank=True)
    session_id = models.CharField(blank=True, max_length=255)
    # Timeslot ids MA confirmed were actually signed up via shifter.
    signed_up_via_shifter = ArrayField(models.IntegerField(null=False), null=True)
    honor_ma_attendance = models.BooleanField(default=True)
    # CAUTION: we are not allowed to collect opt-ins on Mobilize America's
    # behalf, so this field can only be used to opt people in to messaging
    # from our campaign.
    sms_opt_in = models.BooleanField(default=False)
    source = models.CharField(blank=True, max_length=255)
    zip5 = USZipCodeField(blank=True)
    # Stamped by the retry_ma_events management command on each retry.
    retried_at = models.DateTimeField(null=True)
    heap_id = models.CharField(max_length=1024, null=True)

    def sync_to_mobilize_america(self):
        """Attempt to create the MA attendance for this signup.

        No-op unless the signup has not yet succeeded and has both an email
        and a zip. On an MA API error the error body is retained in
        ma_response for later retries. Returns
        (ma_creation_successful, ma_response).
        """
        if not self.ma_creation_successful and (self.email and self.zip5):
            try:
                referrer = Referrer(utm_source=self.source) if self.source else None
                self.ma_response, timeslots_signed_up = get_global_client().create_event_attendance(
                    self.ma_event_id,
                    list(self.ma_timeslot_ids),
                    person=AttendanceRequestPerson(
                        given_name=self.given_name,
                        family_name=self.family_name,
                        email_address=self.email,
                        postal_code=self.zip5,
                        phone_number=str(self.phone),
                    ),
                    referrer=referrer,
                    honor_ma_attendance=self.honor_ma_attendance,
                )
                self.ma_creation_successful = True
                self.signed_up_via_shifter = timeslots_signed_up
            except MobilizeAmericaAPIException as e:
                # Keep the error body around for debugging and retries.
                self.ma_response = e.response
                self.ma_creation_successful = False
            self.save()
        return self.ma_creation_successful, self.ma_response
class MobilizeAmericaEventManager(models.Manager):
    """Manager that upserts MobilizeAmericaEvent rows from raw MA API payloads."""

    @staticmethod
    def _timeslot_from_json(event_id, j):
        """Build (without saving) a MobilizeAmericaTimeslot from an MA timeslot dict."""
        ts = MobilizeAmericaTimeslot()
        ts.event_id = event_id
        ts.end_date = _convert_ma_timestamp(j["end_date"])
        ts.start_date = _convert_ma_timestamp(j["start_date"])
        ts.id = j["id"]
        ts.is_full = j["is_full"]
        ts.raw = j
        return ts

    @transaction.atomic
    def update_or_create_from_json(self, payload):
        """Upsert an event from a raw MA event dict, replacing its timeslots.

        Returns (event, created) like update_or_create. Marks the event
        active; the import command later deactivates rows that stop being
        refreshed.
        """
        loc = payload.get("location")
        coordinates = None
        if loc is not None and "location" in loc:
            lat = loc["location"].get("latitude")
            lng = loc["location"].get("longitude")
            # GEOS points are (x=lng, y=lat); WGS84.
            coordinates = Point(lng, lat, srid=4326)
        state = None
        if loc is not None and "region" in loc:
            state_code = loc["region"]
            state, _ = State.objects.get_or_create(state_code=state_code)
        event, created = self.update_or_create(
            id=payload["id"],
            defaults={
                "title": payload.get("title"),
                "event_type": payload.get("event_type"),
                "visibility": payload.get("visibility"),
                "high_priority": payload.get("high_priority"),
                # An event with no location block is treated as virtual.
                "is_virtual": loc is None,
                "coordinates": coordinates,
                "tag_ids": [t["id"] for t in payload.get("tags", [])],
                "modified_date": _convert_ma_timestamp(payload.get("modified_date")),
                "raw": payload,
                "state": state,
                "is_active": True,
            },
        )
        if not created:
            # the update_or_create might do this automatically, but it was hard
            # to tell so I added it in
            event.updated_at = timezone.now()
            event.save()
        # Replace, rather than merge, the event's timeslots with the payload's.
        event.timeslots.all().delete()
        timeslots = [
            self._timeslot_from_json(event.id, j) for j in payload.get("timeslots", [])
        ]
        MobilizeAmericaTimeslot.objects.bulk_create(timeslots)
        return event, created
class MobilizeAmericaEvent(BaseModelMixin):
    """Local cache of a Mobilize America event, keyed by the MA event id."""

    objects = MobilizeAmericaEventManager()

    coordinates = gis_models.PointField(
        "coordinates", geography=True, srid=4326, null=True
    )
    event_type = models.CharField(null=True, max_length=30, db_index=True)
    # Note: we use the MA id as the primary key so we override Django's
    # auto-increment `id` field
    id = models.IntegerField(primary_key=True)
    is_virtual = models.BooleanField(default=False, db_index=True)
    high_priority = models.BooleanField(default=False, db_index=True)
    modified_date = models.DateTimeField(null=True, db_index=True)
    state = models.ForeignKey(State, null=True, on_delete=models.DO_NOTHING)
    # Full MA API payload this row was last synced from.
    raw = JSONField(null=False)
    tag_ids = ArrayField(models.IntegerField(), default=list)
    title = models.CharField(max_length=1024)
    visibility = models.CharField(null=True, max_length=30, db_index=True)
    # Defaults to MAX_INTEGER_SIZE, i.e. "unprioritized"; presumably lower
    # values rank first — TODO confirm against update_prioritization command.
    state_prioritization = models.IntegerField(default=MAX_INTEGER_SIZE)
    is_active = models.BooleanField(default=True)


class MobilizeAmericaTimeslot(BaseModelMixin):
    """A signup window of a MobilizeAmericaEvent (MA timeslot id as pk)."""

    end_date = models.DateTimeField(null=True)
    event = models.ForeignKey(
        MobilizeAmericaEvent,
        on_delete=models.DO_NOTHING,
        null=False,
        related_name="timeslots",
    )
    id = models.IntegerField(primary_key=True)
    is_full = models.BooleanField(default=False)
    # Full MA API payload for this timeslot.
    raw = JSONField(null=False)
    start_date = models.DateTimeField(null=True)
primary_key=True) 187 | 188 | @property 189 | def longitude(self): 190 | return self.coordinates.x 191 | 192 | @property 193 | def latitude(self): 194 | return self.coordinates.y 195 | 196 | 197 | def _convert_ma_timestamp(unix_time): 198 | return datetime.fromtimestamp(unix_time, timezone.utc) 199 | -------------------------------------------------------------------------------- /supportal/shifter/serializers.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from django.conf import settings 4 | from localflavor.us.models import USZipCodeField 5 | from rest_framework import serializers 6 | from rest_framework.exceptions import NotFound, ValidationError 7 | from rest_framework.serializers import ModelSerializer 8 | 9 | from supportal.services.mobilize_america import PUBLIC_VISIBILITY, get_global_client 10 | from supportal.shifter.event_recommendation_strategies import ( 11 | DBRecommendationStrategy, 12 | MobilizeAmericaAPIRecommendationStrategy, 13 | ) 14 | from supportal.shifter.mobilize_america_helpers import ( 15 | add_extras_for_mdata, 16 | filter_timeslots_for_time, 17 | remove_full_timeslots, 18 | sanitize_event_payload, 19 | ) 20 | from supportal.shifter.models import ( 21 | EventSignup, 22 | MobilizeAmericaEvent, 23 | MobilizeAmericaTimeslot, 24 | RecommendedEventRequestLog, 25 | USZip5, 26 | ) 27 | 28 | 29 | class EventSignupSerializer(ModelSerializer): 30 | class Meta: 31 | model = EventSignup 32 | fields = [ 33 | "session_id", 34 | "email", 35 | "phone", 36 | "given_name", 37 | "family_name", 38 | "heap_id", 39 | "sms_opt_in", 40 | "zip5", 41 | "metadata", 42 | "source", 43 | "ma_event_id", 44 | "ma_timeslot_ids", 45 | "ma_response", 46 | "ma_creation_successful", 47 | "created_at", 48 | "updated_at", 49 | "signed_up_via_shifter", 50 | "honor_ma_attendance", 51 | ] 52 | 53 | read_only_fields = [ 54 | "ma_response", 55 | "ma_creation_successful", 56 | "created_at", 57 | "updated_at", 58 | ] 59 
    def create(self, validated_data):
        """Create an EventSignup and, when possible, sync it to Mobilize America.

        Requires at least one of email/phone/zip5. The MA sync itself only
        happens when both email and zip5 are present, and (when the configured
        default visibility is PUBLIC) only for events we know are public.

        Raises ValidationError when no contact field is given and NotFound
        when the target event is unknown or not public.
        """
        if (
            not validated_data.get("email")
            and not validated_data.get("phone")
            and not (validated_data.get("zip5"))
        ):
            raise ValidationError(
                {"detail": "Must include either phone or email or zip5"},
                code="required_field",
            )
        obj = super().create(validated_data)
        # MA needs both email and zip to create attendance; without them the
        # signup is only persisted locally.
        if not validated_data.get("email") or not validated_data.get("zip5"):
            return obj
        # MA doesn't provide a way to limit event attendance creation to public events
        # so we have to check permissions ourselves before syncing.
        # Note: we perform the check here rather than in sync_to_mobilize_america() since
        # there could be legitimate reasons for us to sign people up to private events,
        # we just don't want them to be able to do it themselves through a public API.
        if settings.MOBILIZE_AMERICA_DEFAULT_VISIBILITY == PUBLIC_VISIBILITY:
            event = MobilizeAmericaEvent.objects.filter(
                id=obj.ma_event_id, is_active=True
            )
            if not event.exists() or event.first().visibility != PUBLIC_VISIBILITY:
                raise NotFound()
        obj.sync_to_mobilize_america()
        return obj
session_id = serializers.CharField(required=False) 109 | strategy = serializers.CharField(max_length=20, required=False) 110 | tag_ids = serializers.ListField( 111 | child=serializers.IntegerField(), allow_empty=True, required=False 112 | ) 113 | timeslot_start = serializers.DateTimeField(required=False) 114 | timeslot_end = serializers.DateTimeField(required=False) 115 | utm_source = serializers.CharField(required=False) 116 | zip5 = serializers.ModelField(USZipCodeField(), required=False) 117 | states = serializers.ListField( 118 | child=serializers.CharField(max_length=2), allow_empty=True, required=False 119 | ) 120 | 121 | __rec_kwargs = { 122 | "zip5", 123 | "event_types", 124 | "max_dist", 125 | "tag_ids", 126 | "timeslot_start", 127 | "timeslot_end", 128 | "is_virtual", 129 | "states", 130 | } 131 | 132 | def validate(self, attrs): 133 | if not attrs.get("is_virtual") and not attrs.get("zip5"): 134 | raise ValidationError("Field 'zip5' is required for non-virtual events") 135 | return attrs 136 | 137 | def create(self, validated_data): 138 | log = RecommendedEventRequestLog() 139 | # Log the raw request, not the converted/validated data 140 | log.request_params = self.initial_data 141 | 142 | strategy = self.__get_recommendation_strategy(validated_data) 143 | rbkwargs = {k: v for k, v in validated_data.items() if k in self.__rec_kwargs} 144 | events = strategy.find_events(validated_data["limit"], **rbkwargs) 145 | log.recommended_ma_event_ids = [e["id"] for e in events] 146 | try: 147 | log.save() 148 | except Exception as e: 149 | logging.exception("Failed to save RecommendedEventRequestLog", e) 150 | finally: 151 | return self.__prepare_events( 152 | events, 153 | validated_data.get("utm_source"), 154 | validated_data.get("timeslot_start"), 155 | validated_data.get("timeslot_end"), 156 | ) 157 | 158 | def __get_recommendation_strategy(self, data): 159 | if data.get("strategy") == "mobilize_america": 160 | return MobilizeAmericaAPIRecommendationStrategy 
161 | elif data.get("strategy") == "shifter_engine": 162 | return DBRecommendationStrategy 163 | else: 164 | return MobilizeAmericaAPIRecommendationStrategy 165 | 166 | def __prepare_events( 167 | self, events, utm_source, timeslot_start=None, timeslot_end=None 168 | ): 169 | events_with_open_timeslots = [remove_full_timeslots(e) for e in events] 170 | events_with_extra_m_data = [ 171 | add_extras_for_mdata(e, utm_source) for e in events_with_open_timeslots 172 | ] 173 | events_with_filtered_timeslots = [ 174 | filter_timeslots_for_time(e, timeslot_start, timeslot_end) 175 | for e in events_with_extra_m_data 176 | ] 177 | # Don't recommend events without timeslots for users to signup with 178 | return [ 179 | sanitize_event_payload(e) 180 | for e in events_with_filtered_timeslots 181 | if len(e["timeslots"]) 182 | ] 183 | -------------------------------------------------------------------------------- /supportal/shifter/views.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | 4 | from django.conf import settings 5 | from django.contrib.gis.db.models.functions import Distance 6 | from django.contrib.gis.measure import D 7 | from django.db.models import Min 8 | from rest_framework import permissions, status 9 | from rest_framework.exceptions import ValidationError 10 | from rest_framework.generics import ( 11 | CreateAPIView, 12 | GenericAPIView, 13 | ListAPIView, 14 | RetrieveAPIView, 15 | ) 16 | from rest_framework.request import Request 17 | from rest_framework.response import Response 18 | from rest_framework.throttling import AnonRateThrottle 19 | from rest_framework.views import APIView 20 | 21 | # from ew_common.input_validation import extract_postal_code 22 | from supportal.services.mobilize_america import ( 23 | MobilizeAmericaAPIException, 24 | get_global_client, 25 | ) 26 | from supportal.shifter import mobilize_america_helpers 27 | from supportal.shifter.common.error_codes import ( 
class ShifterWrappedExceptionView(GenericAPIView):
    """Base view that re-wraps DRF error payloads in our error envelope."""

    def handle_exception(self, exc):
        """Wrap the serializer/DRF errors to conform to our exception format."""
        response = super().handle_exception(exc)
        # e.g. "Bad Request" -> "BAD_REQUEST", matching our error-code naming.
        code = response.status_text.upper().replace(" ", "_")
        wrapped = generate_error_for_code(code, response.data)
        return Response(wrapped, response.status_code)
class MobilizeAmericaEventView(RetrieveAPIView, ShifterViewMixin):
    """Fetch a single MA event, preferring our DB copy over the live MA API."""

    def get(self, request, **kwargs):
        try:
            event_id = int(kwargs.get("id"))
        except (TypeError, ValueError):
            # ValueError covers non-numeric ids; TypeError covers a missing
            # "id" kwarg (int(None)), which previously escaped as a 500.
            return Response(
                generate_error_for_code(
                    ErrorCodes.INVALID_EVENT_ID.name, {"detail": "Invalid Event ID"}
                ),
                status=status.HTTP_400_BAD_REQUEST,
            )
        event = MobilizeAmericaEvent.objects.filter(id=event_id, is_active=True)
        if event.exists():
            res = event.first().raw
        else:
            try:
                # get the response from mobilize america if it's not in our DB
                res = get_global_client().get_organization_event(event_id)["data"]
            except MobilizeAmericaAPIException as e:
                error_response, status_code = get_error_code_and_status(e.response)
                return Response(error_response, status=status_code)

        # The frontend is responsible for removing the full timeslots for
        # switchboard/embedded shifter
        sanitized_res = mobilize_america_helpers.sanitize_event_payload(res)
        return Response(sanitized_res, status=status.HTTP_200_OK)
class EarlyStateView(ListAPIView, ShifterViewMixin):
    """List early primary/caucus states near a zip5, ordered by distance."""

    def get(self, request, **kwargs):
        zip5 = request.query_params.get("zip5")
        if not zip5 or len(zip5) != 5:
            raise ValidationError("zip5 is required")
        try:
            coordinates = USZip5.objects.get(zip5=zip5).coordinates
        except USZip5.DoesNotExist:
            raise ValidationError(f"zip5 {zip5} not found!")

        fargs = {"state__in": EARLY_STATES}
        max_dist = request.query_params.get("max_dist")
        if max_dist:
            # NOTE(review): a non-numeric max_dist raises ValueError (500);
            # consider validating — TODO confirm desired behavior.
            fargs["coordinates__distance_lte"] = (coordinates, D(mi=int(max_dist)))

        states = list(
            USZip5.objects.filter(**fargs)
            .values("state")
            # Using MIN here is kind of arbitrary, but it should work better than
            # other aggregates for people in border states.
            # If we actually start using max_dist, we may want to base this
            # on the event table rather than the zip table.
            .annotate(distance=Min(Distance("coordinates", coordinates)))
            .order_by("distance")
        )
        res = {
            "count": len(states),
            "data": [
                {"state": s["state"], "min_distance": int(s["distance"].mi)}
                for s in states
            ],
        }
        return Response(res, 200)
180 | if "zip5" in params: 181 | # TODO: get the postal code from the zip of zip5 182 | # params["zip5"] = extract_postal_code(str(params["zip5"])) 183 | 184 | ser = RecommendedEventRequestSerializer(data=params) 185 | ser.is_valid(raise_exception=True) 186 | try: 187 | events = ser.save() 188 | except USZip5.DoesNotExist: 189 | return Response( 190 | generate_error_for_code(ErrorCodes.ZIP_INVALID.name, {}), 191 | status.HTTP_400_BAD_REQUEST, 192 | ) 193 | except MobilizeAmericaAPIException as e: 194 | logging.exception("Got error from Mobilize America") 195 | error_response, status_code = get_error_code_and_status(e.response) 196 | return Response(error_response, status=status_code) 197 | return Response({"count": len(events), "data": events}, 200) 198 | 199 | def __backwards_compat_convert_params(self, params): 200 | """ 201 | Temporary helper that converts request parameters passed by existing Mobile 202 | Commons mdatas to our new format. 203 | 204 | 205 | Differences: 206 | - tag_id is now tag_ids 207 | - while the new backend is being tested, default to the MA API strategy 208 | """ 209 | if "tag_id" in params: 210 | params["tag_ids"] = params.pop("tag_id").split(",") 211 | 212 | 213 | def get_query_parameter_dict(request: Request, list_fields): 214 | """ 215 | Convert the request's query parameter MultiValueDict to a regular dictionary 216 | with special handling for list fields. 217 | 218 | For parameters where we expect a list, a caller may pass a comma-separated 219 | list of values, e.g 'recommended_events?tag_ids=1,2'. We don't, however, 220 | support repeating the query parameter, e.g, in 'recommended_events?tag_ids=1&tag_ids=2' 221 | the value of tag_id will be '[2]' not '[1, 2]'. This is different from the 222 | Mobilize America API, which supports the latter convention and not the former. 
def get_query_parameter_dict(request: Request, list_fields):
    """
    Convert the request's query parameter MultiValueDict to a regular dictionary
    with special handling for list fields.

    For parameters where we expect a list, a caller may pass a comma-separated
    list of values, e.g 'recommended_events?tag_ids=1,2'. We don't, however,
    support repeating the query parameter, e.g, in 'recommended_events?tag_ids=1&tag_ids=2'
    the value of tag_id will be '[2]' not '[1, 2]'. This is different from the
    Mobilize America API, which supports the latter convention and not the former.
    """
    params = {}
    for name, raw_value in request.query_params.items():
        params[name] = raw_value.split(",") if name in list_fields else raw_value
    return params
@pytest.mark.django_db
def test_expire_unsuccessfully_contacted_assignments(old_cambridge_assignment):
    """An assignment whose only contact attempts were unsuccessful still expires."""
    old_cambridge_assignment.create_contact_event(
        result=CanvassResult.UNAVAILABLE_LEFT_MESSAGE
    )
    old_cambridge_assignment.save()
    out = StringIO()
    expire_assignments(stdout=out)

    old_cambridge_assignment.refresh_from_db()
    assert old_cambridge_assignment.expired_at is not None
    assert "Expired 1 assignments." in out.getvalue()


@pytest.mark.django_db
def test_successfully_contacted_dont_expire(old_cambridge_assignment):
    """A successful contact protects an old assignment from expiry."""
    # Make sure that having a previous unsuccessful contact event doesn't cause
    # the contact to get expired.
    old_cambridge_assignment.create_contact_event(
        result=CanvassResult.UNAVAILABLE_LEFT_MESSAGE
    )
    old_cambridge_assignment.create_contact_event(
        result=CanvassResult.SUCCESSFUL_CANVASSED
    )

    old_cambridge_assignment.save()
    out = StringIO()
    expire_assignments(stdout=out)
    old_cambridge_assignment.refresh_from_db()

    assert old_cambridge_assignment.expired_at is None
    assert "Expired 0 assignments." in out.getvalue()
@pytest.mark.django_db
def test_normalize_email_command(mocker):
    """normalize_emails lower-cases stored addresses and mirrors the change to
    Cognito, while leaving already-normalized users untouched."""
    mock_cognito = Mock()
    mocker.patch(
        "supportal.app.models.user._get_cognito_client", return_value=mock_cognito
    )

    user1 = User.objects.create_user(
        "user1", "ishouldntchange@example.com", skip_cognito=True
    )
    user1.refresh_from_db()
    original_u1_updated_at = user1.updated_at
    u2_email = "LowerCaseMe@example.com"
    user2 = User.objects.create_user("user2", u2_email, skip_cognito=True)
    # need to set this manually because create_user normalizes emails before save
    user2.email = u2_email
    user2.save()

    Command().handle()

    user1.refresh_from_db()
    user2.refresh_from_db()
    # Only user2 should have been pushed to Cognito, with the normalized address.
    mock_cognito.admin_update_user_attributes.assert_called_once_with(
        UserPoolId=settings.COGNITO_USER_POOL,
        Username="user2",
        UserAttributes=[
            {"Name": "email", "Value": "lowercaseme@example.com"},
            {"Name": "email_verified", "Value": "True"},
        ],
    )
    assert user2.email == "lowercaseme@example.com"
    # user1 was already normalized, so it must not have been re-saved.
    assert user1.updated_at == original_u1_updated_at
@pytest.mark.django_db
def test_unskip_assignments(old_cambridge_assignment):
    """unskip_prospects clears suppression/expiry and refreshes created_at."""
    out = StringIO()
    call_command(
        "unskip_prospects",
        stdout=out,
        user=old_cambridge_assignment.user.email,
        run=True,
    )

    old_cambridge_assignment.refresh_from_db()
    assert old_cambridge_assignment.suppressed_at is None
    assert old_cambridge_assignment.expired_at is None
    # The assignment is re-issued, so its created_at moves forward in time.
    assert old_cambridge_assignment.created_at > OLD_CREATED_DATE
@pytest.mark.django_db
def test_if_active_assignments_dont_unskip(
    old_cambridge_assignment, current_new_assignment
):
    """When the prospect already has an active assignment (here, for a
    different user), the old skipped assignment must be left untouched."""
    out = StringIO()
    call_command(
        "unskip_prospects",
        stdout=out,
        user=old_cambridge_assignment.user.email,
        run=True,
    )

    old_cambridge_assignment.refresh_from_db()
    assert old_cambridge_assignment.suppressed_at is not None
    assert old_cambridge_assignment.expired_at is not None
    assert old_cambridge_assignment.created_at == OLD_CREATED_DATE
@pytest.mark.django_db
def test_from_reference(
    cambridge_leader, cambridge_prospect, somerville_prospect, california_prospect
):
    """from_reference returns people within the radius, nearest first, and
    excludes anyone outside it."""
    result = (
        Person.objects.from_reference(cambridge_leader.coordinates, radius_mi=100)
        .exclude(pk=cambridge_leader.pk)
        .all()
    )

    # Result is ordered with nearest first. Does not include the prospect in
    # California, which is too far away.
    assert [x.city for x in result] == ["Cambridge", "Somerville"]
@pytest.mark.django_db
def test_assignment_contact_count(cambridge_leader_user):
    """assignment_contacts_count counts each contacted assignment once,
    regardless of how many contact events it has, and ignores assignments
    with no contacts (including suppressed/expired ones)."""
    assert cambridge_leader_user.assignment_contacts_count == 0
    vpa_unreachable = baker.make("VolProspectAssignment", user=cambridge_leader_user)
    vpa_unreachable.create_contact_event(
        result=CanvassResult.UNREACHABLE_MOVED, metadata={"moved_to": "CA"}
    )
    vpa_unavailable = baker.make("VolProspectAssignment", user=cambridge_leader_user)
    # Two events on the same assignment should still count it only once.
    vpa_unavailable.create_contact_event(result=CanvassResult.UNAVAILABLE_CALL_BACK)
    vpa_unavailable.create_contact_event(result=CanvassResult.UNAVAILABLE_CALL_BACK)
    vpa_successful = baker.make("VolProspectAssignment", user=cambridge_leader_user)
    vpa_successful.create_contact_event(result=CanvassResult.SUCCESSFUL_CANVASSED)
    baker.make(
        "VolProspectAssignment",
        user=cambridge_leader_user,
        suppressed_at=datetime.now(tz=timezone.utc),
    )
    baker.make(
        "VolProspectAssignment",
        user=cambridge_leader_user,
        expired_at=datetime.now(tz=timezone.utc),
    )
    assert cambridge_leader_user.assignment_contacts_count == 3
@pytest.mark.django_db
def test_create_superuser():
    """Superusers should have usable passwords for admin access"""
    u = User.objects.create_superuser(
        "superuser",
        "superuser@fake.com",
        "My Super Secure P@ssw0rd!",
        skip_cognito=True,
        first_name="Abba",
        last_name="Zaba",
    )
    u.save()
    assert u.has_usable_password()
    # Superusers aren't linked to a voter-file Person record.
    assert u.person is None


@pytest.mark.django_db
def test_lower_case_email_address():
    """create_user normalizes email addresses to lower case before saving."""
    u = User.objects.create_user(
        "some_user", "LowerCaseMe@example.com", skip_cognito=True
    )
    u.save()
    assert u.email == "lowercaseme@example.com"
@pytest.mark.django_db
def test_that_kid_jwks_misalignment_throws_403(user):
    """validate_jwt must reject a token whose `kid` header matches no JWKS key.

    Fix: the original wrapped the call in `assert ...` inside pytest.raises.
    That assert can never run meaningfully — if validate_jwt raises, control
    never reaches it; if it returns, pytest.raises fails the test regardless.
    """
    with pytest.raises(exceptions.AuthenticationFailed):
        validate_jwt(utils.create_id_jwt(user, key_id="this is not going to work"))
@pytest.mark.django_db
def test_unsubscribes_user(api_client, supportal_admin_user):
    """POSTing a known user's email to /v1/unsubscribe sets unsubscribed_at."""
    assert supportal_admin_user.unsubscribed_at is None
    # Fix: the URL was an f-string with no placeholders (flake8 F541);
    # the runtime value is unchanged.
    res = api_client.post(
        "/v1/unsubscribe",
        data=json.dumps({"email": supportal_admin_user.email}),
        content_type="application/json",
    )

    supportal_admin_user.refresh_from_db()
    assert supportal_admin_user.unsubscribed_at is not None
    assert res.status_code == status.HTTP_200_OK
def assert_person_payloads_same(a, b):
    """Assert two serialized person payloads agree on every key of `a`.

    Coordinates are compared approximately (floating point); the
    server-generated keys id/created_at/updated_at are ignored.

    Fix: the original wrote `assert pytest.approx(x, y)`, which always
    passes — pytest.approx() builds a (truthy) comparison object and its
    second positional argument is the relative tolerance, not the other
    operand. The correct idiom is `actual == pytest.approx(expected)`.
    """
    for k in a.keys():
        if k == "coordinates":
            point_a = geos_fromstr(a[k])
            point_b = geos_fromstr(b[k])
            assert point_a.x == pytest.approx(point_b.x)
            assert point_a.y == pytest.approx(point_b.y)
        elif k in {"id", "created_at", "updated_at"}:
            pass  # server-generated fields: intentionally not compared
        else:
            assert a[k] == b[k]
@pytest.mark.django_db
def test_update_one(api_client, superuser, somerville_prospect):
    """Re-POSTing a person with the same ngp_id upserts instead of duplicating.

    Fix: two `count` locals were computed and never used (the first even
    filtered on a stray ngp_id "123456"); the post-condition they hinted at
    is now an explicit assertion.
    """
    payload = {**SOMERVILLE_PROSPECT_PAYLOAD, "phone": "+"}
    resp = api_client.post("/v1/people/", payload, **id_auth(superuser))
    assert resp.status_code == status.HTTP_201_CREATED

    # Upsert, not insert: still exactly one row for this ngp_id.
    assert Person.objects.filter(ngp_id="").count() == 1
    person = Person.objects.get(ngp_id="")
    assert person.phone == "+"
@pytest.mark.django_db
def test_create_many(api_client, superuser):
    """Bulk POST of several people creates every record.

    Fix: the response status was never checked, so a failed bulk create
    would only surface later as a confusing Person.DoesNotExist.
    """
    payload = [
        CAMBRIDGE_LEADER_PAYLOAD,
        SOMERVILLE_PROSPECT_PAYLOAD,
        CALIFORNIA_PROSPECT_PAYLOAD,
    ]
    resp = api_client.post("/v1/people/", payload, **id_auth(superuser))
    assert resp.status_code == status.HTTP_201_CREATED

    golden_cambridge_leader = FullPersonSerializer(
        baker.prepare_recipe("supportal.tests.cambridge_leader")
    ).data
    golden_somerville_prospect = FullPersonSerializer(
        baker.prepare_recipe("supportal.tests.somerville_prospect")
    ).data
    golden_california_prospect = FullPersonSerializer(
        baker.prepare_recipe("supportal.tests.california_prospect")
    ).data

    # Each created row must round-trip to its golden serialized form.
    for golden in (
        golden_cambridge_leader,
        golden_somerville_prospect,
        golden_california_prospect,
    ):
        assert_person_payloads_same(
            FullPersonSerializer(Person.objects.get(ngp_id=golden["ngp_id"])).data,
            golden,
        )
@responses.activate
def test_list_events():
    """Paging through list_organization_events is lazy: one HTTP call per page."""
    responses.add(
        responses.GET,
        "https://localhost:8000/mobilize/v1/organizations/1/events?visibility=PUBLIC",
        body=json.dumps(LIST_EVENTS_RESPONSE),
        match_querystring=True,
    )
    responses.add(
        responses.GET,
        "https://localhost:8000/mobilize/v1/organizations/1/events?page=2&visibility=PUBLIC",
        body=json.dumps(LIST_EVENTS_RESPONSE_PAGE_2),
        match_querystring=True,
    )

    pages = mobilize_america.get_global_client().list_organization_events()

    first_page = next(pages)
    assert {event["id"] for event in first_page["data"]} == {172958, 175252, 179454}
    # Only one request so far — pagination must be lazy.
    assert len(responses.calls) == 1

    second_page = next(pages)
    assert {event["id"] for event in second_page["data"]} == {9620}
    with pytest.raises(StopIteration):
        next(pages)
    assert len(responses.calls) == 2
@responses.activate
def test_create_event_attendance():
    """A signup checks for an existing attendance (GET) then creates one (POST)."""
    responses.add(
        responses.POST,
        "https://localhost:8000/mobilize/v1/organizations/1/events/17/attendances",
        body=json.dumps(CREATE_ATTENDANCE_RESPONSE),
    )
    responses.add(
        responses.GET,
        "https://localhost:8000/mobilize/v1/organizations/1/events/17/attendances",
        body=json.dumps({"data": []}),
    )

    attendee = AttendanceRequestPerson(
        given_name="Matteo",
        family_name="B",
        email_address="mbanerjee@elizabethwarren.com",
        postal_code="11238",
    )
    mobilize_america.get_global_client().create_event_attendance(
        17, timeslot_ids=[40896, 40894], person=attendee
    )

    # Exactly two calls: the existence check plus the creation.
    assert len(responses.calls) == 2
@unittest.mock.patch("supportal.services.google_sheets_service.Credentials")
@unittest.mock.patch("supportal.services.google_sheets_service.pygsheets.client.Client")
def test_get_values_from_sheet(*args, **kwargs):
    """get_values_from_sheet keeps only the requested columns from each row."""
    sheets_client = GoogleSheetsClient("{}")
    gs_mock = sheets_client.client
    spreadsheet = unittest.mock.MagicMock()
    worksheet = unittest.mock.MagicMock()

    gs_mock.open_by_url.return_value = spreadsheet
    spreadsheet.worksheet_by_title.return_value = worksheet
    worksheet.get_all_records.return_value = [
        {"column1": "val", "column2": "val2"},
        {"column1": ""},
    ]

    rows = sheets_client.get_values_from_sheet("fake", "tab", ["column1"])

    assert len(rows) == 2
    for row in rows:
        assert "column1" in row
        assert "column2" not in row

    gs_mock.open_by_url.assert_called_with("fake")
    spreadsheet.worksheet_by_title.assert_called_with("tab")
    worksheet.get_all_records.assert_called_with()
def _add_event_type_response(visibility, list_events_response):
    """Register a mocked Mobilize America events-list response for `visibility`."""
    url = (
        "https://localhost:8000/mobilize/v1/organizations/1/events"
        f"?visibility={visibility}&timeslot_start=gte_now"
    )
    responses.add(
        responses.GET,
        url,
        body=json.dumps(list_events_response),
        match_querystring=True,
    )
@freezegun.freeze_time(datetime.now() + timedelta(days=1))
@pytest.mark.django_db
@responses.activate
def test_marks_old_events_as_inactive(cambridge_event):
    """Events absent from the latest import get flipped to inactive."""
    assert MobilizeAmericaEvent.objects.count() == 1
    assert cambridge_event.is_active

    single_page = deepcopy(LIST_EVENTS_RESPONSE)
    single_page["next"] = None
    for visibility in ("PUBLIC", "PRIVATE"):
        _add_event_type_response(visibility, single_page)

    Command().handle()

    imported = list(MobilizeAmericaEvent.objects.exclude(id=cambridge_event.id))
    assert {event.id for event in imported} == {172958, 175252, 179454}
    for event in imported:
        assert event.is_active

    # The pre-existing event was not in the fetched pages, so it is retired.
    cambridge_event.refresh_from_db()
    assert not cambridge_event.is_active
@pytest.mark.django_db
def test_handle():
    """Importing the bundled 10-zip fixture loads exactly 10 USZip5 rows.

    Fix: removed the unused `processed_incrementally` and `zips` locals —
    neither value was ever asserted on.
    """
    assert USZip5.objects.count() == 0
    Command().handle(file=TEST_FILE_PATH, expect_at_least=10)
    assert USZip5.objects.count() == 10
@pytest.mark.django_db
def test_handle(
    iowa_state, cambridge_event, virtual_phone_bank, high_pri_virtual_phone_bank
):
    """Only sheet rows carrying a numeric prioritization update an event."""
    with unittest.mock.patch(
        "supportal.shifter.management.commands.update_prioritization.GoogleSheetsClient"
    ) as sheets_mock:
        sheets_mock.return_value.get_values_from_sheet.return_value = [
            {MA_EVENT_ID_COLUMN: cambridge_event.id, PRIORITIZATION_COLUMN: ""},
            # 123 matches no event; the command must tolerate unknown ids.
            {MA_EVENT_ID_COLUMN: 123, PRIORITIZATION_COLUMN: ""},
            {MA_EVENT_ID_COLUMN: virtual_phone_bank.id, PRIORITIZATION_COLUMN: 11},
            {
                MA_EVENT_ID_COLUMN: high_pri_virtual_phone_bank.id,
                PRIORITIZATION_COLUMN: 5,
            },
        ]
        events = (cambridge_event, virtual_phone_bank, high_pri_virtual_phone_bank)
        for event in events:
            assert event.state_prioritization == MAX_INTEGER_SIZE

        Command().handle()

        for event in events:
            event.refresh_from_db()

        assert cambridge_event.state_prioritization == MAX_INTEGER_SIZE
        assert virtual_phone_bank.state_prioritization == MAX_INTEGER_SIZE
        assert high_pri_virtual_phone_bank.state_prioritization == 5
@pytest.mark.django_db
def test_handle():
    """A state adopts the doc only when the flag is TRUE *and* a URL is given."""
    states = {code: State.objects.create(state_code=code) for code in ("HI", "CA", "RI")}

    with unittest.mock.patch(
        "supportal.shifter.management.commands.update_prioritization_meta.GoogleSheetsClient"
    ) as sheets_mock:
        sheets_mock.return_value.get_values_from_sheet.return_value = [
            {
                STATE_CODE_COLUMN_NAME: states["HI"].state_code,
                USE_PRIORITIZE_DOC_COLUMN_NAME: "FALSE",
                PRIORITIZATION_DOC_URL_COLUMN_NAME: "woot",
            },
            {
                STATE_CODE_COLUMN_NAME: states["CA"].state_code,
                USE_PRIORITIZE_DOC_COLUMN_NAME: "TRUE",
                PRIORITIZATION_DOC_URL_COLUMN_NAME: "",
            },
            {
                STATE_CODE_COLUMN_NAME: states["RI"].state_code,
                USE_PRIORITIZE_DOC_COLUMN_NAME: "TRUE",
                PRIORITIZATION_DOC_URL_COLUMN_NAME: "woot",
            },
        ]
        for state in states.values():
            assert state.use_prioritization_doc is False

        Command().handle()

        for state in states.values():
            state.refresh_from_db()

        assert states["HI"].use_prioritization_doc is False
        assert states["CA"].use_prioritization_doc is False
        assert states["RI"].use_prioritization_doc

        assert states["HI"].prioritization_doc == "woot"
        assert states["CA"].prioritization_doc == ""
        assert states["RI"].prioritization_doc == "woot"
@pytest.mark.django_db
def test_create_event_recommendation_log():
    """A recommendation-request log row persists and receives a primary key."""
    entry = RecommendedEventRequestLog(
        session_id="fadkfksdmf",
        email="@elizabethwarren.com",
        request_params={"zip5": "11238", "max_dist": 5},
        recommended_ma_event_ids=[17],
    )
    entry.save()
    assert entry.id is not None
@pytest.mark.django_db
def test_update_and_create_mobilize_america_event_from_json(ca_zip5):
    """update_or_create_from_json creates on first sight, then updates in place."""
    payload = deepcopy(LIST_EVENTS_RESPONSE["data"][0])
    payload["location"]["region"] = ca_zip5.state
    assert not MobilizeAmericaEvent.objects.filter(id=payload["id"]).exists()

    event, was_created = MobilizeAmericaEvent.objects.update_or_create_from_json(payload)
    assert was_created
    __check_ma_event_matches_payload(
        event, payload, virtual_expected=False, state_expected=ca_zip5.state
    )

    # Update pass: dropping the location makes the event virtual.
    del payload["location"]
    payload["title"] = "Changed title"
    event, was_created = MobilizeAmericaEvent.objects.update_or_create_from_json(payload)
    assert not was_created
    __check_ma_event_matches_payload(
        event, payload, virtual_expected=True, state_expected=None
    )
class ThrottleTestView(GenericAPIView):
    """Minimal view wired with two prefix-scoped throttles for composition tests.

    Fix: removed a `get_throttles()` override that only returned
    `super().get_throttles()` — a pure pass-through with no effect.
    """

    throttle_classes = [FooThrottle, BarThrottle]
    throttle_scope = "test"
    permission_classes = [permissions.AllowAny]

    def get(self, r):
        # Body is irrelevant; the test only cares about 204 vs. 429.
        return Response(None, status=204)
-------------------------------------------------------------------------------- /supportal/tests/test_urls.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from supportal.tests import utils 4 | 5 | 6 | @pytest.mark.django_db 7 | def test_index_auth(client): 8 | """The index (/) endpoint does not require authentication""" 9 | res = client.get("/") 10 | assert res.status_code == 200 11 | 12 | 13 | @pytest.mark.django_db 14 | def test_me_fails_no_token(client): 15 | """/me fails if not authenticated""" 16 | res = client.get("/v1/me") 17 | assert res.status_code == 401 18 | 19 | 20 | @pytest.mark.django_db 21 | def test_me_fails_invalid_token(client, user): 22 | """/me fails if receiving an expired token""" 23 | token = utils.create_id_jwt(user, expires_in_seconds=-100) 24 | res = client.get("/v1/me", HTTP_AUTHORIZATION=utils.auth_header(token)) 25 | assert res.status_code == 401 26 | 27 | 28 | @pytest.mark.django_db 29 | def test_me_success(client, auth): 30 | """/me succeeds if properly authenticated""" 31 | res = client.get("/v1/me", **auth) 32 | assert res.status_code == 200 33 | -------------------------------------------------------------------------------- /supportal/tests/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import time 4 | import uuid 5 | 6 | import jwt 7 | from django.conf import settings 8 | from jwt.algorithms import RSAAlgorithm 9 | 10 | # RSA key pair copied from pyjwt test: 11 | # https://github.com/jpadilla/pyjwt/tree/master/tests/keys 12 | 13 | with open( 14 | os.path.join(settings.BASE_DIR, "supportal", "tests", "jwk_rsa_private_key.json") 15 | ) as f: 16 | JWK_PRIVATE_KEY = json.load(f) 17 | 18 | 19 | def auth_header(token): 20 | return b"Bearer " + token 21 | 22 | 23 | def create_id_jwt(user, expires_in_seconds=3600, key_id=None): 24 | """Create a Cognito id token for a User""" 25 | now_unix_ts = 
int(time.time()) 26 | return _create_jwt( 27 | { 28 | "token_use": "id", 29 | "iss": settings.COGNITO_USER_POOL_URL, 30 | "sub": user.username, 31 | "aud": settings.COGNITO_USER_LOGIN_CLIENT_ID, 32 | "event_id": str(uuid.uuid4()), 33 | "auth_time": now_unix_ts, 34 | "iat": now_unix_ts, 35 | "exp": now_unix_ts + expires_in_seconds, 36 | "email_verified": True, 37 | "email": user.email, 38 | "cognito:username": user.username, 39 | }, 40 | key_id=key_id, 41 | ) 42 | 43 | 44 | def create_access_jwt(client_id, expires_in_seconds=3600): 45 | """Create a Cognito access token for a superuser""" 46 | now_unix_ts = int(time.time()) 47 | return _create_jwt( 48 | { 49 | "token_use": "access", 50 | "iss": settings.COGNITO_USER_POOL_URL, 51 | "sub": client_id, 52 | "event_id": str(uuid.uuid4()), 53 | "auth_time": now_unix_ts, 54 | "iat": now_unix_ts, 55 | "exp": now_unix_ts + expires_in_seconds, 56 | "client_id": client_id, 57 | } 58 | ) 59 | 60 | 61 | def _create_jwt(payload, key_id=None): 62 | if key_id is None: 63 | key_id = JWK_PRIVATE_KEY["kid"] 64 | 65 | key = json.dumps(JWK_PRIVATE_KEY) 66 | secret = RSAAlgorithm.from_jwk(key) 67 | return jwt.encode( 68 | payload, secret, algorithm="RS256", headers={"kid": key_id, "alg": "RS256"} 69 | ) 70 | 71 | 72 | def id_auth(user): 73 | return {"HTTP_AUTHORIZATION": auth_header(create_id_jwt(user))} 74 | -------------------------------------------------------------------------------- /supportal/throttles.py: -------------------------------------------------------------------------------- 1 | from django.core.exceptions import ImproperlyConfigured 2 | from rest_framework.throttling import ScopedRateThrottle 3 | 4 | 5 | class PrefixScopedRateThrottle(ScopedRateThrottle): 6 | """A Composable ScopedRateThrottle 7 | 8 | The Problem: 9 | While DRF allows you to use multiple throttle classes, it's not possible to use 10 | multiple ScopedRateThrottles, for example to limit a specific endpoint to X 11 | requests a minute and Y requests a day, 
because they would end up using the 12 | same cache key. This class allows you to supply a prefix for ScopedRateThrottle's 13 | cache keys, and therefore to use multiple throttles without cache key collisions. 14 | 15 | Usage: 16 | Subclass this class and provide a `scope_prefix`. When defining throttle rates in 17 | in settings.py, prefix the view's throttle scope with your throttle's prefix separated 18 | by a period: "{prefix}.{view_scope}". 19 | 20 | Note: Throttle rates need to be defined for all views that have throttle_scope defined. 21 | To change this behavior, override ScopedRateThrottle#allow_request and have it handle 22 | errors thrown by the `get_rate` function. 23 | 24 | 25 | Example: 26 | # define your throttle in path/to/throttles.py 27 | class FooThrottle(PrefixScopedRateThrottle): 28 | scope_prefix = "foo" 29 | 30 | class BarThrottle(PrefixScopedRateThrottle): 31 | scope_prefix = "bar" 32 | 33 | # In settings.py: 34 | REST_FRAMEWORK = { 35 | 'DEFAULT_THROTTLE_CLASSES': [ 36 | 'path.to.throttles.FooThrottle', 37 | 'path.to.throttles.BarThrottle', 38 | ], 39 | 'DEFAULT_THROTTLE_RATES': { 40 | 'foo.view_1_scope': '100/day', 41 | 'bar.view_1_scope': '1/minute', 42 | 'foo.view_2_scope': '200/day', 43 | 'bar.view_2_scope': '2/minute', 44 | } 45 | } 46 | """ 47 | 48 | scope_prefix = None 49 | 50 | def __init__(self): 51 | if not getattr(self, "scope_prefix", None): 52 | raise ImproperlyConfigured( 53 | f"Missing scope prefix for {self.__class__.__name__}" 54 | ) 55 | super().__init__() 56 | 57 | def get_rate(self): 58 | """Determine the string representation of the allowed request rate in DEFAULT_THROTTLES""" 59 | throttle_rate_key = f"{self.scope_prefix}.{self.scope}" 60 | try: 61 | return self.THROTTLE_RATES[throttle_rate_key] 62 | except KeyError: 63 | raise ImproperlyConfigured( 64 | f"No default throttle rate set for {throttle_rate_key} scope" 65 | ) 66 | 67 | def get_cache_key(self, request, view): 68 | original_cache_key = 
super().get_cache_key(request, view) 69 | return f"{self.scope_prefix}_{original_cache_key}" 70 | 71 | 72 | class HourScopedRateThrottle(PrefixScopedRateThrottle): 73 | scope_prefix = "hour" 74 | 75 | def parse_rate(self, rate): 76 | num_requests, duration = super().parse_rate(rate) 77 | if duration != 3600: 78 | raise ImproperlyConfigured( 79 | "HourScopedRateThrottle only accepts rates in hours" 80 | ) 81 | return num_requests, duration 82 | 83 | 84 | class DayScopedRateThrottle(PrefixScopedRateThrottle): 85 | scope_prefix = "day" 86 | 87 | def parse_rate(self, rate): 88 | num_requests, duration = super().parse_rate(rate) 89 | if duration != 86400: 90 | raise ImproperlyConfigured( 91 | "DayScopedRateThrottle only accepts rates in days" 92 | ) 93 | return num_requests, duration 94 | -------------------------------------------------------------------------------- /supportal/urls.py: -------------------------------------------------------------------------------- 1 | """supportal URL Configuration 2 | 3 | The `urlpatterns` list routes URLs to views. For more information please see: 4 | https://docs.djangoproject.com/en/2.2/topics/http/urls/ 5 | Examples: 6 | Function views 7 | 1. Add an import: from my_app import views 8 | 2. Add a URL to urlpatterns: path('', views.home, name='home') 9 | Class-based views 10 | 1. Add an import: from other_app.views import Home 11 | 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') 12 | Including another URLconf 13 | 1. Import the include() function: from django.urls import include, path 14 | 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) 15 | """ 16 | import json 17 | 18 | from django.conf import settings 19 | from django.contrib import admin 20 | from django.http import HttpResponseNotFound 21 | from django.urls import include, path, re_path 22 | from rest_framework import routers 23 | 24 | from supportal.app import views 25 | from supportal.app.views.email_views import UnsubscribeView 26 | from supportal.app.views.invite_views import InviteViewSet, VerifyView 27 | from supportal.app.views.person_views import PersonViewSet 28 | from supportal.app.views.user_views import FullUserViewSet, MeView 29 | from supportal.app.views.vol_prospect_views import ( 30 | VolProspectAssignmentViewSet, 31 | VolProspectContactEventViewSet, 32 | ) 33 | from supportal.docs import documentation_urls 34 | from supportal.shifter.views import ( 35 | EarlyStateView, 36 | EventSignupView, 37 | MobilizeAmericaEventView, 38 | RecommendedEventView, 39 | USZip5View, 40 | ) 41 | 42 | app_name = "supportal" 43 | 44 | router = routers.DefaultRouter() 45 | router.register(r"people", PersonViewSet) 46 | router.register(r"users", FullUserViewSet) 47 | router.register( 48 | r"vol_prospect_assignments", 49 | VolProspectAssignmentViewSet, 50 | basename="volprospectassignment", 51 | ) 52 | # should events be nested under assignments? 
53 | router.register( 54 | r"vol_prospect_contact_events", 55 | VolProspectContactEventViewSet, 56 | basename="volprospectcontactevent", 57 | ) 58 | 59 | router.register(r"invites", InviteViewSet, basename="invites") 60 | 61 | app_urls = router.urls + [ 62 | path("me", MeView.as_view()), 63 | path("unsubscribe", UnsubscribeView.as_view(), name="unsubscribe"), 64 | path("verify", VerifyView.as_view()), 65 | ] 66 | 67 | shifter_urls = [ 68 | path("event_signups", EventSignupView.as_view()), 69 | path("early_states", EarlyStateView.as_view()), 70 | path("events/", MobilizeAmericaEventView.as_view()), 71 | path("recommended_events", RecommendedEventView.as_view()), 72 | re_path("^zip5s/(?P\d+)$", USZip5View.as_view()), 73 | ] 74 | 75 | api_urls = [ 76 | path("", views.index), 77 | re_path("^(?P(v1))/", include(app_urls)), 78 | re_path("^(?P(v1))/shifter/", include(shifter_urls)), 79 | ] 80 | 81 | 82 | urlpatterns = documentation_urls 83 | if settings.DJANGO_ADMIN_ONLY: 84 | # Production, admin-only instance 85 | urlpatterns += [path("", admin.site.urls)] 86 | elif settings.DJANGO_ADMIN_ENABLED: 87 | # Development, full app with the admin interface 88 | urlpatterns += [path("admin/", admin.site.urls)] + api_urls 89 | else: 90 | # Production, full app no admin 91 | urlpatterns += api_urls 92 | -------------------------------------------------------------------------------- /supportal/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for supportal project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/ 8 | """ 9 | import os 10 | 11 | from django.core.wsgi import get_wsgi_application 12 | 13 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "supportal.settings") 14 | 15 | application = get_wsgi_application() 16 | --------------------------------------------------------------------------------