├── Brewfile ├── .husky └── pre-commit ├── server ├── server.js ├── db │ ├── _migration.stub.js │ ├── _seed.stub.js │ ├── util.js │ ├── index.js │ ├── migrations │ │ ├── 20210729063333_create-variants-table.js │ │ ├── 20210514142754_create-volunteeers-table.js │ │ ├── 20210729053119_create-transactions-table.js │ │ ├── 20210514201314_create-sent-letters-table.js │ │ ├── 20211230165819_harden-volunteers-table.js │ │ ├── 20211230165827_harden-letter_versions-table.js │ │ ├── 20210513215151_create-campaigns-table.js │ │ ├── 20211224223204_match-production-letter_versions-table.js │ │ ├── 20210514145630_create-letter-versions-table.js │ │ ├── 20211230165803_harden-campaigns-table.js │ │ ├── 20211222210238_match-production-campaigns-table.js │ │ └── 20211224223012_match-production-volunteers-table.js │ └── seeds │ │ └── development │ │ ├── seed-campaigns-table.js │ │ └── seed-letter_versions-table.js ├── auth │ ├── messages │ │ └── messages.service.js │ ├── check-jwt.js │ └── config │ │ └── env.dev.js ├── routes │ └── api │ │ ├── letter_versions.js │ │ ├── authentication.js │ │ ├── campaigns.js │ │ ├── checkout.js │ │ ├── representatives.js │ │ └── lob.js ├── __tests__ │ └── integration │ │ ├── representatives.test.js │ │ └── lob.test.js └── app.js ├── .prettierrc ├── README.md ├── .github ├── CODEOWNERS ├── dependabot.yml ├── workflows │ ├── lint.yml │ ├── check-formatting.yml │ ├── workflow-lint.yml │ ├── codeql-analysis.yml │ ├── integration-tests.yaml │ └── scorecards-analysis.yml ├── ISSUE_TEMPLATE │ ├── blue design enhancement.md │ └── bug.md └── CONTRIBUTING.md ├── .eslintrc.json ├── .devcontainer ├── on-create-command.sh ├── Dockerfile ├── first-run-notice.txt ├── post-create-command.sh ├── docker-compose.yml └── devcontainer.json ├── script ├── bootstrap ├── drop-db.js └── create-db.js ├── .env.example ├── LICENSE ├── package.json ├── knexfile.js └── .gitignore /Brewfile: -------------------------------------------------------------------------------- 1 | brew "node" 2 | brew "postgresql" 3 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . 
"$(dirname "$0")/_/husky.sh" 3 | 4 | npm run precommit 5 | -------------------------------------------------------------------------------- /server/server.js: -------------------------------------------------------------------------------- 1 | const app = require('./app') 2 | const port = process.env.PORT || 5000 3 | 4 | app.listen(port, () => console.log(`Server started on port ${port}`)) 5 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "none", 3 | "useTabs": false, 4 | "tabWidth": 2, 5 | "semi": false, 6 | "singleQuote": true, 7 | "arrowParens": "always" 8 | } 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # **NOTE: THIS REPO IS NOW A PART OF https://github.com/ProgramEquity/amplify** 2 | # **TO MAKE MODIFICATIONS TO THE BACK END OF THE AMPLIFY APP** 3 | # **PLEASE MODIFY THE `server` DIR IN THE ABOVE REPO** 4 | -------------------------------------------------------------------------------- /server/db/_migration.stub.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | async up(knex) { 3 | await knex.schema.doSomethingForReal() 4 | }, 5 | 6 | async down(knex) { 7 | await knex.schema.doSomethingForReal() 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | 2 | // All PRs will require one dev team approval. To give another team PR approval add them below 3 | * @ProgramEquity/devs 4 | 5 | // Design & Devs should be able to approve doc changes 6 | *.md @ProgramEquity/devs @ProgramEquity/design 7 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "commonjs": true, 4 | "es2021": true, 5 | "node": true, 6 | "jest": true 7 | }, 8 | "extends": ["eslint:recommended", "prettier"], 9 | "parserOptions": { 10 | "ecmaVersion": 12 11 | }, 12 | "rules": {} 13 | } 14 | -------------------------------------------------------------------------------- /.devcontainer/on-create-command.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | # Copy our welcome message 6 | if [ -f "./.devcontainer/first-run-notice.txt" ]; then 7 | sudo cp --force ./.devcontainer/first-run-notice.txt /usr/local/etc/vscode-dev-containers/first-run-notice.txt 8 | fi 9 | -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # Update 'VARIANT' to pick a default version of Node.js: 16, 14, 12. 2 | # Append -bullseye to pin to local arm64/Apple Silicon. 3 | # Append -buster to pin to Debian. 4 | ARG VARIANT=16-buster 5 | 6 | FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT} 7 | -------------------------------------------------------------------------------- /.devcontainer/first-run-notice.txt: -------------------------------------------------------------------------------- 1 | 👋 Welcome to developing ProgramEquity/amplify-back-end in GitHub Codespaces! 
2 | 3 | 👢 Your environment is all set up. 4 | 5 | 🚀 To get started, start the server with the VS Code task (⇧⌘B) or 6 | run `npm start`. 7 | Verify at `/api/campaigns` 8 | 9 | ❓ Having trouble? Visit #devs in Slack for support! 10 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directory: '/' 5 | schedule: 6 | interval: monthly 7 | open-pull-requests-limit: 20 # default is 5 8 | 9 | - package-ecosystem: 'github-actions' 10 | directory: '/' 11 | schedule: 12 | interval: monthly 13 | open-pull-requests-limit: 10 # default is 5 14 | -------------------------------------------------------------------------------- /server/db/_seed.stub.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | async seed(knex) { 3 | // Deletes ALL existing entries 4 | await knex('table_name').del() 5 | 6 | // Inserts seed entries 7 | await knex('table_name').insert([ 8 | { id: 1, colName: 'rowValue1' }, 9 | { id: 2, colName: 'rowValue2' }, 10 | { id: 3, colName: 'rowValue3' } 11 | ]) 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /server/auth/messages/messages.service.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Service Methods 3 | */ 4 | 5 | const getPublicMessage = () => { 6 | return { 7 | message: "The API doesn't require an access token to share this message." 8 | } 9 | } 10 | 11 | const getProtectedMessage = () => { 12 | return { 13 | message: 'The API successfully validated your access token.' 14 | } 15 | } 16 | 17 | module.exports = { 18 | getPublicMessage, 19 | getProtectedMessage 20 | } 21 | -------------------------------------------------------------------------------- /server/db/util.js: -------------------------------------------------------------------------------- 1 | function getEnv() { 2 | let targetEnv = process.env.NODE_ENV || 'development' 3 | // Prefer `--env test` and `--env=test` command line arguments if provided 4 | process.argv.forEach((val, i) => { 5 | if (val === '--env' && process.argv[i + 1]) { 6 | targetEnv = process.argv[i + 1] 7 | } else if (val.startsWith('--env=') && val.length > 6) { 8 | targetEnv = val.slice(6) 9 | } 10 | }) 11 | return targetEnv 12 | } 13 | 14 | module.exports = { 15 | getEnv 16 | } 17 | -------------------------------------------------------------------------------- /server/auth/check-jwt.js: -------------------------------------------------------------------------------- 1 | const jwt = require('express-jwt') 2 | const jwksRsa = require('jwks-rsa') 3 | const { domain, audience } = require('./config/env.dev') 4 | 5 | const checkJwt = jwt({ 6 | secret: jwksRsa.expressJwtSecret({ 7 | cache: true, 8 | rateLimit: true, 9 | jwksRequestsPerMinute: 5, 10 | jwksUri: `https://${domain}/.well-known/jwks.json` 11 | }), 12 | 13 | audience: audience, 14 | issuer: `https://${domain}/`, 15 | algorithms: ['RS256'] 16 | }) 17 | 18 | module.exports = { 19 | checkJwt 20 | } 21 | -------------------------------------------------------------------------------- /server/routes/api/letter_versions.js: -------------------------------------------------------------------------------- 1 | const express = require('express') 2 | const { createClient } = require('../../db') 3 | const router = express.Router() 4 | const db = 
createClient() 5 | 6 | router.get('/:campaignId', async (req, res) => { 7 | const campaignId = req.params.campaignId 8 | try { 9 | const result = await db('letter_versions').where('campaign_id', campaignId) 10 | res.send(result) 11 | } catch (error) { 12 | console.log(error) 13 | res.status(500).send({ error: 'Whoops' }) 14 | } 15 | }) 16 | 17 | module.exports = router 18 | -------------------------------------------------------------------------------- /server/db/index.js: -------------------------------------------------------------------------------- 1 | const knex = require('knex') 2 | const { getEnv } = require('./util') 3 | const knexfile = require('../../knexfile') 4 | 5 | function createClient(config) { 6 | return knex(config || getConfig()) 7 | } 8 | 9 | function getConfig(env) { 10 | const targetEnv = env || getEnv() 11 | const config = knexfile[targetEnv] 12 | 13 | if (!config) { 14 | throw new Error( 15 | `No config found in "knexfile.js" for environment "${targetEnv}"` 16 | ) 17 | } 18 | 19 | return config 20 | } 21 | 22 | module.exports = { 23 | createClient, 24 | getConfig, 25 | getEnv 26 | } 27 | -------------------------------------------------------------------------------- /server/db/migrations/20210729063333_create-variants-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'variants' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | await knex.schema.createTable(tableName, (table) => { 6 | table.increments() 7 | table.float('price') 8 | table.string('sku') 9 | table.integer('quantity').defaultTo(1) 10 | table.string('description') 11 | table.integer('campaign_id').notNullable() 12 | table.timestamps() 13 | 14 | table.foreign('campaign_id').references('campaigns.id') 15 | }) 16 | }, 17 | 18 | async down(knex) { 19 | // Drop the table 20 | await knex.schema.dropTable(tableName) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /server/db/migrations/20210514142754_create-volunteeers-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'volunteers' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Create the table 6 | await knex.schema.createTable(tableName, (table) => { 7 | // Auto-incrementing non-nullable unsigned integer primary key "id" field 8 | table.increments() 9 | 10 | // Simple fields 11 | table.string('name').notNullable() 12 | table.string('email').notNullable() 13 | table.text('physical_address').notNullable() 14 | 15 | // Unique indexes 16 | table.unique(['email']) 17 | }) 18 | }, 19 | 20 | async down(knex) { 21 | // Drop the table 22 | await knex.schema.dropTable(tableName) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /server/routes/api/authentication.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config() 2 | const express = require('express') 3 | const router = express.Router() 4 | 5 | const { 6 | getPublicMessage, 7 | getProtectedMessage 8 | } = require('../../auth/messages/messages.service') 9 | const { checkJwt } = require('../../auth/check-jwt') 10 | 11 | router.get('/isAuthenticated', checkJwt, (req, res) => { 12 | res.send(true) 13 | }) 14 | 15 | router.get('/public-message', (req, res) => { 16 | const message = getPublicMessage() 17 | res.status(200).send(message) 18 | }) 19 | 20 | router.get('/protected-message', checkJwt, (req, res) => { 21 | const message = getProtectedMessage() 22 | 
res.status(200).send(message) 23 | }) 24 | 25 | module.exports = router 26 | -------------------------------------------------------------------------------- /.devcontainer/post-create-command.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | # Install all dependencies 6 | npm install 7 | 8 | # Setup the database 9 | npm run db:create 10 | npm run db:migrate 11 | npm run db:seed 12 | 13 | # TODO? 14 | # Clone the frontend repo 15 | 16 | #SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)" 17 | #WORKSPACES_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)" 18 | #REPO_NAME="amplify-front-end" 19 | #REPO_OWNER="ProgramEquity" 20 | 21 | #cd "${WORKSPACES_DIR}" 22 | #if [ ! -d "$REPO_NAME" ]; then 23 | # git clone "https://github.com/$REPO_OWNER/$REPO_NAME" 24 | #else 25 | # echo "Already cloned '$REPO_OWNER/$REPO_NAME' into '$REPO_NAME' directory" 26 | #fi 27 | 28 | # TODO? 29 | # Start the server 30 | #npm start 31 | -------------------------------------------------------------------------------- /script/bootstrap: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | command -v brew >/dev/null 2>&1 || { 6 | echo "==> Installing Homebrew..." 7 | bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" 8 | } 9 | 10 | echo "==> Updating Homebrew..." 11 | brew update 12 | 13 | brew bundle check >/dev/null 2>&1 || { 14 | echo "==> Installing Homebrew dependencies..." 15 | brew bundle 16 | } 17 | 18 | echo "==> Starting PostgreSQL service..." 19 | brew services start postgres 20 | 21 | echo "==> Installing Node dependencies..." 22 | npm install 23 | 24 | echo "==> Creating and setting up databases..." 25 | npm run db:create 26 | 27 | echo "" 28 | echo '‼️ Create a ".env" file based on the ".env.example" file, then enter all of your environment variables.' 
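# For example, one way to do that (just a suggestion, assuming you are working from the repository root) is to copy the provided template and then fill in the values it documents:
#   cp .env.example .env
#   # ...then open .env in your editor and supply each key described in .env.example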
29 | -------------------------------------------------------------------------------- /server/routes/api/campaigns.js: -------------------------------------------------------------------------------- 1 | const express = require('express') 2 | const { createClient } = require('../../db') 3 | const router = express.Router() 4 | const db = createClient() 5 | 6 | router.get('/:id', async (req, res) => { 7 | const id = req.params.id 8 | try { 9 | const result = await db('campaigns').where('id', id) 10 | res.send(result) 11 | } catch (error) { 12 | console.log(error) 13 | res.status(500).send({ error: 'Whoops' }) 14 | } 15 | }) 16 | 17 | router.get('/', async (req, res) => { 18 | try { 19 | const result = await db.select('*').from('campaigns') 20 | console.log(result) 21 | res.send(result) 22 | } catch (error) { 23 | console.log(error) 24 | res.status(500).send({ error: 'Whoops' }) 25 | } 26 | }) 27 | 28 | module.exports = router 29 | -------------------------------------------------------------------------------- /server/db/migrations/20210729053119_create-transactions-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'transactions' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | await knex.schema.createTable(tableName, (table) => { 6 | table.increments() 7 | table.string('stripe_transaction_id').notNullable() 8 | table.float('amount').notNullable() 9 | table.string('currency').notNullable() 10 | table.string('email').notNullable() 11 | table.string('status') 12 | table.string('payment_method').notNullable() 13 | table.string('payment_method_type').notNullable() 14 | table.integer('stripe_payment_created') 15 | table.string('stripe_client_secret') 16 | table.timestamps() 17 | }) 18 | }, 19 | 20 | async down(knex) { 21 | await knex.schema.dropTable(tableName) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint code 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | 7 | permissions: 8 | contents: read 9 | 10 | # This allows a subsequently queued workflow run to interrupt previous runs 11 | concurrency: 12 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | lint: 17 | runs-on: ubuntu-latest 18 | timeout-minutes: 2 19 | steps: 20 | - name: Check out repo 21 | uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 22 | 23 | - name: Setup node 24 | uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561 25 | with: 26 | node-version: 16.x 27 | cache: npm 28 | 29 | - name: Install dependencies 30 | run: npm ci 31 | 32 | - name: Run linter 33 | run: npm run lint:check 34 | -------------------------------------------------------------------------------- /server/__tests__/integration/representatives.test.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config() 2 | const app = require('../../app') 3 | const request = require('supertest') 4 | 5 | afterEach(() => { 6 | jest.clearAllMocks() 7 | }) 8 | 9 | afterAll(async () => { 10 | await new Promise((resolve) => setTimeout(() => resolve(), 500)) // avoid jest open handle error 11 | }) 12 | 13 | describe('/api/representatives/:zipcode', () => { 14 | test('returns 200 status', async () => { 15 | const response = await 
request(app).get('/api/representatives/92107') 16 | expect(response.status).toBe(200) 17 | }) 18 | test('returns 200 status for a ZIP+4 zipcode', async () => { 19 | const response = await request(app).get('/api/representatives/92107-1234') 20 | expect(response.status).toBe(200) 21 | }) 22 | test('returns 400 status for an invalid zipcode', async () => { 23 | const response = await request(app).get('/api/representatives/cat') 24 | expect(response.status).toBe(400) 25 | }) 26 | }) 27 | -------------------------------------------------------------------------------- /.github/workflows/check-formatting.yml: -------------------------------------------------------------------------------- 1 | name: Check formatting 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | 7 | permissions: 8 | contents: read 9 | 10 | # This allows a subsequently queued workflow run to interrupt previous runs 11 | concurrency: 12 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | check: 17 | runs-on: ubuntu-latest 18 | timeout-minutes: 2 19 | steps: 20 | - name: Check out repo 21 | uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 22 | 23 | - name: Setup node 24 | uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561 25 | with: 26 | node-version: 16.x 27 | cache: npm 28 | 29 | - name: Install dependencies 30 | run: npm ci 31 | 32 | - name: Check code style 33 | run: npm run format:check 34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/blue design enhancement.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: UI/UX Design Task 3 | about: Template to create accessibility issues that need to be designed for 4 | title: 'UI/UX task' 5 | labels: accessibility 6 | assignees: '@evelynluu2' 7 | 8 | --- 9 | 10 | **What screen is this?** 11 | 12 | **Which component?** 13 | 14 | **What is the change proposed? 
(add a Figma screenshot, follow the workflow here)** 15 | 16 | **Which constituent accessibility concern does this address?** 17 | - [ ] UI/UX accessibility 18 | - [ ] better mobile design 19 | 20 | **What are frontend tasks?** (if there are any tasks needed outside of frontend, pick a different color) 21 | 22 | _List files that need to be changed next to task_ 23 | 24 | **CC:** @frontend-team member, @frontend-coordinator 25 | 26 | 27 | -------------------------- 28 | For Coordinator 29 | - [ ] add appropriate labels: "good-first-issue", "frontend", "screen label" 30 | - [ ] assign time label 31 | 32 | 33 | -------------------------------------------------------------------------------- /server/auth/config/env.dev.js: -------------------------------------------------------------------------------- 1 | const dotenv = require('dotenv') 2 | 3 | dotenv.config() 4 | 5 | const audience = process.env.AUTH0_AUDIENCE 6 | const domain = process.env.AUTH0_DOMAIN 7 | const serverPort = process.env.SERVER_PORT 8 | const clientOriginUrl = process.env.CLIENT_ORIGIN_URL 9 | 10 | if (!audience) { 11 | throw new Error( 12 | '.env is missing the definition of an AUTH0_AUDIENCE environment variable' 13 | ) 14 | } 15 | 16 | if (!domain) { 17 | throw new Error( 18 | '.env is missing the definition of an AUTH0_DOMAIN environment variable' 19 | ) 20 | } 21 | 22 | if (!serverPort) { 23 | throw new Error( 24 | '.env is missing the definition of a SERVER_PORT environment variable' 25 | ) 26 | } 27 | 28 | if (!clientOriginUrl) { 29 | throw new Error( 30 | '.env is missing the definition of a CLIENT_ORIGIN_URL environment variable' 31 | ) 32 | } 33 | 34 | const clientOrigins = ['http://localhost:4040'] 35 | 36 | module.exports = { 37 | audience, 38 | domain, 39 | serverPort, 40 | clientOriginUrl, 41 | clientOrigins 42 | } 43 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | ########################### 2 | # Example `.env` file # 3 | ########################### 4 | 5 | # API key for the Google Civic API 6 | # See https://github.com/Ally-Guide/amplify-back-end/blob/main/.github/CONTRIBUTING.md#using-the-google-civic-information-api-locally 7 | CIVIC_API_KEY= 8 | 9 | # Production environment API key for the Lob API 10 | # This value can be found in Heroku application config var of the same name 11 | LOB_API_KEY= 12 | 13 | # Test environment API key for the Lob API 14 | # This is only required for running the integration tests successfully! 15 | # This value can be found in Heroku application config var of the same name 16 | TEST_LOB_API_KEY= 17 | 18 | # Auth0 authentication parameters 19 | # For local development, you can just use these literal nonsense values for now 20 | SERVER_PORT=6060 21 | CLIENT_ORIGIN_URL=http://localhost:4040 22 | AUTH0_AUDIENCE=your_Auth0_identifier_value 23 | AUTH0_DOMAIN=your_Auth0_domain 24 | 25 | # Stripe 26 | STRIPE_SECRET_KEY=sk_test_51IravfFqipIA40A3FOW6EzlXlJiXjL9V0FXKfb9n7cxh25Ww9QMA9aWwCzTSQscBOQFcB7s1TI6UCtW1JG83Hz1z000Sg2vSIr 27 | -------------------------------------------------------------------------------- /.github/workflows/workflow-lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint workflows 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | paths: 7 | - '.github/workflows/*.yml' 8 | - '.github/workflows/*.yaml' 9 | 10 | permissions: 11 | contents: read 12 | 13 | # This allows a subsequently queued workflow run to interrupt previous runs 14 | concurrency: 15 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | lint-workflows: 20 | runs-on: ubuntu-latest 21 | timeout-minutes: 2 22 | steps: 23 | - name: Check out repo 24 | uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 25 | 26 | - name: Run linter 27 | uses: cschleiden/actions-linter@caffd707beda4fc6083926a3dff48444bc7c24aa 28 | with: 29 | # ".github/workflows/scorecards-analysis.yml" is an exception as `on: branch_protection_rule` is not recognized yet 30 | workflows: '[".github/workflows/*.yml", ".github/workflows/*.yaml", "!.github/workflows/scorecards-analysis.yml"]' 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Ally Guide 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /server/db/seeds/development/seed-campaigns-table.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | async seed(knex) { 3 | // Deletes ALL existing entries 4 | await knex('letter_versions').del() // Because these have foreign keys linked to the campaigns table 5 | await knex('campaigns').del() 6 | 7 | // Inserts seed entries 8 | await knex('campaigns').insert([ 9 | { 10 | id: 1, 11 | organization: 'M4BL', 12 | name: 'The Breath Act', 13 | cause: 'Civic Rights', 14 | type: 'Grant', 15 | page_url: 'www.thebreatheact.org', 16 | letters_counter: 0 17 | }, 18 | { 19 | id: 2, 20 | organization: 'AAAJ', 21 | name: 'AAAJ', 22 | cause: 'Education', 23 | type: 'Accelerator', 24 | page_url: 'www.aaaj.org ', 25 | letters_counter: 0 26 | }, 27 | { 28 | id: 3, 29 | organization: 'TheSoapBox Project', 30 | name: 'ClimateCare', 31 | cause: 'Climate Justice', 32 | type: 'Starter', 33 | page_url: 'www.thesoapboxproject.com', 34 | letters_counter: 0 35 | } 36 | ]) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: 'CodeQL' 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | # The PR base branches below must be a subset of the push branches above 9 | branches: 10 | - main 11 | # Only execute on PRs if relevant files changed 12 | paths: 13 | - '**/*.js' 14 | - '.github/workflows/codeql-analysis.yml' 15 | schedule: 16 | - cron: '27 1 * * 0' 17 | 18 | permissions: 19 | actions: read 20 | contents: read 21 | security-events: write 22 | 23 | # This allows a subsequently queued workflow run to interrupt previous runs 24 | concurrency: 25 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 26 | cancel-in-progress: true 27 | 28 | jobs: 29 | analyze: 30 | runs-on: ubuntu-latest 31 | 32 | steps: 33 | - name: Checkout repository 34 | uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 35 | 36 | # Initializes the CodeQL tools for scanning. 
37 | - name: Initialize CodeQL 38 | uses: github/codeql-action/init@2b46439dd5477d8a1659811cdc500d35e601a1cb 39 | with: 40 | languages: javascript 41 | 42 | - name: Perform CodeQL Analysis 43 | uses: github/codeql-action/analyze@2b46439dd5477d8a1659811cdc500d35e601a1cb 44 | -------------------------------------------------------------------------------- /script/drop-db.js: -------------------------------------------------------------------------------- 1 | const { createClient, getConfig, getEnv } = require('../server/db') 2 | 3 | // See https://www.postgresql.org/docs/13/errcodes-appendix.html 4 | const INVALID_CATALOG_NAME_ERROR = '3D000' 5 | 6 | destroyDatabase() 7 | 8 | async function destroyDatabase() { 9 | const targetEnv = getEnv() 10 | if (targetEnv === 'production') { 11 | throw new Error('This script should not be used in production!') 12 | } 13 | 14 | const config = getConfig(targetEnv) 15 | await dropDatabase(config) 16 | } 17 | 18 | async function dropDatabase(config) { 19 | const { database } = config.connection 20 | let db 21 | 22 | try { 23 | // Connect with system database selected 24 | db = createClient({ 25 | ...config, 26 | connection: { 27 | ...config.connection, 28 | database: 'postgres' 29 | } 30 | }) 31 | 32 | // Drop the database if it exists 33 | await db.raw(`DROP DATABASE ${database}`) 34 | console.log(`Dropped database "${database}"!`) 35 | } catch (error) { 36 | if (error.code === INVALID_CATALOG_NAME_ERROR) { 37 | console.warn(`Error dropping database "${database}": it does not exist!`) 38 | } else { 39 | console.error(`Error dropping database "${database}": ${error.message}`) 40 | throw error 41 | } 42 | } finally { 43 | // Disconnect 44 | await db.destroy() 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /server/db/migrations/20210514201314_create-sent-letters-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'sent_letters' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Create the table 6 | await knex.schema.createTable(tableName, (table) => { 7 | // Auto-incrementing non-nullable unsigned integer primary key "id" field 8 | table.increments() 9 | 10 | // Foreign key references 11 | table.integer('letter_version_id').unsigned().notNullable() 12 | table.foreign('letter_version_id').references('letter_versions.id') 13 | 14 | table.integer('volunteer_id').unsigned().notNullable() 15 | table.foreign('volunteer_id').references('volunteers.id') 16 | 17 | // The Lob API response ID, for tracking and management purposes 18 | table.string('request_id').notNullable() 19 | 20 | // Timestamp field 21 | table.timestamp('requested_at').notNullable().defaultTo(knex.fn.now()) 22 | 23 | // Simple fields 24 | table.string('rep_name').notNullable() 25 | table.text('rep_address').notNullable() 26 | 27 | // Indexes 28 | table.index(['letter_version_id']) 29 | table.index(['volunteer_id']) 30 | table.index(['rep_name']) 31 | 32 | // Unique indexes 33 | table.unique(['request_id']) 34 | }) 35 | }, 36 | 37 | async down(knex) { 38 | // Drop the table 39 | await knex.schema.dropTable(tableName) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /server/db/migrations/20211230165819_harden-volunteers-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'volunteers' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Alter the table 6 | await 
knex.schema.alterTable(tableName, function (table) { 7 | // Add NOT NULL constraints to simple fields 8 | table.string('email').notNullable().alter() 9 | table.string('first_name').notNullable().alter() 10 | table.string('last_name').notNullable().alter() 11 | table.string('street_address').notNullable().alter() 12 | table.string('city').notNullable().alter() 13 | table.string('state').notNullable().alter() 14 | table.string('zip').notNullable().alter() 15 | 16 | // Add unique indexes 17 | table.unique(['email']) 18 | }) 19 | }, 20 | 21 | async down(knex) { 22 | // Alter the table 23 | await knex.schema.alterTable(tableName, function (table) { 24 | // Drop unique indexes 25 | table.dropUnique(['email']) 26 | 27 | // Drop NOT NULL constraints from simple fields 28 | table.string('email').nullable().alter() 29 | table.string('first_name').nullable().alter() 30 | table.string('last_name').nullable().alter() 31 | table.string('street_address').nullable().alter() 32 | table.string('city').nullable().alter() 33 | table.string('state').nullable().alter() 34 | table.string('zip').nullable().alter() 35 | }) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /server/db/migrations/20211230165827_harden-letter_versions-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'letter_versions' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | await knex.schema.alterTable(tableName, function (table) { 6 | // Rename columns 7 | table.renameColumn('campaignid', 'campaign_id') 8 | }) 9 | 10 | await knex.schema.alterTable(tableName, function (table) { 11 | // Add NOT NULL constraints to simple fields 12 | table.integer('campaign_id').unsigned().notNullable().alter() 13 | table.string('template_id').notNullable().alter() 14 | 15 | // Add columns 16 | table.string('municipality') 17 | 18 | // Add indexes 19 | table.index(['campaign_id']) 20 | table.index(['template_id']) 21 | }) 22 | }, 23 | 24 | async down(knex) { 25 | // Alter the table 26 | await knex.schema.alterTable(tableName, function (table) { 27 | // Drop NOT NULL constraints from simple fields 28 | table.integer('campaign_id').unsigned().nullable().alter() 29 | table.string('template_id').nullable().alter() 30 | 31 | // Drop columns 32 | table.dropColumn('municipality') 33 | 34 | // Drop indexes 35 | table.dropIndex(['campaign_id']) 36 | table.dropIndex(['template_id']) 37 | }) 38 | 39 | await knex.schema.alterTable(tableName, function (table) { 40 | // Rename columns 41 | table.renameColumn('campaign_id', 'campaignid') 42 | }) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /server/db/migrations/20210513215151_create-campaigns-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'campaigns' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Create the table 6 | await knex.schema.createTable(tableName, (table) => { 7 | // Auto-incrementing non-nullable unsigned integer primary key "id" field 8 | table.increments() 9 | 10 | // Simple fields 11 | table.string('name').notNullable() 12 | table.string('organization').notNullable() 13 | table.text('page_url').notNullable() 14 | table.integer('letters_sent').notNullable() 15 | 16 | // Fields using native enum types 17 | table 18 | .enum('cause', ['Civic Rights', 'Education', 'Climate Justice'], { 19 | useNative: true, 20 | enumName: 'cause_type' 21 | }) 22 | .notNullable() 23 | 24 | 
table 25 | .enum('type', ['Starter', 'Accelerator', 'Grant'], { 26 | useNative: true, 27 | enumName: 'campaign_type' 28 | }) 29 | .notNullable() 30 | 31 | // Indexes 32 | table.index(['name']) 33 | table.index(['organization']) 34 | 35 | // Unique indexes 36 | table.unique(['name', 'organization']) 37 | }) 38 | }, 39 | 40 | async down(knex) { 41 | // Drop the table 42 | await knex.schema.dropTable(tableName) 43 | 44 | // Manually remove the native enum types 45 | await knex.raw(`DROP TYPE IF EXISTS cause_type;`) 46 | await knex.raw(`DROP TYPE IF EXISTS campaign_type;`) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /.github/workflows/integration-tests.yaml: -------------------------------------------------------------------------------- 1 | name: Integration Tests 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | 7 | permissions: 8 | contents: read 9 | 10 | # This allows a subsequently queued workflow run to interrupt previous runs 11 | concurrency: 12 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | test: 17 | if: ${{ github.repository == 'ProgramEquity/amplify-back-end' }} 18 | runs-on: ubuntu-latest 19 | timeout-minutes: 5 20 | steps: 21 | - name: Check out repo 22 | uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 23 | 24 | - name: Setup node 25 | uses: actions/setup-node@1f8c6b94b26d0feae1e387ca63ccbdc44d27b561 26 | with: 27 | node-version: 16.x 28 | cache: npm 29 | 30 | - name: Install dependencies 31 | run: npm ci 32 | 33 | - name: Run tests 34 | run: npm test -- server/__tests__/integration/ 35 | env: 36 | # Google Civic API key 37 | CIVIC_API_KEY: ${{ secrets.TEST_CIVIC_API_KEY }} 38 | # Test environment Lob API key 39 | LOB_API_KEY: ${{ secrets.TEST_LOB_API_KEY }} 40 | # Auth0 authentication parameters with nonsensical sample values 41 | SERVER_PORT: 6060 42 | CLIENT_ORIGIN_URL: http://localhost:4040 43 | AUTH0_AUDIENCE: your_Auth0_identifier_value 44 | AUTH0_DOMAIN: your_Auth0_domain 45 | -------------------------------------------------------------------------------- /server/db/migrations/20211224223204_match-production-letter_versions-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'letter_versions' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Alter the table 6 | await knex.schema.alterTable(tableName, function (table) { 7 | // Drop NOT NULL constraints from simple fields 8 | table.integer('campaign_id').unsigned().nullable().alter() 9 | table.string('template_id').nullable().alter() 10 | 11 | // Drop columns 12 | table.dropColumn('municipality') 13 | 14 | // Drop indexes 15 | table.dropIndex(['campaign_id']) 16 | 17 | // Drop unique indexes 18 | table.dropUnique(['template_id']) 19 | }) 20 | 21 | await knex.schema.alterTable(tableName, function (table) { 22 | // Rename columns 23 | table.renameColumn('campaign_id', 'campaignid') 24 | }) 25 | }, 26 | 27 | async down(knex) { 28 | await knex.schema.alterTable(tableName, function (table) { 29 | // Rename columns 30 | table.renameColumn('campaignid', 'campaign_id') 31 | }) 32 | 33 | await knex.schema.alterTable(tableName, function (table) { 34 | // Add NOT NULL constraints to simple fields 35 | table.integer('campaign_id').unsigned().notNullable().alter() 36 | table.string('template_id').notNullable().alter() 37 | 38 | // Add columns 39 | table.string('municipality') 40 
| 41 | // Add indexes 42 | table.index(['campaign_id']) 43 | 44 | // Add unique indexes 45 | table.unique(['template_id']) 46 | }) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /server/app.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config() 2 | const express = require('express') 3 | const cors = require('cors') 4 | const app = express() 5 | const rateLimit = require('express-rate-limit') 6 | 7 | // Middleware 8 | app.use(express.json()) 9 | app.use(cors()) 10 | 11 | // Rate Limiting 12 | // Enable if you're behind a reverse proxy (Heroku, Bluemix, AWS ELB or API Gateway, Nginx, etc) 13 | // see https://expressjs.com/en/guide/behind-proxies.html 14 | app.set('trust proxy', true) 15 | 16 | // to unblock for now 17 | const apiLimiter = rateLimit({ 18 | windowMs: 1 * 60 * 1000, // 1 minute 19 | max: 100000 20 | }) 21 | 22 | // only apply to requests that begin with /api/ 23 | app.use('/api', apiLimiter) 24 | 25 | const representatives = require('./routes/api/representatives') 26 | const campaigns = require('./routes/api/campaigns') 27 | const authentication = require('./routes/api/authentication') 28 | const letter_versions = require('./routes/api/letter_versions') 29 | const lob = require('./routes/api/lob') 30 | const checkout = require('./routes/api/checkout') 31 | // const give = require('./routes/api/give'); 32 | 33 | // const email = require('./routes/api/email') 34 | 35 | app.use('/api/representatives', representatives) 36 | app.use('/api/campaigns', campaigns) 37 | app.use('/api/authentication', authentication) 38 | app.use('/api/letter_versions', letter_versions) 39 | app.use('/api/lob', lob) 40 | app.use('/api/checkout', checkout) 41 | // app.use('/api/give', give); 42 | // app.use('/api/library', library); 43 | // app.use('/api/email', email); 44 | 45 | module.exports = app 46 | -------------------------------------------------------------------------------- /.devcontainer/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | 3 | services: 4 | app: 5 | build: 6 | context: . 7 | dockerfile: Dockerfile 8 | args: 9 | # Update 'VARIANT' to pick a default version of Node.js: 16, 14, 12. 10 | # Append -bullseye to pin to local arm64/Apple Silicon. 11 | # Append -buster to pin to Debian. 12 | VARIANT: '16-buster' 13 | 14 | # Specify environment variables to set 15 | environment: 16 | - POSTGRES_DB=postgres 17 | - POSTGRES_USER=postgres 18 | - POSTGRES_PASSWORD=postgres 19 | - POSTGRES_PORT=5433 20 | 21 | volumes: 22 | - ..:/workspace:cached 23 | 24 | # Overrides default command so things don't shut down after the process ends. 25 | command: sleep infinity 26 | 27 | # Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function. 28 | network_mode: service:db 29 | 30 | # Uncomment the next line to use a non-root user for all processes. 
31 | # user: node 32 | 33 | db: 34 | image: postgres:latest 35 | restart: unless-stopped 36 | volumes: 37 | - postgres-data:/var/lib/postgresql/data 38 | environment: 39 | POSTGRES_DB: postgres 40 | POSTGRES_USER: postgres 41 | POSTGRES_PASSWORD: postgres 42 | POSTGRES_PORT: 5433 43 | expose: 44 | - '5433' # Publishes port to other containers but NOT to host machine 45 | ports: 46 | - '5433:5433' # Expose PostgreSQL to the host machine on port 5433 47 | command: '-p 5433' # Run the container's PostgreSQL server on non-standard port 5433 as well 48 | 49 | volumes: 50 | postgres-data: null 51 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug 3 | about: Template to create bug/buildsafe issues that need to be designed for 4 | title: 'QA Bug' 5 | labels: QA 6 | assignees: 'teakopp' 7 | 8 | --- 9 | 10 | 11 | Remember, an issue is not the place to ask questions. You can use [Stack Overflow](http://stackoverflow.com/questions/tagged/angular-meteor) for that, or you may want to start a discussion on the [Meteor forum](https://forums.meteor.com/). 12 | 13 | Before you open an issue, please check if a similar issue already exists or has been closed before. 14 | 15 | 16 | 17 | ## Expected Behavior 18 | 19 | 20 | ## Current Behavior 21 | 22 | 23 | ## Possible Solution 24 | 25 | 26 | ## Steps to Reproduce 27 | 28 | 29 | 1. 30 | 2. 31 | 3. 32 | 4. 33 | 34 | ## Context (Environment) 35 | 36 | 37 | 38 | 39 | 40 | ## Detailed Description 41 | 42 | 43 | ## Possible Implementation 44 | 45 | 46 | ## Labels 47 | Please add the following labels dependent on the bug type 48 | - Security Bug: Creates an issue that can cause harm 49 | - Vulnerability Bug: Creates an issue to update a supply chain dependency error 50 | - Functional Bug: something that is causing a crash of the app 51 | -------------------------------------------------------------------------------- /.github/workflows/scorecards-analysis.yml: -------------------------------------------------------------------------------- 1 | name: Scorecards supply-chain security 2 | 3 | on: 4 | # Only the default branch is supported. 5 | branch_protection_rule: 6 | schedule: 7 | - cron: '43 10 * * 6' 8 | push: 9 | branches: 10 | - main 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | analysis: 17 | runs-on: ubuntu-latest 18 | 19 | permissions: 20 | actions: read 21 | contents: read 22 | # Needed to upload the results to code-scanning dashboard. 23 | security-events: write 24 | 25 | steps: 26 | - name: Check out repo 27 | uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 28 | with: 29 | persist-credentials: false 30 | 31 | - name: Run analysis 32 | uses: ossf/scorecard-action@c8416b0b2bf627c349ca92fc8e3de51a64b005cf 33 | with: 34 | results_file: results.sarif 35 | results_format: sarif 36 | # Read-only PAT token. To create it, 37 | # follow the steps in https://github.com/ossf/scorecard-action#pat-token-creation. 38 | repo_token: ${{ secrets.SCORECARD_READ_TOKEN }} 39 | # Publish the results to enable scorecard badges. For more details, see 40 | # https://github.com/ossf/scorecard-action#publishing-results. 41 | # For private repositories, `publish_results` will automatically be set to `false`, 42 | # regardless of the value entered here. 43 | publish_results: true 44 | 45 | # Upload the results as artifacts (optional). 
46 | - name: Upload artifact 47 | uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2 48 | with: 49 | name: SARIF file 50 | path: results.sarif 51 | retention-days: 5 52 | 53 | # Upload the results to GitHub's code scanning dashboard. 54 | - name: Upload to code-scanning 55 | uses: github/codeql-action/upload-sarif@2b46439dd5477d8a1659811cdc500d35e601a1cb 56 | with: 57 | sarif_file: results.sarif 58 | -------------------------------------------------------------------------------- /server/routes/api/checkout.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-unused-vars */ 2 | 3 | const express = require('express') 4 | const { createClient } = require('../../db') 5 | const router = express.Router() 6 | const db = createClient() 7 | const stripe = require('stripe')(process.env.STRIPE_SECRET_KEY) 8 | 9 | router.post('/create-transaction', async (req, res) => { 10 | const { transaction, email, campaignId, donationId } = req.body || {} 11 | if (!transaction || !email) { 12 | return null 13 | } 14 | 15 | const formattedTransaction = { 16 | stripe_transaction_id: transaction.id, 17 | amount: transaction.amount, 18 | stripe_client_secret: transaction.client_secret, 19 | currency: transaction.currency, 20 | payment_method: transaction.payment_method, 21 | payment_method_type: transaction.payment_method_types[0], 22 | email // to-do: get user email from the server auth, if possible 23 | } 24 | 25 | try { 26 | await db('transactions').insert(formattedTransaction) 27 | res.send({ 28 | status: 'ok' 29 | }) 30 | } catch (error) { 31 | console.log({ error }) 32 | } 33 | }) 34 | 35 | // 1. send a request to `/create-payment-intent` 36 | // with a `donationAmount` as string or integer 37 | // If user doesn't select any particular `donationAmount`, send `1` in the donationAmount 38 | // 2. This API will return the client secret. Use it to complete the transaction in the UI 39 | 40 | router.post('/create-payment-intent', async (req, res) => { 41 | try { 42 | const acceptableCharges = [1, 2, 20, 50] 43 | const { donationAmount } = req.body || {} 44 | const parsedDonationAmount = parseInt(donationAmount, 10) 45 | 46 | if (!acceptableCharges.includes(parsedDonationAmount)) { 47 | return res.status(400).send({ error: 'Invalid Amount' }) 48 | } 49 | 50 | const paymentIntent = await stripe.paymentIntents.create({ 51 | amount: parsedDonationAmount * 100, // in cents 52 | currency: 'usd' 53 | }) 54 | res.send({ 55 | clientSecret: paymentIntent.client_secret 56 | }) 57 | } catch (error) { 58 | console.log({ error }) 59 | } 60 | }) 61 | 62 | module.exports = router 63 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. 
For config options, see the README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.205.2/containers/javascript-node-postgres 3 | // Update the VARIANT arg in docker-compose.yml to pick a Node.js version 4 | { 5 | "name": "ProgramEquity/amplify-back-end", 6 | "dockerComposeFile": "docker-compose.yml", 7 | "service": "app", 8 | "workspaceFolder": "/workspace", 9 | 10 | // Set minimum specifications for Codespaces machine types 11 | "hostRequirements": { 12 | "cpus": 2, 13 | "memory": "2gb", 14 | "storage": "4gb" 15 | }, 16 | 17 | // Set *default* container specific settings.json values on container create. 18 | "settings": { 19 | "sqltools.connections": [ 20 | { 21 | "name": "Container database", 22 | "driver": "PostgreSQL", 23 | "previewLimit": 50, 24 | "server": "localhost", 25 | "port": 5433, // Default PostgreSQL port is 5432 26 | "database": "postgres", 27 | "username": "postgres", 28 | "password": "postgres" 29 | } 30 | ], 31 | "files.watcherExclude": { 32 | "**/.git": true, 33 | "**/node_modules": true 34 | } 35 | }, 36 | 37 | // Add the IDs of extensions you want installed when the container is created. 38 | "extensions": [ 39 | "dbaeumer.vscode-eslint", 40 | "mtxr.sqltools", 41 | "mtxr.sqltools-driver-pg" 42 | ], 43 | 44 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 45 | "forwardPorts": [5000, 5433], 46 | "portsAttributes": { 47 | "5000": { 48 | "label": "app" 49 | }, 50 | "5433": { 51 | "label": "db" 52 | } 53 | }, 54 | 55 | "containerEnv": { 56 | "POSTGRES_PORT": "5433" 57 | }, 58 | 59 | // Use 'onCreateCommand' to run commands when the container is being created. 60 | "onCreateCommand": ".devcontainer/on-create-command.sh", 61 | 62 | // Use 'postCreateCommand' to run commands after the container is created. 63 | "postCreateCommand": ".devcontainer/post-create-command.sh", 64 | 65 | // Comment out to connect as root instead. 
More info: https://aka.ms/vscode-remote/containers/non-root 66 | "remoteUser": "node" 67 | } 68 | -------------------------------------------------------------------------------- /server/db/seeds/development/seed-letter_versions-table.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | async seed(knex) { 3 | // Deletes ALL existing entries 4 | await knex('letter_versions').del() 5 | 6 | // Inserts seed entries 7 | await knex('letter_versions').insert([ 8 | { 9 | id: 1, 10 | template_id: 'tmpl_1057bb6f50f81fb ', 11 | campaign_id: 1, 12 | office_division: 'Federal', 13 | state: null, 14 | county: null 15 | }, 16 | { 17 | id: 2, 18 | template_id: 'tmpl_1057bb6j23k81fg', 19 | campaign_id: 1, 20 | office_division: 'State', 21 | state: 'California', 22 | county: null 23 | }, 24 | { 25 | id: 3, 26 | template_id: 'tmpl_1057bb322qwj2f', 27 | campaign_id: 1, 28 | office_division: 'County', 29 | state: 'California', 30 | county: null 31 | }, 32 | { 33 | id: 4, 34 | template_id: 'tmpl_1057bb32jhiw81fg', 35 | campaign_id: 2, 36 | office_division: 'County', 37 | state: 'California', 38 | county: 'Sacramento' 39 | }, 40 | { 41 | id: 5, 42 | template_id: 'tmpl_1057bb21jk1lssmm', 43 | campaign_id: 2, 44 | office_division: 'State', 45 | state: 'California', 46 | county: 'LA' 47 | }, 48 | { 49 | id: 6, 50 | template_id: 'tmpl_1057bb3320ed30', 51 | campaign_id: 2, 52 | office_division: 'Federal', 53 | state: null, 54 | county: null 55 | }, 56 | { 57 | id: 7, 58 | template_id: 'tmpl_1057bb342jmdlslal', 59 | campaign_id: 3, 60 | office_division: 'Federal', 61 | state: null, 62 | county: null 63 | }, 64 | { 65 | id: 8, 66 | template_id: 'tmpl_1057bb12reew81fg', 67 | campaign_id: 3, 68 | office_division: 'State', 69 | state: 'California', 70 | county: null 71 | }, 72 | { 73 | id: 9, 74 | template_id: 'tmpl_1057bb3e23kwkkq', 75 | campaign_id: 3, 76 | office_division: 'County', 77 | state: 'California', 78 | county: 'San Francisco' 79 | } 80 | ]) 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /server/db/migrations/20210514145630_create-letter-versions-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'letter_versions' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Create the table 6 | await knex.schema.createTable(tableName, (table) => { 7 | // Auto-incrementing non-nullable unsigned integer primary key "id" field 8 | table.increments() 9 | 10 | // Simple fields 11 | table.string('template_id').notNullable() 12 | 13 | // Foreign key references 14 | table.integer('campaign_id').unsigned().notNullable() 15 | table.foreign('campaign_id').references('campaigns.id') 16 | 17 | // Fields using native enum types 18 | // table 19 | // .enum( 20 | // 'office_division', 21 | // ['Federal', 'State', 'County', 'Municipality'], 22 | // { useNative: true, enumName: 'political_division' } 23 | // ) 24 | // .notNullable() 25 | // .defaultTo('Federal') 26 | 27 | // More simple fields 28 | table.string('office_division') 29 | table.string('state') 30 | table.string('county') 31 | table.string('municipality') 32 | 33 | // Indexes 34 | table.index(['campaign_id']) 35 | 36 | // Unique indexes 37 | table.unique(['template_id']) 38 | }) 39 | 40 | // 41 | // Special CHECK constraints to verify row integrity 42 | // 43 | 44 | // If "office_division" is set to 'Federal', then: 45 | // - the value for "state" must be NULL 46 | // - the value for "county" must be 
NULL 47 | // - the value for "municipality" must be NULL 48 | // await knex.raw( 49 | // ` 50 | // ALTER TABLE "${tableName}" 51 | // ADD CONSTRAINT "office_divisions_are_valid" 52 | // CHECK ( 53 | // ( 54 | // office_division = 'Federal' AND 55 | // state IS NULL AND 56 | // county IS NULL AND 57 | // municipality IS NULL 58 | // ) 59 | // ); 60 | // ` 61 | // ) 62 | }, 63 | 64 | async down(knex) { 65 | // Drop the table (and its special CHECK constraints) 66 | await knex.schema.dropTable(tableName) 67 | 68 | // Manually remove the native enum types 69 | // await knex.raw(`DROP TYPE IF EXISTS political_division;`) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "private": true, 3 | "name": "amplify-back-end", 4 | "version": "1.0.0", 5 | "description": "The API backend for ProgramEquity", 6 | "author": "Ally Guide", 7 | "license": "MIT", 8 | "repository": { 9 | "type": "git", 10 | "url": "git+https://github.com/Ally-Guide/amplify-back-end.git" 11 | }, 12 | "bugs": { 13 | "url": "https://github.com/Ally-Guide/amplify-back-end/issues" 14 | }, 15 | "homepage": "https://www.programequity.com/", 16 | "main": "server/server.js", 17 | "scripts": { 18 | "postinstall": "husky install", 19 | "start": "node server/server.js", 20 | "dev": "nodemon server/server.js", 21 | "lint": "eslint --fix .", 22 | "lint:check": "eslint .", 23 | "format": "prettier --write . '!**/*.md'", 24 | "format:check": "prettier --check . '!**/*.md'", 25 | "test": "jest --detectOpenHandles --forceExit", 26 | "precommit": "npm run lint:check && npm run format:check", 27 | "db:create": "npm run db:create:dev && npm run db:create:test", 28 | "db:create:dev": "node script/create-db.js --env development", 29 | "db:create:test": "node script/drop-db.js --env test && node script/create-db.js --env test", 30 | "db:migrate": "npm run db:migrate:dev && npm run db:migrate:test", 31 | "db:migrate:dev": "knex migrate:latest --verbose --env development", 32 | "db:migrate:test": "knex migrate:latest --verbose --env test", 33 | "db:migrate:prod": "knex migrate:latest --verbose --env production", 34 | "db:seed": "knex seed:run", 35 | "heroku-postbuild": "npm run db:migrate:prod" 36 | }, 37 | "engines": { 38 | "node": ">= 14.0.0" 39 | }, 40 | "dependencies": { 41 | "axios": "^0.25.0", 42 | "cors": "^2.8.5", 43 | "date-fns": "^2.28.0", 44 | "dotenv": "^16.0.0", 45 | "express": "^4.17.2", 46 | "express-jwt": "^6.0.0", 47 | "express-rate-limit": "^6.2.1", 48 | "jwks-rsa": "^2.0.3", 49 | "knex": "^1.0.3", 50 | "lob": "^6.5.5", 51 | "pg": "^8.6.0", 52 | "stripe": "^8.202.0" 53 | }, 54 | "devDependencies": { 55 | "eslint": "^8.8.0", 56 | "eslint-config-prettier": "^8.3.0", 57 | "husky": "^7.0.0", 58 | "jest": "^27.4.7", 59 | "nodemon": "^2.0.7", 60 | "prettier": "2.5.1", 61 | "supertest": "^6.2.2" 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /knexfile.js: -------------------------------------------------------------------------------- 1 | const { getEnv } = require('./server/db/util') 2 | 3 | const targetEnv = getEnv() 4 | const isProduction = targetEnv === 'production' 5 | const { POSTGRES_USER, POSTGRES_PASSWORD } = process.env 6 | const POSTGRES_PORT = parseInt(process.env.POSTGRES_PORT, 10) || undefined 7 | 8 | // Required for Heroku PostgreSQL 9 | // See: 
https://stackoverflow.com/questions/66497248/heroku-postgres-not-able-to-connect-error-no-pg-hba-conf-entry-for-host 10 | if (isProduction) { 11 | // Set this new environment variable for the process 12 | process.env.PGSSLMODE = 'no-verify' 13 | } 14 | 15 | const baseConfig = { 16 | client: 'postgresql', 17 | 18 | pool: { 19 | min: 2, 20 | max: 10 21 | }, 22 | 23 | migrations: { 24 | tableName: 'knex_migrations', 25 | directory: './server/db/migrations', 26 | stub: './server/db/_migration.stub.js' 27 | }, 28 | 29 | seeds: { 30 | // This value intentionally results in a failure if not overridden (or handled) 31 | directory: `./server/db/seeds/non-existent-directory`, 32 | stub: './server/db/_seed.stub.js' 33 | }, 34 | 35 | // Turn these off in production for performance reasons 36 | asyncStackTraces: !isProduction, 37 | 38 | // Required for Heroku PostgreSQL 39 | ...(isProduction && { 40 | ssl: { 41 | rejectUnauthorized: false 42 | } 43 | }) 44 | } 45 | 46 | // Export the configuration matrix 47 | module.exports = { 48 | development: { 49 | ...baseConfig, 50 | connection: { 51 | database: 'pe_dev', 52 | ...(POSTGRES_USER && { user: POSTGRES_USER }), 53 | ...(POSTGRES_PASSWORD && { password: POSTGRES_PASSWORD }), 54 | ...(POSTGRES_PORT && { port: POSTGRES_PORT }) 55 | }, 56 | seeds: { 57 | ...baseConfig.seeds, 58 | directory: './server/db/seeds/development' 59 | } 60 | }, 61 | 62 | test: { 63 | ...baseConfig, 64 | connection: { 65 | database: 'pe_test', 66 | ...(POSTGRES_USER && { user: POSTGRES_USER }), 67 | ...(POSTGRES_PASSWORD && { password: POSTGRES_PASSWORD }), 68 | ...(POSTGRES_PORT && { port: POSTGRES_PORT }) 69 | }, 70 | seeds: { 71 | ...baseConfig.seeds, 72 | directory: './server/db/seeds/test' 73 | } 74 | }, 75 | 76 | production: { 77 | ...baseConfig, 78 | connection: process.env.DATABASE_URL 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /server/db/migrations/20211230165803_harden-campaigns-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'campaigns' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Alter the table 6 | await knex.schema.alterTable(tableName, function (table) { 7 | // Add NOT NULL constraints to simple fields 8 | table.string('name').notNullable().alter() 9 | table.string('organization').notNullable().alter() 10 | table.integer('letters_counter').notNullable().alter() 11 | 12 | // Change type from string to text and add NOT NULL constraint 13 | table.text('page_url').notNullable().alter() 14 | 15 | // Change type from string to native enum types 16 | table 17 | .enum('cause', ['Civic Rights', 'Education', 'Climate Justice'], { 18 | useNative: true, 19 | enumName: 'cause_type' 20 | }) 21 | .notNullable() 22 | .alter() 23 | 24 | table 25 | .enum('type', ['Starter', 'Accelerator', 'Grant'], { 26 | useNative: true, 27 | enumName: 'campaign_type' 28 | }) 29 | .notNullable() 30 | .alter() 31 | 32 | // Add indexes 33 | table.index(['name']) 34 | table.index(['organization']) 35 | 36 | // Add unique indexes 37 | table.unique(['name', 'organization']) 38 | }) 39 | }, 40 | 41 | async down(knex) { 42 | // Alter the table 43 | await knex.schema.alterTable(tableName, function (table) { 44 | // Drop unique indexes 45 | table.dropUnique(['name', 'organization']) 46 | 47 | // Drop indexes 48 | table.dropIndex(['organization']) 49 | table.dropIndex(['name']) 50 | 51 | // Drop NOT NULL constraints from simple fields 52 | table.string('name').nullable().alter() 
53 | table.string('organization').nullable().alter() 54 | table.integer('letters_counter').nullable().alter() 55 | 56 | // Change type from text to string and drop NOT NULL constraint 57 | table.string('page_url').nullable().alter() 58 | 59 | // Change type from native enum to string and drop NOT NULL constraint 60 | table.string('cause').nullable().alter() 61 | table.string('type').nullable().alter() 62 | }) 63 | 64 | // Manually remove the native enum types 65 | await knex.raw(`DROP TYPE IF EXISTS cause_type;`) 66 | await knex.raw(`DROP TYPE IF EXISTS campaign_type;`) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # Snowpack dependency directory (https://snowpack.dev/) 45 | web_modules/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | .parcel-cache 78 | 79 | # Next.js build output 80 | .next 81 | out 82 | 83 | # Nuxt.js build / generate output 84 | .nuxt 85 | dist 86 | 87 | # Gatsby files 88 | .cache/ 89 | # Comment in the public line in if your project uses Gatsby and not Next.js 90 | # https://nextjs.org/blog/next-9-1#public-directory-support 91 | # public 92 | 93 | # vuepress build output 94 | .vuepress/dist 95 | 96 | # Serverless directories 97 | .serverless/ 98 | 99 | # FuseBox cache 100 | .fusebox/ 101 | 102 | # DynamoDB Local files 103 | .dynamodb/ 104 | 105 | # TernJS port file 106 | .tern-port 107 | 108 | # Stores VSCode versions used for testing VSCode extensions 109 | .vscode-test 110 | 111 | # yarn v2 112 | .yarn/cache 113 | .yarn/unplugged 114 | .yarn/build-state.yml 115 | .yarn/install-state.gz 116 | .pnp.* 117 | 118 | # Homebrew 119 | Brewfile.lock.json 120 | -------------------------------------------------------------------------------- /server/db/migrations/20211222210238_match-production-campaigns-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'campaigns' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Alter the table 6 | await 
knex.schema.alterTable(tableName, function (table) { 7 | // Drop unique indexes 8 | table.dropUnique(['name', 'organization']) 9 | 10 | // Drop indexes 11 | table.dropIndex(['organization']) 12 | table.dropIndex(['name']) 13 | 14 | // Drop NOT NULL constraints from simple fields 15 | table.string('name').nullable().alter() 16 | table.string('organization').nullable().alter() 17 | table.integer('letters_sent').nullable().alter() 18 | 19 | // Change type from text to string and drop NOT NULL constraint 20 | table.string('page_url').nullable().alter() 21 | 22 | // Change type from native enum to string and drop NOT NULL constraint 23 | table.string('cause').nullable().alter() 24 | table.string('type').nullable().alter() 25 | }) 26 | 27 | await knex.schema.alterTable(tableName, function (table) { 28 | // Rename column 29 | table.renameColumn('letters_sent', 'letters_counter') 30 | }) 31 | 32 | // Manually remove the native enum types 33 | await knex.raw(`DROP TYPE IF EXISTS cause_type;`) 34 | await knex.raw(`DROP TYPE IF EXISTS campaign_type;`) 35 | }, 36 | 37 | async down(knex) { 38 | // Alter the table 39 | await knex.schema.alterTable(tableName, function (table) { 40 | // Rename column 41 | table.renameColumn('letters_counter', 'letters_sent') 42 | }) 43 | 44 | await knex.schema.alterTable(tableName, function (table) { 45 | // Add NOT NULL constraints to simple fields 46 | table.string('name').notNullable().alter() 47 | table.string('organization').notNullable().alter() 48 | table.integer('letters_sent').notNullable().alter() 49 | 50 | // Change type from string to text and add NOT NULL constraint 51 | table.text('page_url').notNullable().alter() 52 | 53 | // Fields using native enum types 54 | table 55 | .enum('cause', ['Civic Rights', 'Education', 'Climate Justice'], { 56 | useNative: true, 57 | enumName: 'cause_type' 58 | }) 59 | .notNullable() 60 | .alter() 61 | 62 | table 63 | .enum('type', ['Starter', 'Accelerator', 'Grant'], { 64 | useNative: true, 65 | enumName: 'campaign_type' 66 | }) 67 | .notNullable() 68 | .alter() 69 | 70 | // Add indexes 71 | table.index(['name']) 72 | table.index(['organization']) 73 | 74 | // Add unique indexes 75 | table.unique(['name', 'organization']) 76 | }) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /script/create-db.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | const { createClient, getConfig, getEnv } = require('../server/db') 4 | 5 | // See https://www.postgresql.org/docs/13/errcodes-appendix.html 6 | const DUPLICATE_DATABASE_ERROR = '42P04' 7 | 8 | bootstrapDatabase() 9 | 10 | async function bootstrapDatabase() { 11 | const targetEnv = getEnv() 12 | if (targetEnv === 'production') { 13 | throw new Error('This script should not be used in production!') 14 | } 15 | 16 | const config = getConfig(targetEnv) 17 | await createDatabase(config) 18 | await migrateToLatestSchemas(config) 19 | await runDataSeeders(config) 20 | } 21 | 22 | async function createDatabase(config) { 23 | const { database } = config.connection 24 | let db 25 | 26 | try { 27 | // Connect with system database selected 28 | db = createClient({ 29 | ...config, 30 | connection: { 31 | ...config.connection, 32 | database: 'postgres' 33 | } 34 | }) 35 | 36 | // Create the database if it doesn't already exist 37 | await db.raw(`CREATE DATABASE ${database}`) 38 | console.log(`Created database "${database}"!`) 39 | } catch (error) { 
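    // Postgres reports error code 42P04 ("duplicate_database") when the
    // database already exists, so the check below treats that case as a
    // warning rather than a hard failure.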
40 | if (error.code === DUPLICATE_DATABASE_ERROR) { 41 | console.warn(`Error creating database "${database}": it already exists!`) 42 | } else { 43 | console.error(`Error creating database "${database}": ${error.message}`) 44 | throw error 45 | } 46 | } finally { 47 | // Disconnect 48 | await db.destroy() 49 | } 50 | } 51 | 52 | async function migrateToLatestSchemas(config) { 53 | const { database } = config.connection 54 | let db 55 | 56 | try { 57 | db = createClient(config) 58 | await db.migrate.latest() 59 | console.log(`Migrated database "${database}" to latest schemas!`) 60 | } catch (error) { 61 | console.error( 62 | `Error migrating to latest schemas for database "${database}": ${error.message}` 63 | ) 64 | throw error 65 | } finally { 66 | // Disconnect 67 | await db.destroy() 68 | } 69 | } 70 | 71 | // eslint-disable-next-line no-unused-vars 72 | async function runDataSeeders(config) { 73 | const { database } = config.connection 74 | let db 75 | 76 | if (!config || !config.seeds || !config.seeds.directory) { 77 | console.warn('Skipping! No data seed directory is configured.') 78 | return 79 | } 80 | 81 | const seedDir = path.resolve(process.cwd(), config.seeds.directory) 82 | const seedDirStats = fs.statSync(seedDir, { throwIfNoEntry: false }) 83 | if (!seedDirStats) { 84 | console.warn( 85 | `Skipping! The data seed directory does not exist: ${config.seeds.directory}` 86 | ) 87 | return 88 | } 89 | 90 | try { 91 | if (!seedDirStats.isDirectory()) { 92 | throw new Error( 93 | `The data seed path exists but is not a directory: ${config.seeds.directory}` 94 | ) 95 | } 96 | 97 | db = createClient(config) 98 | await db.seed.run() 99 | console.log(`Created seed data in database "${database}"!`) 100 | } catch (error) { 101 | console.error( 102 | `Error creating seed data for database "${database}": ${error.message}` 103 | ) 104 | throw error 105 | } finally { 106 | // Disconnect 107 | if (db) { 108 | await db.destroy() 109 | } 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /server/routes/api/representatives.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config() 2 | 3 | const express = require('express') 4 | const axios = require('axios') 5 | 6 | const router = express.Router() 7 | 8 | const CIVIC_API_KEY = getCivicApiKey() 9 | 10 | // Endpoints 11 | 12 | // Get 13 | router.get('/:zipCode', async (req, res) => { 14 | const congressMembers = [] 15 | const { zipCode } = req.params 16 | 17 | if (!zipCode.match(/^\d{5}(-\d{4})?$/)) { 18 | res.status(400).send({ 19 | error: 20 | 'Invalid zip code format, valid examples are 84054-6013 or 84054. 
The zipcode used was ' + 21 | zipCode 22 | }) 23 | return 24 | } 25 | try { 26 | const response = await axios.get( 27 | 'https://www.googleapis.com/civicinfo/v2/representatives', 28 | { 29 | params: { 30 | key: CIVIC_API_KEY, 31 | address: zipCode 32 | } 33 | } 34 | ) 35 | 36 | const { offices, officials } = response.data 37 | offices 38 | .slice(2) // skip President and VP 39 | .forEach((officeType) => { 40 | officeType.officialIndices.forEach((position) => { 41 | const rep = officials[position] 42 | const repInfo = { 43 | name: rep.name || '', 44 | title: officeType.name || '', 45 | address_line1: '', 46 | address_line2: '', 47 | address_city: '', 48 | address_state: '', 49 | address_zip: '', 50 | address_country: 'US', 51 | email: 52 | (Array.isArray(rep.emails) && rep.emails[0]) || 'Not Made Public', 53 | twitter: 'Not Made Public', 54 | facebook: 'Not Made Public', 55 | contactPage: (Array.isArray(rep.urls) && rep.urls[0]) || '', 56 | photoUrl: 57 | rep.photoUrl || 58 | 'https://cdn.pixabay.com/photo/2016/08/08/09/17/avatar-1577909_1280.png' 59 | } 60 | 61 | if (Array.isArray(rep.address) && rep.address[0]) { 62 | repInfo.address_line1 = rep.address[0].line1 63 | repInfo.address_city = rep.address[0].city 64 | repInfo.address_state = rep.address[0].state 65 | repInfo.address_zip = rep.address[0].zip 66 | } 67 | 68 | if (Array.isArray(rep.channels) && rep.channels.length > 0) { 69 | const facebook = rep.channels.find( 70 | ({ type }) => type === 'Facebook' 71 | ) 72 | if (facebook) { 73 | repInfo.facebook = facebook.id 74 | } 75 | const twitter = rep.channels.find(({ type }) => type === 'Twitter') 76 | if (twitter) { 77 | repInfo.twitter = twitter.id 78 | } 79 | } 80 | congressMembers.push(repInfo) 81 | }) 82 | }) 83 | 84 | res.send(congressMembers) 85 | } catch (error) { 86 | console.log(error) 87 | res.status(500).send({ error: 'Whoops' }) 88 | } 89 | }) 90 | 91 | module.exports = router 92 | 93 | // Temporary implementation for fallback with deprecation warnings 94 | function getCivicApiKey() { 95 | const { CIVIC_API_KEY, CivicAPI } = process.env 96 | const civicApiKey = CIVIC_API_KEY || CivicAPI 97 | 98 | if (CivicAPI) { 99 | if (CIVIC_API_KEY) { 100 | console.warn('Using "CIVIC_API_KEY" environment variable.') 101 | console.warn( 102 | 'Please remove your deprecated "CivicAPI" environment variable!' 103 | ) 104 | } else { 105 | console.warn( 106 | 'Expected "CIVIC_API_KEY" environment variable was not found.' 107 | ) 108 | console.warn( 109 | 'Falling back to deprecated "CivicAPI" environment variable....' 
110 | ) 111 | console.warn('Please update your environment to use the expected key!') 112 | } 113 | } 114 | 115 | return civicApiKey 116 | } 117 | -------------------------------------------------------------------------------- /server/db/migrations/20211224223012_match-production-volunteers-table.js: -------------------------------------------------------------------------------- 1 | const tableName = 'volunteers' 2 | 3 | module.exports = { 4 | async up(knex) { 5 | // Prepare the table 6 | await knex.schema.alterTable(tableName, function (table) { 7 | // Drop unique indexes 8 | table.dropUnique(['email']) 9 | 10 | // Drop NOT NULL constraints from simple fields 11 | table.string('name').nullable().alter() 12 | table.string('email').nullable().alter() 13 | table.text('physical_address').nullable().alter() 14 | 15 | // Add new nullable simple fields 16 | table.string('first_name') 17 | table.string('last_name') 18 | table.string('street_address') 19 | table.string('address_two') 20 | table.string('city') 21 | table.string('state') 22 | table.string('zip') 23 | }) 24 | 25 | // Transition the data 26 | const volunteerList = await knex.select('*').from(tableName) 27 | for (const volunteer of volunteerList) { 28 | const nameObj = convertNameStringToObject(volunteer.name) 29 | const addressObj = convertAddressStringToObject( 30 | volunteer.physical_address 31 | ) 32 | await knex(tableName) 33 | .where({ id: volunteer.id }) 34 | .update({ 35 | ...nameObj, 36 | ...addressObj 37 | }) 38 | } 39 | 40 | // Clean up the table 41 | await knex.schema.alterTable(tableName, function (table) { 42 | // Drop old columns 43 | table.dropColumn('name') 44 | table.dropColumn('physical_address') 45 | }) 46 | }, 47 | 48 | async down(knex) { 49 | // Prepare the table 50 | await knex.schema.alterTable(tableName, function (table) { 51 | // Create simple fields 52 | table.string('name').notNullable() 53 | table.text('physical_address').notNullable() 54 | 55 | // Add NOT NULL constraints to simple fields 56 | table.string('email').notNullable().alter() 57 | 58 | // Add unique indexes 59 | table.unique(['email']) 60 | }) 61 | 62 | // Transition the data 63 | const volunteerList = await knex.select('*').from(tableName) 64 | for (const volunteer of volunteerList) { 65 | const nameStr = convertNameObjectToString(volunteer) 66 | const addressStr = convertAddressObjectToString(volunteer) 67 | await knex(tableName).where({ id: volunteer.id }).update({ 68 | name: nameStr, 69 | physical_address: addressStr 70 | }) 71 | } 72 | 73 | // Clean up the table 74 | await knex.schema.alterTable(tableName, function (table) { 75 | // Drop old columns 76 | table.dropColumn('first_name') 77 | table.dropColumn('last_name') 78 | table.dropColumn('street_address') 79 | table.dropColumn('address_two') 80 | table.dropColumn('city') 81 | table.dropColumn('state') 82 | table.dropColumn('zip') 83 | }) 84 | } 85 | } 86 | 87 | function convertAddressStringToObject(addressStr) { 88 | const reAddress = 89 | /^((?[^,]+), )?((?.+), )?(?[^,]+), (?[^,]+) (?[\d-]+)$/ 90 | const match = addressStr.match(reAddress) 91 | if (!match) return null 92 | return { 93 | street_address: match.groups.line1, 94 | address_two: match.groups.line2, 95 | city: match.groups.city, 96 | state: match.groups.state, 97 | zip: match.groups.zip 98 | } 99 | } 100 | 101 | function convertAddressObjectToString({ 102 | street_address: line1, 103 | address_two: line2, 104 | city, 105 | state, 106 | zip 107 | }) { 108 | let streets = '' 109 | if (line1) { 110 | streets += `${line1}, ` 111 
| } 112 | if (line2) { 113 | streets += `${line2}, ` 114 | } 115 | return `${streets}${city}, ${state} ${zip}` 116 | } 117 | 118 | function convertNameStringToObject(nameStr) { 119 | const names = nameStr.split(' ') 120 | return { 121 | first_name: names[0], 122 | last_name: names.slice(1).join(' ') 123 | } 124 | } 125 | 126 | function convertNameObjectToString({ 127 | first_name: firstName, 128 | last_name: lastName 129 | }) { 130 | return `${firstName} ${lastName}` 131 | } 132 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## For Developers 4 | 5 | ### Setup 6 | 7 | #### Using GitHub Codespaces 8 | 9 | [GitHub Codespaces](https://github.com/features/codespaces) is enabled for this repository for everyone who is a member of the organization _or_ acknowledged as an invited "Outside Collaborator". 10 | 11 | You can learn more about using Codespaces from the [official GitHub documentation](https://docs.github.com/codespaces). 12 | 13 | :information_source: When using the Codespaces environment, you should be provided all of the environment variables necessary to get up and running. However, if you find that any of them are not working as expected (please let us know!), you may need to manually update some of them via directions in the [Configuration](#configuration) section below. 14 | 15 | #### Using a Docker environment 16 | 17 | We recommend using VS Code with the [Microsoft "Remote Development" extension pack](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.vscode-remote-extensionpack) to quickly spin up a full-featured development environment using Docker on your local system: 18 | 19 | 1. Follow the [installation guide](https://code.visualstudio.com/docs/remote/containers#_installation) 20 | 2. Then either: 21 | - Clone the repository locally and follow the [quick start guide to open that folder in a container](https://code.visualstudio.com/docs/remote/containers#_quick-start-open-an-existing-folder-in-a-container); 22 | - Or follow the [quick start guide to open a container for the repository or a pull request _without_ cloning it to your local system](https://code.visualstudio.com/docs/remote/containers#_quick-start-open-a-git-repository-or-github-pr-in-an-isolated-container-volume) 23 | 24 | :warning: If you use the Docker environment, you will still need to configure a handful of environment variables that are described in the [Configuration](#configuration) section below. 25 | 26 | #### Using your local system 27 | 28 | ##### Prerequisites 29 | 30 | If you are using a MacOS or Linux system, you can setup all of the prerequisites by running one convenient script: 31 | 32 | ```shell 33 | script/bootstrap 34 | ``` 35 | 36 | ##### Linux prerequisites 37 | 38 | Here are some steps you can follow to install the prerequisites on RHEL, CentOS or Fedora Linux. 39 | 40 | First, reset and install a more recent version of Node.js like version 16. 41 | 42 | ```bash 43 | sudo yum module reset -y nodejs 44 | sudo yum module install -y nodejs:16 45 | ``` 46 | 47 | Next, install the Node.js and Node.js Package Manager packages. 48 | 49 | ```bash 50 | pkcon install -y nodejs 51 | ``` 52 | 53 | ###### Node.js & npm 54 | 55 | Ensure a modern version of [Node.js (and npm)](https://nodejs.org/en/download/) are installed. 
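For example, you can check which versions you currently have installed with:

```shell
node --version
npm --version
```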
56 | 57 | Currently used versions in production: 58 | - Node.js @ `14.x` 59 | - npm @ `6.x` 60 | 61 | ###### PostgreSQL server 62 | 63 | Ensure a [PostgreSQL server is installed](https://www.postgresql.org/download/) and running. 64 | 65 | Currently used version in production: 66 | - PostgreSQL @ `13.x` 67 | 68 | ##### Getting Started 69 | 70 | 1. Clone the repository. 71 | 72 | 2. If you are using a MacOS or Linux system, you can setup all of the prerequisites by running one convenient script: 73 | 74 | ```shell 75 | script/bootstrap 76 | ``` 77 | 78 | If you can successfully run that script, you may skip ahead to Step 5. 79 | 80 | ###### Linux PostgreSQL server 81 | 82 | Here are some steps you can follow to install the PostgreSQL relational database on RHEL, CentOS or Fedora Linux. 83 | 84 | ```bash 85 | pkcon install -y postgresql-server 86 | ``` 87 | 88 | 3. Install the dependencies. From a terminal, navigate to the project's root directory and run: 89 | 90 | ```shell 91 | npm install 92 | ``` 93 | 94 | ℹ️ This should should not be necessary if you successfully ran `script/bootstrap`. 95 | 96 | 4. Create and seed the local PostgreSQL databases: 97 | 98 | ```shell 99 | npm run db:create 100 | ``` 101 | followed by 102 | ```shell 103 | npm run db:seed 104 | ``` 105 | 106 | ℹ️ This should should not be necessary if you successfully ran `script/bootstrap`. 107 | 108 | 5. Ensure you have [configured your environment](#configuration). 109 | 110 | 6. Run the tests to ensure everything is working as expected: 111 | 112 | ```shell 113 | npm test 114 | ``` 115 | 116 | 7. Start the server, either simply: 117 | 118 | ```shell 119 | # Simple approach 120 | npm start 121 | ``` 122 | 123 | or with `nodemon` to automatically restart on changes: 124 | 125 | ```shell 126 | # nodemon approach with automatic restarts 127 | npm run dev 128 | ``` 129 | 130 | ### Configuration 131 | 132 | #### Using the Google Civic Information API locally 133 | 134 | 1. Using your preferred Google account, head to the [Google API Console's Credentials page](https://console.developers.google.com/apis/credentials) to create a new Project and API key. If you need a bit more guidance, refer to [Google's guide](https://developers.google.com/civic-information/docs/using_api). 135 | 2. Hit the following URL, substituting your own API key in: 136 | 137 | ``` 138 | https://www.googleapis.com/civicinfo/v2/elections?key= 139 | ``` 140 | 141 | Chances are likely that you will receive a `403` response with an error message in the JSON response body explaining that you must enable the Google Civic Information API for your new Project. It will also provide a URL to visit to do just that (enable it), so please navigate to that. For reference, its format will be similar to: 142 | 143 | ``` 144 | https://console.developers.google.com/apis/api/civicinfo.googleapis.com/overview?project= 145 | ``` 146 | 147 | 3. On the resulting page, click the "Enable" button. 148 | 4. Revisit the URL from the earlier step, substituting your own API key in: 149 | 150 | ``` 151 | https://www.googleapis.com/civicinfo/v2/elections?key= 152 | ``` 153 | 154 | This time around, you should succeed with a JSON response body containing an `elections` array. 155 | 156 | 5. Create a file called `.env` in the project's root directory, preferrably using the `.env.example` file as your template. 157 | 6. Add a new key-value pair to the file containing your new API key, e.g. 
158 | 159 | ``` 160 | # API key for the Google Civic API 161 | CIVIC_API_KEY= 162 | ``` 163 | 164 | 7. Save the changes to the `.env` file. 165 | 166 | #### Using the Lob API locally 167 | 168 | 1. Login to your personal [Heroku dashboard](https://dashboard.heroku.com/). 169 | 2. Switch to the `programequity` team under the project dropdown (which probably defaulted to `Personal`). 170 | - :information_source: If you do not see that team, you probably need to be added by a project administrator. 171 | 3. Go into the backend application, currently named `murmuring-headland-63935`. 172 | 4. Go into the "Settings" tab. 173 | 5. In the "Config Vars" section, click the "Reveal Config Vars" button to expand the list of existing environment variables. 174 | 6. Find the `LOB_API_KEY` and `TEST_LOB_API_KEY` variables. 175 | 7. Create a file called `.env` in the project's root directory, preferrably using the `.env.example` file as your template. 176 | 8. Add a set of new key-value pairs to the file containing the API keys with the values from the Heroku application, e.g. 177 | 178 | ``` 179 | # Production environment API key for the Lob API 180 | LOB_API_KEY= 181 | 182 | # Test environment API key for the Lob API 183 | # This is only required for running the integration tests successfully! 184 | TEST_LOB_API_KEY= 185 | ``` 186 | 187 | 9. Save the changes to the `.env` file. 188 | 189 | 190 | #### Ignoring the Auth0 authentication locally 191 | 192 | Although the authentication check is not required locally, the module in use still expects certain values to be passed in regardless of their validity. 193 | 194 | 1. Create a file called `.env` in the project's root directory, preferrably using the `.env.example` file as your template. 195 | 2. Add set of a new key-value pairs to the file with literal nonsense values, e.g. 196 | 197 | ``` 198 | SERVER_PORT=6060 199 | CLIENT_ORIGIN_URL=http://localhost:4040 200 | AUTH0_AUDIENCE=your_Auth0_identifier_value 201 | AUTH0_DOMAIN=your_Auth0_domain 202 | ``` 203 | 204 | 3. Save the changes to the `.env` file. 205 | 206 | 207 | #### Setting up Auth0 authentication locally 208 | 209 | 210 | 1. Follow the steps for `Ignoring the Auth0 authentication locally` for configuring the .env file. 211 | 1. Create a file called `.env` in the project's root directory, preferrably using the `.env.example` file as your template. 212 | 2. Add set of a new key-value pairs to the file with literal nonsense values, e.g. 213 | 214 | ``` 215 | SERVER_PORT=6060 216 | CLIENT_ORIGIN_URL=http://localhost:4040 217 | AUTH0_AUDIENCE= 218 | AUTH0_DOMAIN= 219 | ``` 220 | 221 | 3. Save the changes to the `.env` file. 222 | 223 | The following instructions can also be found in this [guide](https://auth0.com/blog/complete-guide-to-vue-user-authentication/#Calling-an-API) 224 | 225 | 4. Sign up for an account at [Auth0](https://auth0.com/). 226 | 5. Select `personal` when prompted with the type of account being created. 227 | 6. Go to [API dashboard](https://manage.auth0.com/#/apis) and click `Create API` button. 228 | 7. Add a Name to your API. It can be named anything you'd like. 229 | 8. Set the Identifier value. `http://localhost:5000/` is recommended. For more information see this [guide](https://auth0.com/blog/complete-guide-to-vue-user-authentication/#Calling-an-API) 230 | 9. Set `AUTH0_AUDIENCE=http://localhost:5000/` or another Identifier value from the step above. 231 | 10. Click on the "Test" tab. 232 | 11. Locate the section called " Asking Auth0 for tokens from my application". 
233 | 12. Click on the cURL tab to show a mock POST request. 234 | 13. Copy your Auth0 domain, which is part of the --url parameter value: tenant-name.region.auth0.com. For more information see this [guide](https://auth0.com/blog/complete-guide-to-vue-user-authentication/#Calling-an-API) 235 | 14. In .env, set `AUTH0_DOMAIN` value equal to domain value from step above. 236 | 237 | 238 | #### Connecting to the production PostgreSQL database locally 239 | 240 | :warning: _For trusted collaborators ONLY!_ :warning: 241 | 242 | 1. Using your authorized Heroku account, you can find the `DATABASE_URL` [Config Var](https://devcenter.heroku.com/articles/config-vars) on our deployed Heroku app's "Settings" page. 243 | 244 | 2. Create a file called `.env` in the project's root directory, preferrably using the `.env.example` file as your template. 245 | 3. Add a new key-value pair to the file containing the Heroku `DATABASE_URL` value, e.g. 246 | 247 | ``` 248 | # PostgreSQL database connection string for production 249 | DATABASE_URL= 250 | ``` 251 | 252 | 4. Save the changes to the `.env` file. 253 | 5. When starting your local server, you must set your `NODE_ENV` environment variable to `"production"` in order for `knex` to connect to this Heroku production database, e.g. 254 | 255 | ```shell 256 | NODE_ENV=production npm start 257 | ``` 258 | -------------------------------------------------------------------------------- /server/__tests__/integration/lob.test.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config() 2 | const app = require('../../app') 3 | const request = require('supertest') 4 | const axios = require('axios') 5 | 6 | const LOB_API_HOST = 'https://api.lob.com' 7 | const LOB_TEST_KEY_PREFIX = 'test_' 8 | 9 | // Ensure we are using a testing key for the Lob API. 10 | // Many of the tests will fail if using a live key instead. 
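// For example, these tests would typically be run locally with something like:
//   TEST_LOB_API_KEY=test_xxxxxxxxxxxx npm test
// where the key value comes from the Heroku config vars described in CONTRIBUTING.md.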
11 | beforeAll(() => { 12 | const { LOB_API_KEY, TEST_LOB_API_KEY } = process.env 13 | let lobApiKey = LOB_API_KEY || '' 14 | 15 | // Lob API keys starts with either "live_" (production) or "test_" (testing) 16 | if ( 17 | !lobApiKey.startsWith(LOB_TEST_KEY_PREFIX) && 18 | (TEST_LOB_API_KEY || '').startsWith(LOB_TEST_KEY_PREFIX) 19 | ) { 20 | lobApiKey = TEST_LOB_API_KEY 21 | } 22 | 23 | if (!lobApiKey.startsWith(LOB_TEST_KEY_PREFIX)) { 24 | throw new Error('You must use a test environment Lob API key!') 25 | } 26 | 27 | process.env.LOB_API_KEY = lobApiKey 28 | }) 29 | 30 | afterEach(() => { 31 | jest.clearAllMocks() 32 | }) 33 | 34 | afterAll(async () => { 35 | await new Promise((resolve) => setTimeout(() => resolve(), 500)) // avoid jest open handle error 36 | }) 37 | 38 | describe('GET /api/lob/templates/:templateId', () => { 39 | const templateId = 'tmpl_c94e83ca2cd5121' 40 | const route = `/api/lob/templates/${templateId}` 41 | 42 | // From https://docs.lob.com/#templates_object 43 | const someDate = '2017-11-07T22:56:10.962Z' 44 | const exampleLobResponse = { 45 | id: templateId, 46 | description: 'Test Template', 47 | versions: [ 48 | { 49 | id: 'vrsn_362184d96d9b0c9', 50 | description: 'Test Template', 51 | html: 'HTML for {{name}}', 52 | date_created: someDate, 53 | date_modified: someDate, 54 | object: 'version' 55 | } 56 | ], 57 | published_version: { 58 | id: 'vrsn_362184d96d9b0c9', 59 | description: 'Test Template', 60 | html: 'HTML for {{name}}', 61 | date_created: someDate, 62 | date_modified: someDate, 63 | object: 'version' 64 | }, 65 | metadata: {}, 66 | date_created: someDate, 67 | date_modified: someDate, 68 | object: 'template' 69 | } 70 | 71 | test('returns 200 status for an existing template', async () => { 72 | const spy = jest.spyOn(axios, 'get') 73 | spy.mockImplementation((url) => { 74 | if (url !== `${LOB_API_HOST}/v1/templates/${templateId}`) { 75 | throw new Error('unexpected call to `axios.get`') 76 | } 77 | return { 78 | status: 200, 79 | data: exampleLobResponse 80 | } 81 | }) 82 | 83 | const response = await request(app).get(route) 84 | expect(response.status).toBe(200) 85 | 86 | expect(spy).toHaveBeenCalled() 87 | spy.mockRestore() 88 | }) 89 | 90 | test('returns 400 status for a non-existent template', async () => { 91 | const badTemplateId = 'non_existent_template_id' 92 | const badRoute = `/api/lob/templates/${badTemplateId}` 93 | 94 | const spy = jest.spyOn(axios, 'get') 95 | spy.mockImplementation((url) => { 96 | if (url !== `${LOB_API_HOST}/v1/templates/${badTemplateId}`) { 97 | throw new Error('unexpected call to `axios.get`') 98 | } 99 | 100 | const axiosError = new Error('Not Found') 101 | axiosError.response = { 102 | status: 404, 103 | data: { 104 | error: { 105 | message: 'template not found', 106 | status_code: 404, 107 | code: 'not_found' 108 | } 109 | } 110 | } 111 | throw axiosError 112 | }) 113 | 114 | const response = await request(app).get(badRoute) 115 | expect(response.status).toBe(400) 116 | expect(response.body.error).toBe('template not found') 117 | 118 | expect(spy).toHaveBeenCalled() 119 | spy.mockRestore() 120 | }) 121 | }) 122 | 123 | describe('POST /api/lob/addressVerification', () => { 124 | // For more information on these testing values, check the Lob API docs. 
125 | // See: https://docs.lob.com/node#us-verification-test-environment 126 | 127 | const route = '/api/lob/addressVerification' 128 | const zip = '11111' // nonsense 129 | 130 | test('returns 200 status for an address meeting all requirements', async () => { 131 | const response = await request(app) 132 | .post(route) 133 | .send({ line1: 'residential house', zip }) 134 | expect(response.status).toBe(200) 135 | expect(response.body).toEqual({ 136 | deliverable: true, 137 | warning: null, 138 | revisedAddress: { 139 | line1: '1709 BRODERICK ST', 140 | line2: null, 141 | city: 'SAN FRANCISCO', 142 | state: 'CA', 143 | zip: '94115-2525' 144 | } 145 | }) 146 | }) 147 | 148 | // 149 | // Pre-request validation tests 150 | // 151 | 152 | // TODO 153 | 154 | // 155 | // Post-request validation tests 156 | // 157 | 158 | test('returns 200 status for a residential house', async () => { 159 | const response = await request(app) 160 | .post(route) 161 | .send({ line1: 'residential house', zip }) 162 | expect(response.status).toBe(200) 163 | expect(response.body).toEqual({ 164 | deliverable: true, 165 | warning: null, 166 | revisedAddress: { 167 | line1: '1709 BRODERICK ST', 168 | line2: null, 169 | city: 'SAN FRANCISCO', 170 | state: 'CA', 171 | zip: '94115-2525' 172 | } 173 | }) 174 | }) 175 | 176 | test('returns 200 status for residential highrise', async () => { 177 | const response = await request(app) 178 | .post(route) 179 | .send({ line1: 'residential highrise', zip }) 180 | expect(response.status).toBe(200) 181 | expect(response.body).toEqual({ 182 | deliverable: true, 183 | revisedAddress: { 184 | city: 'SAN FRANCISCO', 185 | line1: '660 KING ST UNIT 305', 186 | line2: null, 187 | state: 'CA', 188 | zip: '94107-1539' 189 | }, 190 | warning: null 191 | }) 192 | }) 193 | 194 | test('returns 200 status for residential department of state', async () => { 195 | const response = await request(app) 196 | .post(route) 197 | .send({ line1: 'department of state', zip }) 198 | expect(response.status).toBe(200) 199 | expect(response.body).toEqual({ 200 | deliverable: true, 201 | revisedAddress: { 202 | city: 'DPO', 203 | line1: 'UNIT 8900 BOX 4301', 204 | line2: null, 205 | state: 'AE', 206 | zip: '09831-4301' 207 | }, 208 | warning: null 209 | }) 210 | }) 211 | 212 | test('returns 200 status for residential military', async () => { 213 | const response = await request(app) 214 | .post(route) 215 | .send({ line1: 'military', zip }) 216 | expect(response.status).toBe(200) 217 | expect(response.body).toEqual({ 218 | deliverable: true, 219 | revisedAddress: { 220 | city: 'APO', 221 | line1: 'CMR 409 BOX 145', 222 | line2: null, 223 | state: 'AE', 224 | zip: '09053-0002' 225 | }, 226 | warning: null 227 | }) 228 | }) 229 | 230 | test('returns 200 status with warning for residence with unnecessary unit', async () => { 231 | const response = await request(app) 232 | .post(route) 233 | .send({ line1: 'unnecessary unit', zip }) 234 | expect(response.status).toBe(200) 235 | expect(response.body).toEqual({ 236 | deliverable: true, 237 | revisedAddress: { 238 | city: 'SAN FRANCISCO', 239 | line1: '1709 BRODERICK ST APT 505', 240 | line2: null, 241 | state: 'CA', 242 | zip: '94115-2525' 243 | }, 244 | warning: 245 | 'Address may be deliverable but contains an unnecessary suite number' 246 | }) 247 | }) 248 | 249 | test('returns 400 status for residential post office box', async () => { 250 | const response = await request(app) 251 | .post(route) 252 | .send({ line1: 'po box', zip }) 253 | 
expect(response.status).toBe(400) 254 | expect(response.body).toEqual({ 255 | error: 'Post office boxes are not currently supported' 256 | }) 257 | }) 258 | 259 | test('returns 400 status for residence in Puerto Rico', async () => { 260 | const response = await request(app) 261 | .post(route) 262 | .send({ line1: 'puerto rico', zip }) 263 | expect(response.status).toBe(400) 264 | expect(response.body).toEqual({ 265 | error: 'Puerto Rico addresses are not currently supported' 266 | }) 267 | }) 268 | 269 | test('returns 400 status for commercial building', async () => { 270 | const response = await request(app) 271 | .post(route) 272 | .send({ line1: 'deliverable', zip }) 273 | expect(response.status).toBe(400) 274 | expect(response.body).toEqual({ 275 | error: 'Non-residential addresses are not currently supported' 276 | }) 277 | }) 278 | 279 | test('returns 400 status for commercial highrise', async () => { 280 | const response = await request(app) 281 | .post(route) 282 | .send({ line1: 'commercial highrise', zip }) 283 | expect(response.status).toBe(400) 284 | expect(response.body).toEqual({ 285 | error: 'Non-residential addresses are not currently supported' 286 | }) 287 | }) 288 | 289 | test('returns 400 status for undeliverable address', async () => { 290 | const response = await request(app) 291 | .post(route) 292 | .send({ line1: 'undeliverable block match', zip }) 293 | expect(response.status).toBe(400) 294 | expect(response.body).toEqual({ error: 'Address is undeliverable' }) 295 | }) 296 | }) 297 | 298 | // Creating 'from' variable here to capture the address_id generated by the following /createAdress test 299 | // for the /createLetter test. While it'd be preferable to not have tests depend on 300 | // each other, this will ensure a consistently correct address_id and should the test address_id 301 | // get deleted from our test instance, the test will still pass. 302 | let from = { address_id: '' } 303 | describe('POST /api/lob/createAddress', () => { 304 | // For more information on these testing values, check the Lob API docs. 305 | // See: https://docs.lob.com/node#us-verification-test-environment 306 | 307 | const route = '/api/lob/createAddress' 308 | 309 | test('returns 200 status for an address meeting all requirements', async () => { 310 | const address = { 311 | description: 'Jane - Office', 312 | name: 'Jane Doe', 313 | email: 'jane@lob.com', 314 | company: 'Lob', 315 | line1: 'residential house', 316 | city: 'SAN FRANCISCO', 317 | state: 'CA', 318 | zip: '94115-2525' 319 | } 320 | 321 | const response = await request(app).post(route).send(address) 322 | expect(response.status).toBe(200) 323 | expect(response.body).toEqual({ 324 | address_id: expect.any(String) 325 | }) 326 | return (from['address_id'] = response.body.address_id) 327 | }) 328 | }) 329 | 330 | describe('POST /api/lob/createLetter', () => { 331 | // For more information on these testing values, check the Lob API docs. 
332 | // See: https://docs.lob.com/node#us-verification-test-environment 333 | 334 | const route = '/api/lob/createLetter' 335 | 336 | test('returns 200 status if a letter is created meeting all requirements', async () => { 337 | const description = 'This is a test description' 338 | 339 | const to = { 340 | description: 'Jane - Office', 341 | name: 'Jane Doe', 342 | email: 'jane@lob.com', 343 | company: 'Lob', 344 | line1: '1709 BRODERICK ST', 345 | city: 'SAN FRANCISCO', 346 | state: 'CA', 347 | zip: '94115-2525' 348 | } 349 | 350 | const template_id = 'tmpl_1057bb6f50f81fb' 351 | const response = await request(app) 352 | .post(route) 353 | .send({ description, to, from, template_id }) 354 | expect(response.status).toBe(200) 355 | expect(response.body).toEqual({ 356 | expected_delivery_date: expect.any(String) 357 | }) 358 | }) 359 | }) 360 | -------------------------------------------------------------------------------- /server/routes/api/lob.js: -------------------------------------------------------------------------------- 1 | const express = require('express') 2 | const axios = require('axios') 3 | const Lob = require('lob') 4 | 5 | const router = express.Router() 6 | 7 | const ALLOWED_ADDRESS_FIELDS = [ 8 | 'name', 9 | 'email', 10 | 'company', 11 | 'description', 12 | 'line1', 13 | 'line2', 14 | 'city', 15 | 'state', 16 | 'zip' 17 | ] 18 | const VALID_US_ZIP_CODE_MATCH = /^(?:\d{1,4}|\d{5}(?:[+-]?\d{4})?)$/ 19 | const DELIVERABILITY_WARNINGS = { 20 | undeliverable: 'Address is not deliverable', 21 | deliverable_incorrect_unit: 22 | 'Address may be deliverable but contains a suite number that does not exist', 23 | deliverable_missing_unit: 24 | 'Address may be deliverable but is missing a suite number', 25 | deliverable_unnecessary_unit: 26 | 'Address may be deliverable but contains an unnecessary suite number' 27 | } 28 | 29 | router.post('/createAddress', async (req, res) => { 30 | // Get description, to, and template_id from request body 31 | const address = req.body || {} 32 | const lobApiKey = getLobApiKey() 33 | const lob = new Lob({ apiKey: lobApiKey }) 34 | 35 | // Very rough schema validation 36 | try { 37 | const keys = Object.keys(address || {}).sort() 38 | if (!address || keys.length === 0) { 39 | throw new Error('Address object cannot be empty') 40 | } 41 | 42 | const disallowedKeys = keys.reduce((badKeys, key) => { 43 | if (!ALLOWED_ADDRESS_FIELDS.includes(key)) { 44 | badKeys.push(key) 45 | } 46 | return badKeys 47 | }, []) 48 | 49 | if (disallowedKeys.length > 0) { 50 | throw new Error( 51 | `Address object contained unexpected keys: ${JSON.stringify( 52 | disallowedKeys 53 | )}` 54 | ) 55 | } 56 | 57 | if (!(address.line1 || '').trim()) { 58 | throw new Error('Address object must contain a primary line (line1)') 59 | } 60 | 61 | const { zip } = address 62 | if (zip != null && typeof zip !== 'string') { 63 | throw new Error('Address object must contain a string-based ZIP code') 64 | } 65 | 66 | let zipCode = (zip || '').trim() 67 | if (zipCode) { 68 | if (!VALID_US_ZIP_CODE_MATCH.test(zipCode)) { 69 | throw new Error( 70 | `Address object contained an invalid ZIP code: ${zipCode}` 71 | ) 72 | } 73 | } else if (!((address.city || '').trim() && (address.state || '').trim())) { 74 | throw new Error( 75 | 'Address object must include both city and state, or a ZIP code' 76 | ) 77 | } 78 | } catch (validationError) { 79 | return res.status(400).send({ error: validationError.message }) 80 | } 81 | 82 | try { 83 | const response = await lob.usVerifications.verify({ 84 | 
primary_line: address.line1, 85 | secondary_line: address.line2, 86 | city: address.city, 87 | state: address.state, 88 | zip_code: address.zipCode 89 | }) 90 | 91 | const { 92 | deliverability, 93 | components: { 94 | state: revisedState, 95 | address_type: addressType, 96 | record_type: recordType 97 | } 98 | } = response 99 | 100 | const isUndeliverable = 101 | !deliverability || deliverability === 'undeliverable' 102 | const isResidential = addressType === 'residential' 103 | const isPostOfficeBox = recordType === 'po_box' 104 | const isPuertoRico = revisedState === 'PR' 105 | 106 | const deliverable = 107 | !isUndeliverable && isResidential && !isPostOfficeBox && !isPuertoRico 108 | 109 | if (!deliverable) { 110 | let errorMessage = 'Address is undeliverable' 111 | if (!isUndeliverable) { 112 | if (!isResidential) { 113 | errorMessage = 'Non-residential addresses are not currently supported' 114 | } else if (isPostOfficeBox) { 115 | errorMessage = 'Post office boxes are not currently supported' 116 | } else if (isPuertoRico) { 117 | errorMessage = 'Puerto Rico addresses are not currently supported' 118 | } 119 | } 120 | 121 | return res.status(400).send({ error: errorMessage }) 122 | } 123 | 124 | // Create Lob address using variables passed into route via post body 125 | const addressResponse = await lob.addresses.create({ 126 | description: address.description, 127 | name: address.name, 128 | address_line1: address.line1, 129 | address_line2: address.line2, 130 | address_city: address.city, 131 | address_state: address.state, 132 | address_zip: address.zip, 133 | address_country: 'US' 134 | }) 135 | 136 | res.status(200).send({ address_id: addressResponse.id }) 137 | } catch (error) { 138 | res.status(500).send({ error: 'Something failed!' }) 139 | } 140 | }) 141 | 142 | router.post('/createLetter', async (req, res) => { 143 | // Get description, to, and template_id from request body 144 | const { description, to, from, template_id, charge } = req.body || {} 145 | const lobApiKey = getLobApiKey() 146 | const lob = new Lob({ apiKey: lobApiKey }) 147 | const stripe = require('stripe')(process.env.STRIPE_SECRET_KEY) 148 | 149 | try { 150 | // Create Lob address using variables passed into route via post body 151 | const letter = await lob.letters.create({ 152 | description: description, 153 | to: { 154 | name: to.name, 155 | address_line1: to.line1, 156 | address_line2: to.line2, 157 | address_city: to.city, 158 | address_state: to.state, 159 | address_zip: to.zip 160 | }, 161 | from: from.address_id, 162 | file: template_id, 163 | color: false 164 | }) 165 | 166 | res 167 | .status(200) 168 | .send({ expected_delivery_date: letter.expected_delivery_date }) 169 | } catch (error) { 170 | // We'll need a stripe test env key to test this in our integration tests 171 | const refund = await stripe.refunds.create({ 172 | charge: charge 173 | }) 174 | // TODO handle error for refund error. Not doing this currently because chance of 175 | // user making it this far in the process and both LOB API and Stripe failing is very small. 176 | res.status(500).send({ 177 | error: `Something failed! A refund of ${refund.amount} ${refund.currency} has been issued` 178 | }) 179 | } 180 | }) 181 | 182 | router.get('/templates/:templateId', async (req, res) => { 183 | const { templateId } = req.params 184 | var templateInfo = {} 185 | 186 | try { 187 | // We must use `axios` here as the `lob` package does not yet support 188 | // the [beta] Templates API. 
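    // Lob authenticates requests with HTTP basic auth, using the API key as
    // the username and no password, hence the `auth` option below.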
189 | const response = await axios.get( 190 | `https://api.lob.com/v1/templates/${templateId}`, 191 | { 192 | auth: { username: getLobApiKey() } 193 | } 194 | ) 195 | 196 | templateInfo = response.data 197 | return res.status(200).send(templateInfo) 198 | } catch (error) { 199 | return handleLobError(error, res) 200 | } 201 | }) 202 | 203 | router.post('/addressVerification', async (req, res) => { 204 | const address = req.body 205 | 206 | // Very rough schema validation 207 | try { 208 | const keys = Object.keys(address || {}).sort() 209 | if (!address || keys.length === 0) { 210 | throw new Error('Address object cannot be empty') 211 | } 212 | 213 | const disallowedKeys = keys.reduce((badKeys, key) => { 214 | if (!ALLOWED_ADDRESS_FIELDS.includes(key)) { 215 | badKeys.push(key) 216 | } 217 | return badKeys 218 | }, []) 219 | 220 | if (disallowedKeys.length > 0) { 221 | throw new Error( 222 | `Address object contained unexpected keys: ${JSON.stringify( 223 | disallowedKeys 224 | )}` 225 | ) 226 | } 227 | 228 | if (!(address.line1 || '').trim()) { 229 | throw new Error('Address object must contain a primary line (line1)') 230 | } 231 | 232 | const { zip } = address 233 | if (zip != null && typeof zip !== 'string') { 234 | throw new Error('Address object must contain a string-based ZIP code') 235 | } 236 | 237 | let zipCode = (zip || '').trim() 238 | if (zipCode) { 239 | if (!VALID_US_ZIP_CODE_MATCH.test(zipCode)) { 240 | throw new Error( 241 | `Address object contained an invalid ZIP code: ${zipCode}` 242 | ) 243 | } 244 | } else if (!((address.city || '').trim() && (address.state || '').trim())) { 245 | throw new Error( 246 | 'Address object must include both city and state, or a ZIP code' 247 | ) 248 | } 249 | } catch (validationError) { 250 | return res.status(400).send({ error: validationError.message }) 251 | } 252 | 253 | const { line1, line2, city, state, zip } = address 254 | // Ensure the ZIP code is at least 5 digits 255 | const zipCode = zip ? 
zip.padStart(5, '0') : null 256 | 257 | try { 258 | const lob = new Lob({ apiKey: getLobApiKey() }) 259 | const response = await lob.usVerifications.verify({ 260 | primary_line: line1, 261 | secondary_line: line2, 262 | city, 263 | state, 264 | zip_code: zipCode 265 | }) 266 | 267 | const { 268 | deliverability, 269 | primary_line: revisedLine1, 270 | secondary_line: revisedLine2, 271 | components: { 272 | city: revisedCity, 273 | state: revisedState, 274 | zip_code: revisedZip, 275 | zip_code_plus_4: revisedZipPlus4, 276 | address_type: addressType, 277 | record_type: recordType 278 | } 279 | } = response 280 | 281 | const isUndeliverable = 282 | !deliverability || deliverability === 'undeliverable' 283 | const isResidential = addressType === 'residential' 284 | const isPostOfficeBox = recordType === 'po_box' 285 | const isPuertoRico = revisedState === 'PR' 286 | 287 | const deliverable = 288 | !isUndeliverable && isResidential && !isPostOfficeBox && !isPuertoRico 289 | const warning = DELIVERABILITY_WARNINGS[deliverability] || null 290 | 291 | if (!deliverable) { 292 | let errorMessage = 'Address is undeliverable' 293 | if (!isUndeliverable) { 294 | if (!isResidential) { 295 | errorMessage = 'Non-residential addresses are not currently supported' 296 | } else if (isPostOfficeBox) { 297 | errorMessage = 'Post office boxes are not currently supported' 298 | } else if (isPuertoRico) { 299 | errorMessage = 'Puerto Rico addresses are not currently supported' 300 | } 301 | } 302 | 303 | return res.status(400).send({ error: errorMessage }) 304 | } 305 | 306 | return res.status(200).send({ 307 | deliverable, 308 | warning, 309 | revisedAddress: { 310 | line1: revisedLine1, 311 | line2: revisedLine2 || null, 312 | city: revisedCity, 313 | state: revisedState, 314 | zip: revisedZip + (revisedZipPlus4 ? '-' + revisedZipPlus4 : '') 315 | } 316 | }) 317 | } catch (error) { 318 | // This endpoint should not return anything other than `200` status 319 | // codes, even for undeliverable addresses 320 | return handleLobError(error, res) 321 | } 322 | }) 323 | 324 | module.exports = router 325 | 326 | // Temporary implementation for fallback with deprecation warnings 327 | function getLobApiKey() { 328 | const { LOB_API_KEY, LiveLob } = process.env 329 | const lobApiKey = LOB_API_KEY || LiveLob 330 | 331 | if (LiveLob) { 332 | if (LOB_API_KEY) { 333 | console.warn('Using "LOB_API_KEY" environment variable.') 334 | console.warn( 335 | 'Please remove your deprecated "LiveLob" environment variable!' 336 | ) 337 | } else { 338 | console.warn('Expected "LOB_API_KEY" environment variable was not found.') 339 | console.warn( 340 | 'Falling back to deprecated "LiveLob" environment variable....' 
341 | ) 342 | console.warn('Please update your environment to use the expected key!') 343 | } 344 | } 345 | 346 | return lobApiKey 347 | } 348 | 349 | function handleLobError(error, res) { 350 | let status = 500 351 | let errorMessage = 'Whoops' 352 | 353 | if (error) { 354 | // error.response is from the `axios` package 355 | // error._response is from the `lob` package 356 | if (error.response || error._response) { 357 | status = 502 358 | 359 | let lobStatus = null 360 | let lobApiError = {} 361 | 362 | // Handle Lob API errors from `axios` requests 363 | if (error.response) { 364 | lobStatus = error.response.status 365 | lobApiError = error.response.data.error 366 | } 367 | // Handle Lob API errors from `lob` requests 368 | else if (error._response) { 369 | lobStatus = error._response.statusCode 370 | lobApiError = error._response.body.error 371 | } 372 | 373 | if (process.env.NODE_ENV !== 'test') { 374 | console.error( 375 | `Lob API error (${lobStatus}): ${JSON.stringify(lobApiError)}` 376 | ) 377 | } 378 | 379 | // If the error is being blamed on the request... 380 | // See: https://docs.lob.com/#errors 381 | if ([400, 404, 422].includes(lobStatus)) { 382 | status = 400 383 | errorMessage = lobApiError.message 384 | } 385 | } else { 386 | console.error(error) 387 | } 388 | } 389 | 390 | return res.status(status).send({ error: errorMessage }) 391 | } 392 | --------------------------------------------------------------------------------