├── .nvmrc ├── .cfignore ├── entrypoint.sh ├── hooks ├── pre-commit ├── post-rewrite ├── post-merge └── post-checkout ├── env.example ├── newrelic.js ├── .mocharc.yml ├── src ├── config.js ├── config.cloudgov.js ├── api-data-gov-filter.js ├── logger.js ├── app.js └── db.js ├── docker-compose.test.yml ├── docker-compose.yml ├── index.js ├── nyc.config.js ├── manifest.yml ├── migrations ├── 20240814124509_add_jsonb_data_index.js ├── 20240819182419_add_jsonb_data_index_ua.js ├── 20240130203849_remove_version_col_ga4.js ├── 20240130203237_rename_date_time_to_date_ga4.js ├── 20210706213753_add_date_id_multi_col_index.js ├── 20170308164751_create_analytics_data.js ├── 20231218165411_create_analytics_data_ga4.js ├── 20170316115145_add_analytics_data_indexes.js ├── 20170522094056_rename_date_time_to_date.js └── 20240620192004_add_ga4_data_indexes.js ├── test ├── support │ └── db.js ├── api-data-gov-filter.test.js ├── db.test.js └── app.test.js ├── knexfile.cloudgov.js ├── .github └── workflows │ ├── deploy_to_dev_manually.yml │ ├── deploy.yml │ └── ci.yml ├── .gitignore ├── knexfile.js ├── CONTRIBUTING.md ├── LICENSE.md ├── eslint.config.js ├── package.json ├── docs └── development_and_deployment_process.md └── README.md /.nvmrc: -------------------------------------------------------------------------------- 1 | 22 2 | -------------------------------------------------------------------------------- /.cfignore: -------------------------------------------------------------------------------- 1 | .gitignore -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | export PATH="$PATH:/home/vcap/deps/0/bin" 3 | npm run migrate 4 | npm start 5 | -------------------------------------------------------------------------------- /hooks/pre-commit: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env bash 2 | 3 | # Run linter before committing. 4 | 5 | echo "Executing pre-commit hooks" 6 | 7 | npm run lint 8 | -------------------------------------------------------------------------------- /env.example: -------------------------------------------------------------------------------- 1 | export NODE_ENV=development 2 | export POSTGRES_USER=postgres 3 | export POSTGRES_PASSWORD=123abc 4 | export POSTGRES_DATABASE=analytics-reporter 5 | -------------------------------------------------------------------------------- /newrelic.js: -------------------------------------------------------------------------------- 1 | exports.config = { 2 | app_name: [process.env.NEW_RELIC_APP_NAME], 3 | license_key: process.env.NEW_RELIC_LICENSE_KEY, 4 | logging: { 5 | level: "info", 6 | }, 7 | }; 8 | -------------------------------------------------------------------------------- /.mocharc.yml: -------------------------------------------------------------------------------- 1 | diff: true 2 | extension: ['js'] 3 | package: './package.json' 4 | slow: '75' 5 | spec: 6 | - 'test/**/*.js' 7 | timeout: '2000' 8 | ui: 'bdd' 9 | watch-files: 10 | - 'src/**/*.js' 11 | - 'test/**/*.js' 12 | -------------------------------------------------------------------------------- /hooks/post-rewrite: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # from https://gist.github.com/taurus227/28960de89e6c43bb3d492125368f1224 3 | 4 | # Get current NPM packages after rebase. 
5 | 6 | if [[ "$1" == "rebase" ]] 7 | then 8 | $(dirname "$0")/post-merge 9 | fi 10 | -------------------------------------------------------------------------------- /src/config.js: -------------------------------------------------------------------------------- 1 | const knexfile = require("../knexfile"); 2 | 3 | module.exports = { 4 | api_data_gov_secret: process.env.API_DATA_GOV_SECRET, 5 | port: process.env.PORT || 4444, 6 | postgres: knexfile[process.env.NODE_ENV || "development"].connection, 7 | log_level: process.env.LOG_LEVEL || "info", 8 | }; 9 | -------------------------------------------------------------------------------- /src/config.cloudgov.js: -------------------------------------------------------------------------------- 1 | const knexfile = require("../knexfile"); 2 | 3 | module.exports = { 4 | api_data_gov_secret: process.env.API_DATA_GOV_SECRET, 5 | port: process.env.PORT || 4444, 6 | postgres: knexfile[process.env.NODE_ENV || "production"].connection, 7 | log_level: process.env.LOG_LEVEL || "info", 8 | }; 9 | -------------------------------------------------------------------------------- /docker-compose.test.yml: -------------------------------------------------------------------------------- 1 | services: 2 | db-test: 3 | image: postgres:16 4 | environment: 5 | - POSTGRES_DB=analytics_reporter_test 6 | - POSTGRES_USER=analytics 7 | - POSTGRES_PASSWORD=123abc 8 | ports: 9 | # Non-standard port to deconflict with the dev db 10 | - "5431:5432" 11 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | db: 3 | image: postgres:16 4 | environment: 5 | - POSTGRES_DB=analytics-reporter 6 | - POSTGRES_USER=analytics 7 | - POSTGRES_PASSWORD=123abc 8 | ports: 9 | - "5432:5432" 10 | volumes: 11 | - pgdata:/var/lib/postgresql/data/ 12 | volumes: 13 | pgdata: 14 | 
-------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | if (process.env.NEW_RELIC_APP_NAME) { 2 | console.log("Starting New Relic"); 3 | require("newrelic"); 4 | } 5 | 6 | const app = require("./src/app"); 7 | const config = require("./src/config"); 8 | const logger = require("./src/logger").initialize(); 9 | 10 | app.listen(config.port, () => { 11 | logger.info(`Listening on ${config.port}`); 12 | }); 13 | -------------------------------------------------------------------------------- /hooks/post-merge: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # from https://gist.github.com/taurus227/28960de89e6c43bb3d492125368f1224 3 | 4 | # Get current NPM packages after merge. 5 | 6 | changed_files="$(git diff-tree -r --name-only --no-commit-id ORIG_HEAD HEAD)" 7 | 8 | check_run() { 9 | echo "$changed_files" | grep --quiet "$1" && echo "$0: Running $2..." && eval "$2" 10 | } 11 | 12 | check_run package.json "npm prune && npm install" 13 | -------------------------------------------------------------------------------- /hooks/post-checkout: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # from https://gist.github.com/taurus227/28960de89e6c43bb3d492125368f1224 3 | 4 | # Get current NPM packages on checkout of branch. 5 | 6 | changed_files="$(git diff-tree -r --name-only --no-commit-id $1 $2)" 7 | 8 | check_run() { 9 | echo "$changed_files" | grep --quiet "$1" && echo "$0: Running $2..." && eval "$2" 10 | } 11 | 12 | check_run package.json "npm prune && npm install" 13 | -------------------------------------------------------------------------------- /nyc.config.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | /** 4 | * Configures the code coverage tool NYC. 
5 | */ 6 | module.exports = { 7 | all: true, 8 | exclude: [ 9 | "coverage", 10 | "eslint.config.js", 11 | "knexfile.js", 12 | "knexfile.cloudgov.js", 13 | "migrations", 14 | "newrelic.js", 15 | "nyc.config.js", 16 | "node_modules", 17 | "src/config.cloudgov.js", 18 | "test", 19 | // Ignore UA because it will be deprecated 20 | "ua", 21 | ], 22 | branches: 100, 23 | functions: 100, 24 | lines: 100, 25 | statements: 100, 26 | }; 27 | -------------------------------------------------------------------------------- /manifest.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - name: ${APP_NAME} 3 | instances: 1 4 | memory: 512M 5 | buildpacks: 6 | - nodejs_buildpack 7 | command: "chmod +x ./entrypoint.sh && ./entrypoint.sh" 8 | services: 9 | - ${DB_SERVICE_NAME} 10 | stack: cflinuxfs4 11 | routes: 12 | - route: ${APP_NAME}.app.cloud.gov 13 | - route: ${APP_NAME}.apps.internal 14 | env: 15 | API_DATA_GOV_SECRET: ${API_DATA_GOV_SECRET} 16 | NEW_RELIC_APP_NAME: ${NEW_RELIC_APP_NAME} 17 | NEW_RELIC_LICENSE_KEY: ${NEW_RELIC_LICENSE_KEY} 18 | NODE_ENV: production 19 | PGSSLMODE: true 20 | 21 | -------------------------------------------------------------------------------- /migrations/20240814124509_add_jsonb_data_index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param { import("knex").Knex } knex 3 | * @returns { Promise } 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema.raw( 7 | "CREATE INDEX analytics_data_gin_jsonb ON analytics_data_ga4 USING gin(data jsonb_path_ops)", 8 | ); 9 | }; 10 | 11 | /** 12 | * @param { import("knex").Knex } knex 13 | * @returns { Promise } 14 | */ 15 | exports.down = function (knex) { 16 | return knex.schema.table("analytics_data_ga4", (table) => { 17 | table.dropIndex("analytics_data_gin_jsonb"); 18 | }); 19 | }; 20 | -------------------------------------------------------------------------------- 
/migrations/20240819182419_add_jsonb_data_index_ua.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param { import("knex").Knex } knex 3 | * @returns { Promise } 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema.raw( 7 | "CREATE INDEX analytics_data_ua_gin_jsonb ON analytics_data USING gin(data jsonb_path_ops)", 8 | ); 9 | }; 10 | 11 | /** 12 | * @param { import("knex").Knex } knex 13 | * @returns { Promise } 14 | */ 15 | exports.down = function (knex) { 16 | return knex.schema.table("analytics_data", (table) => { 17 | table.dropIndex("analytics_data_gin_jsonb"); 18 | }); 19 | }; 20 | -------------------------------------------------------------------------------- /migrations/20240130203849_remove_version_col_ga4.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema.table("analytics_data_ga4", (table) => { 7 | table.dropColumn("version"); 8 | }); 9 | }; 10 | 11 | /** 12 | * @param {import("knex").Knex} knex the instance of knex 13 | * @returns {Promise} resolves or rejects when the SQL statement completes 14 | */ 15 | exports.down = function (knex) { 16 | return knex.schema.table("analytics_data_ga4", (table) => { 17 | table.string("version"); 18 | }); 19 | }; 20 | -------------------------------------------------------------------------------- /test/support/db.js: -------------------------------------------------------------------------------- 1 | const knex = require("knex"); 2 | const config = require("../../src/config"); 3 | 4 | class Database { 5 | get client() { 6 | return this.dbClient; 7 | } 8 | 9 | async createClient() { 10 | if (this.dbClient) { 11 | return; 12 | } 13 | 14 | this.dbClient = await knex({ client: "pg", connection: config.postgres }); 15 
| } 16 | 17 | async destroyClient() { 18 | if (this.dbClient) { 19 | await this.dbClient.destroy(); 20 | this.dbClient = null; 21 | } 22 | 23 | return; 24 | } 25 | 26 | resetSchema(table) { 27 | return this.dbClient(table).delete(); 28 | } 29 | } 30 | 31 | module.exports = new Database(); 32 | -------------------------------------------------------------------------------- /knexfile.cloudgov.js: -------------------------------------------------------------------------------- 1 | const VCAP_SERVICES_JSON = JSON.parse(process.env.VCAP_SERVICES); 2 | 3 | module.exports = { 4 | production: { 5 | client: "postgresql", 6 | connection: { 7 | host: VCAP_SERVICES_JSON["aws-rds"][0]["credentials"]["host"], 8 | user: VCAP_SERVICES_JSON["aws-rds"][0]["credentials"]["username"], 9 | password: VCAP_SERVICES_JSON["aws-rds"][0]["credentials"]["password"], 10 | database: VCAP_SERVICES_JSON["aws-rds"][0]["credentials"]["db_name"], 11 | port: 5432, 12 | ssl: true, 13 | }, 14 | pool: { 15 | min: 2, 16 | max: 10, 17 | }, 18 | migrations: { 19 | tableName: "knex_migrations", 20 | }, 21 | }, 22 | }; 23 | -------------------------------------------------------------------------------- /migrations/20240130203237_rename_date_time_to_date_ga4.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema.raw( 7 | "ALTER TABLE analytics_data_ga4 ALTER COLUMN date TYPE date", 8 | ); 9 | }; 10 | 11 | /** 12 | * @param {import("knex").Knex} knex the instance of knex 13 | * @returns {Promise} resolves or rejects when the SQL statement completes 14 | */ 15 | exports.down = function (knex) { 16 | return knex.schema.raw( 17 | "ALTER TABLE analytics_data_ga4 ALTER COLUMN date TYPE timestamp with time zone", 18 | ); 19 | }; 20 | 
-------------------------------------------------------------------------------- /migrations/20210706213753_add_date_id_multi_col_index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema.raw( 7 | "CREATE INDEX analytics_data_date_desc_id_asc ON analytics_data (date DESC NULLS LAST, id ASC)", 8 | ); 9 | }; 10 | 11 | /** 12 | * @param {import("knex").Knex} knex the instance of knex 13 | * @returns {Promise} resolves or rejects when the SQL statement completes 14 | */ 15 | exports.down = function (knex) { 16 | return knex.schema.table("analytics_data", (table) => { 17 | table.dropIndex("analytics_data_date_desc_id_asc"); 18 | }); 19 | }; 20 | -------------------------------------------------------------------------------- /.github/workflows/deploy_to_dev_manually.yml: -------------------------------------------------------------------------------- 1 | name: Deploy to dev manually 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | deploy_dev: 8 | uses: 18F/analytics-reporter-api/.github/workflows/deploy.yml@develop 9 | with: 10 | APP_NAME: ${{ vars.APP_NAME_DEV }} 11 | CF_ORGANIZATION_NAME: ${{ vars.CF_ORGANIZATION_NAME }} 12 | CF_SPACE_NAME: ${{ vars.CF_SPACE_NAME_DEV }} 13 | DB_SERVICE_NAME: ${{ vars.DB_SERVICE_NAME_DEV }} 14 | NEW_RELIC_APP_NAME: ${{ vars.NEW_RELIC_APP_NAME_DEV }} 15 | secrets: 16 | API_DATA_GOV_SECRET: ${{ secrets.API_DATA_GOV_SECRET_DEV }} 17 | CF_USERNAME: ${{ secrets.CF_USERNAME_DEV }} 18 | CF_PASSWORD: ${{ secrets.CF_PASSWORD_DEV }} 19 | NEW_RELIC_LICENSE_KEY: ${{ secrets.NEW_RELIC_LICENSE_KEY_DEV }} 20 | -------------------------------------------------------------------------------- /migrations/20170308164751_create_analytics_data.js: 
-------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema.createTable("analytics_data", (table) => { 7 | table.increments("id"); 8 | table.string("report_name"); 9 | table.string("report_agency"); 10 | table.dateTime("date_time"); 11 | table.jsonb("data"); 12 | table.timestamps(true, true); 13 | }); 14 | }; 15 | 16 | /** 17 | * @param {import("knex").Knex} knex the instance of knex 18 | * @returns {Promise} resolves or rejects when the SQL statement completes 19 | */ 20 | exports.down = function (knex) { 21 | return knex.schema.dropTable("analytics_data"); 22 | }; 23 | -------------------------------------------------------------------------------- /src/api-data-gov-filter.js: -------------------------------------------------------------------------------- 1 | const config = require("./config"); 2 | 3 | /** 4 | * @param {import("express").Request} req the incoming HTTP request. 5 | * @param {import("express").Response} res the HTTP response object. 6 | * @param {Function} next callback to execute when the filter is complete. 7 | * @returns {void} the result of the next callback. 8 | */ 9 | const apiDataGovFilter = (req, res, next) => { 10 | if (!config.api_data_gov_secret || req.path === "/") { 11 | return next(); 12 | } else if ( 13 | req.headers["api-data-gov-secret"] !== config.api_data_gov_secret 14 | ) { 15 | res.status(403); 16 | return res.json({ 17 | message: "Unauthorized. 
See https://analytics.usa.gov/developer", 18 | status: 403, 19 | }); 20 | } 21 | return next(); 22 | }; 23 | 24 | module.exports = apiDataGovFilter; 25 | -------------------------------------------------------------------------------- /migrations/20231218165411_create_analytics_data_ga4.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema.createTable("analytics_data_ga4", (table) => { 7 | table.increments("id"); 8 | table.string("report_name"); 9 | table.string("report_agency"); 10 | table.dateTime("date"); 11 | table.jsonb("data"); 12 | table.timestamps(true, true); 13 | table.string("version"); 14 | }); 15 | }; 16 | 17 | /** 18 | * @param {import("knex").Knex} knex the instance of knex 19 | * @returns {Promise} resolves or rejects when the SQL statement completes 20 | */ 21 | exports.down = function (knex) { 22 | return knex.schema.dropTable("analytics_data_ga4"); 23 | }; 24 | -------------------------------------------------------------------------------- /migrations/20170316115145_add_analytics_data_indexes.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema 7 | .table("analytics_data", (table) => { 8 | table.index(["report_name", "report_agency"]); 9 | }) 10 | .then(() => { 11 | return knex.schema.raw( 12 | "CREATE INDEX analytics_data_date_time_desc ON analytics_data (date_time DESC NULLS LAST)", 13 | ); 14 | }); 15 | }; 16 | 17 | /** 18 | * @param {import("knex").Knex} knex the instance of knex 19 | * @returns {Promise} resolves or rejects when the SQL statement completes 20 | 
*/ 21 | exports.down = function (knex) { 22 | return knex.schema.table("analytics_data", (table) => { 23 | table.dropIndex(["report_name", "report_agency"]); 24 | table.dropIndex("date_time", "analytics_data_date_time_desc"); 25 | }); 26 | }; 27 | -------------------------------------------------------------------------------- /migrations/20170522094056_rename_date_time_to_date.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema 7 | .raw("ALTER TABLE analytics_data RENAME COLUMN date_time TO date") 8 | .then(() => { 9 | return knex.schema.raw( 10 | "ALTER TABLE analytics_data ALTER COLUMN date TYPE date", 11 | ); 12 | }); 13 | }; 14 | 15 | /** 16 | * @param {import("knex").Knex} knex the instance of knex 17 | * @returns {Promise} resolves or rejects when the SQL statement completes 18 | */ 19 | exports.down = function (knex) { 20 | return knex.schema 21 | .raw("ALTER TABLE analytics_data RENAME COLUMN date TO date_time") 22 | .then(() => { 23 | return knex.schema.raw( 24 | "ALTER TABLE analytics_data ALTER COLUMN date_time TYPE timestamp with time zone", 25 | ); 26 | }); 27 | }; 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 
25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (http://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # Typescript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | -------------------------------------------------------------------------------- /migrations/20240620192004_add_ga4_data_indexes.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @param {import("knex").Knex} knex the instance of knex 3 | * @returns {Promise} resolves or rejects when the SQL statement completes 4 | */ 5 | exports.up = function (knex) { 6 | return knex.schema 7 | .table("analytics_data_ga4", (table) => { 8 | table.index(["report_name", "report_agency"]); 9 | }) 10 | .then(() => { 11 | return knex.schema.raw( 12 | "CREATE INDEX analytics_data_date_desc ON analytics_data_ga4 (date DESC NULLS LAST)", 13 | ); 14 | }) 15 | .then(() => { 16 | knex.schema.raw( 17 | "CREATE INDEX analytics_data_date_desc_id_asc ON analytics_data (date DESC NULLS LAST, id ASC)", 18 | ); 19 | }); 20 | }; 21 | 22 | /** 23 | * @param {import("knex").Knex} knex the instance of knex 24 | * @returns {Promise} resolves or rejects when the SQL statement completes 25 | */ 26 | exports.down = function (knex) { 27 | return knex.schema.table("analytics_data_ga4", (table) => { 28 | table.dropIndex(["report_name", "report_agency"]); 29 | table.dropIndex("date", "analytics_data_date_desc"); 30 | table.dropIndex("analytics_data_date_desc_id_asc"); 31 | }); 32 | }; 33 | 
-------------------------------------------------------------------------------- /knexfile.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | development: { 3 | client: "postgresql", 4 | connection: { 5 | host: process.env.POSTGRES_HOST || "localhost", 6 | user: process.env.POSTGRES_USER || "analytics", 7 | password: process.env.POSTGRES_PASSWORD || "123abc", 8 | database: process.env.POSTGRES_DATABASE || "analytics-reporter", 9 | }, 10 | }, 11 | production: { 12 | client: "postgresql", 13 | connection: { 14 | host: process.env.POSTGRES_HOST, 15 | user: process.env.POSTGRES_USER, 16 | password: process.env.POSTGRES_PASSWORD, 17 | database: process.env.POSTGRES_DATABASE, 18 | }, 19 | pool: { 20 | min: 2, 21 | max: 10, 22 | }, 23 | migrations: { 24 | tableName: "knex_migrations", 25 | }, 26 | }, 27 | test: { 28 | client: "postgresql", 29 | connection: { 30 | host: process.env.POSTGRES_HOST || "localhost", 31 | user: process.env.POSTGRES_USER || "analytics", 32 | password: process.env.POSTGRES_PASSWORD || "123abc", 33 | database: process.env.POSTGRES_DATABASE || "analytics_reporter_test", 34 | port: process.env.POSTGRES_PORT || 5431, 35 | }, 36 | migrations: { 37 | tableName: "knex_migrations", 38 | }, 39 | }, 40 | }; 41 | -------------------------------------------------------------------------------- /src/logger.js: -------------------------------------------------------------------------------- 1 | const expressWinston = require("express-winston"); 2 | const winston = require("winston"); 3 | const config = require("./config"); 4 | 5 | const loggerConfig = () => { 6 | return { 7 | level: config.log_level, 8 | format: winston.format.combine( 9 | winston.format.colorize(), 10 | winston.format.simple(), 11 | ), 12 | transports: [ 13 | new winston.transports.Console({ 14 | level: config.log_level, 15 | }), 16 | ], 17 | headerBlacklist: ["x-api-key", "api-data-gov-secret"], 18 | }; 19 | }; 20 | 21 | /** 22 | * 
@returns {import("winston")} the configured winston instance 23 | */ 24 | const initialize = () => { 25 | return winston.createLogger(loggerConfig()); 26 | }; 27 | 28 | /** 29 | * @returns {import("express-winston")} the configured express-winston 30 | * logging middleware instance 31 | */ 32 | const middleware = () => { 33 | return expressWinston.logger(loggerConfig()); 34 | }; 35 | 36 | /** 37 | * @returns {import("express-winston")} the configured express-winston error 38 | * logging middleware instance 39 | */ 40 | const errorLoggingMiddleware = () => { 41 | return expressWinston.errorLogger(loggerConfig()); 42 | }; 43 | 44 | module.exports = { initialize, middleware, errorLoggingMiddleware }; 45 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## Welcome! 2 | 3 | We're so glad you're thinking about contributing to an 18F open source project! If you're unsure or afraid of anything, just ask or submit the issue or pull request anyways. The worst that can happen is that you'll be politely asked to change something. We appreciate any sort of contribution, and don't want a wall of rules to get in the way of that. 4 | 5 | Before contributing, we encourage you to read our CONTRIBUTING policy (you are here), our LICENSE, and our README, all of which should be in this repository. If you have any questions, or want to read more about our underlying policies, you can consult the 18F Open Source Policy GitHub repository at https://github.com/18f/open-source-policy, or just shoot us an email/official government letterhead note to [18f@gsa.gov](mailto:18f@gsa.gov). 
6 | 7 | ## Public domain 8 | 9 | This project is in the public domain within the United States, and 10 | copyright and related rights in the work worldwide are waived through 11 | the [CC0 1.0 Universal public domain dedication](https://creativecommons.org/publicdomain/zero/1.0/). 12 | 13 | All contributions to this project will be released under the CC0 14 | dedication. By submitting a pull request, you are agreeing to comply 15 | with this waiver of copyright interest. 16 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | As a work of the United States Government, this project is in the 2 | public domain within the United States. 3 | 4 | Additionally, we waive copyright and related rights in the work 5 | worldwide through the CC0 1.0 Universal public domain dedication. 6 | 7 | ## CC0 1.0 Universal Summary 8 | 9 | This is a human-readable summary of the [Legal Code (read the full text)](https://creativecommons.org/publicdomain/zero/1.0/legalcode). 10 | 11 | ### No Copyright 12 | 13 | The person who associated a work with this deed has dedicated the work to 14 | the public domain by waiving all of his or her rights to the work worldwide 15 | under copyright law, including all related and neighboring rights, to the 16 | extent allowed by law. 17 | 18 | You can copy, modify, distribute and perform the work, even for commercial 19 | purposes, all without asking permission. 20 | 21 | ### Other Information 22 | 23 | In no way are the patent or trademark rights of any person affected by CC0, 24 | nor are the rights that other persons may have in the work or in how the 25 | work is used, such as publicity or privacy rights. 
26 | 27 | Unless expressly stated otherwise, the person who associated a work with 28 | this deed makes no warranties about the work, and disclaims liability for 29 | all uses of the work, to the fullest extent permitted by applicable law. 30 | When using or citing the work, you should not imply endorsement by the 31 | author or the affirmer. 32 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | const { configs: eslintConfigs } = require("@eslint/js"); 2 | const eslintPluginPrettierRecommended = require("eslint-plugin-prettier/recommended"); 3 | const globals = require("globals"); 4 | const jsdoc = require("eslint-plugin-jsdoc"); 5 | 6 | module.exports = [ 7 | { 8 | languageOptions: { 9 | globals: { 10 | ...globals.node, 11 | ...globals.mocha, 12 | }, 13 | }, 14 | }, 15 | eslintConfigs.recommended, 16 | eslintPluginPrettierRecommended, 17 | { 18 | plugins: { 19 | jsdoc, 20 | }, 21 | files: ["**/*.js"], 22 | rules: { 23 | ...jsdoc.configs.recommended.rules, 24 | "jsdoc/check-indentation": "error", 25 | "jsdoc/check-line-alignment": "error", 26 | "jsdoc/check-syntax": "error", 27 | "jsdoc/convert-to-jsdoc-comments": "warn", 28 | "jsdoc/no-bad-blocks": "error", 29 | "jsdoc/no-blank-block-descriptions": "error", 30 | "jsdoc/no-blank-blocks": "error", 31 | "jsdoc/require-asterisk-prefix": "error", 32 | "jsdoc/require-jsdoc": [ 33 | "error", 34 | { 35 | publicOnly: true, 36 | require: { 37 | ArrowFunctionExpression: true, 38 | ClassDeclaration: true, 39 | ClassExpression: true, 40 | FunctionDeclaration: true, 41 | FunctionExpression: true, 42 | MethodDefinition: true, 43 | }, 44 | }, 45 | ], 46 | "jsdoc/require-throws": "error", 47 | "jsdoc/sort-tags": "warn", 48 | }, 49 | }, 50 | ]; 51 | -------------------------------------------------------------------------------- /package.json: 
--------------------------------------------------------------------------------
{
  "name": "analytics-report-api",
  "version": "2.0.0",
  "description": "An API for service data generated by Analytics Reporter",
  "main": "index.js",
  "scripts": {
    "start": "node index.js",
    "dev": "nodemon index.js",
    "pretest": "NODE_ENV=test npm run migrate",
    "test": "NODE_ENV=test mocha",
    "migrate": "knex migrate:latest",
    "lint": "eslint .",
    "lint:fix": "eslint . --fix",
    "coverage": "nyc npm run test",
    "install-git-hooks": "cp ./hooks/* .git/hooks/ && chmod -R a+x .git/hooks/"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/18F/analytics-reporter-api.git"
  },
  "author": "Jonathan Hooper",
  "license": "CC0-1.0",
  "bugs": {
    "url": "https://github.com/18F/analytics-reporter-api/issues"
  },
  "homepage": "https://github.com/18F/analytics-reporter-api#readme",
  "devDependencies": {
    "@eslint/js": "^8.57.0",
    "chai": "^4.3.10",
    "eslint": "^8.56.0",
    "eslint-config-prettier": "^9.1.0",
    "eslint-plugin-jsdoc": "^48.7.0",
    "eslint-plugin-prettier": "^5.1.3",
    "extend": ">= 3.0.2",
    "globals": "^14.0.0",
    "mocha": "^10.2.0",
    "nodemon": "^3.0.1",
    "nyc": "^15.1.0",
    "proxyquire": "^2.1.3",
    "sinon": "^17.0.1",
    "supertest": "^7.0.0"
  },
  "dependencies": {
    "express": "^4.18.2",
    "express-routes-versioning": "^1.0.1",
    "express-winston": "^4.2.0",
    "knex": "^3.0.1",
    "lodash": ">= 4.17.21",
    "pg": "^8.11.3",
    "winston": "^3.11.0",
    "yup": "^1.4.0"
  },
  "optionalDependencies": {
    "newrelic": "^11.3.0"
  },
  "engines": {
    "node": "22.x.x"
  }
}
--------------------------------------------------------------------------------
/test/api-data-gov-filter.test.js:
-------------------------------------------------------------------------------- 1 | const expect = require("chai").expect; 2 | const proxyquire = require("proxyquire"); 3 | const sinon = require("sinon"); 4 | 5 | proxyquire.noCallThru(); 6 | 7 | describe("apiDataGovFilter", () => { 8 | let apiDataGovFilter; 9 | let config; 10 | let req; 11 | let res; 12 | let next; 13 | 14 | beforeEach(() => { 15 | config = { api_data_gov_secret: "123abc" }; 16 | apiDataGovFilter = proxyquire("../src/api-data-gov-filter", { 17 | "./config": config, 18 | }); 19 | req = {}; 20 | res = { 21 | status: sinon.spy(), 22 | json: sinon.spy(), 23 | }; 24 | next = sinon.spy(); 25 | }); 26 | 27 | context("with a correct api.data.gov secret", () => { 28 | beforeEach(() => { 29 | req.headers = { "api-data-gov-secret": "123abc" }; 30 | }); 31 | 32 | it("should allow requests to the root url", () => { 33 | req.path = "/"; 34 | 35 | apiDataGovFilter(req, res, next); 36 | expect(next.calledOnce).to.be.true; 37 | }); 38 | 39 | it("should allow API requests", () => { 40 | req.path = "/reports/site/data?limit=100"; 41 | 42 | apiDataGovFilter(req, res, next); 43 | expect(next.calledOnce).to.be.true; 44 | }); 45 | }); 46 | 47 | context("with an incorrect api.data.gov secret", () => { 48 | beforeEach(() => { 49 | req.headers = { "api-data-gov-secret": "456def" }; 50 | }); 51 | 52 | it("should allow requests to the root url", () => { 53 | req.path = "/"; 54 | 55 | apiDataGovFilter(req, res, next); 56 | expect(next.calledOnce).to.be.true; 57 | }); 58 | 59 | it("should disallow API requests", () => { 60 | req.path = "/reports/site/data?limit=100"; 61 | apiDataGovFilter(req, res, next); 62 | expect(next.calledOnce).to.be.false; 63 | expect(res.status.firstCall.args[0]).to.equal(403); 64 | expect(res.json.firstCall.args[0]).to.deep.equal({ 65 | message: "Unauthorized. 
See https://analytics.usa.gov/developer", 66 | status: 403, 67 | }); 68 | }); 69 | }); 70 | 71 | context("without an api.data.gov secret", () => { 72 | beforeEach(() => { 73 | delete config.api_data_gov_secret; 74 | req.headers = {}; 75 | }); 76 | 77 | it("should allow requests to the root url", () => { 78 | req.path = "/"; 79 | 80 | apiDataGovFilter(req, res, next); 81 | expect(next.calledOnce).to.be.true; 82 | }); 83 | 84 | it("should allow API requests", () => { 85 | req.path = "/reports/site/data?limit=100"; 86 | 87 | apiDataGovFilter(req, res, next); 88 | expect(next.calledOnce).to.be.true; 89 | }); 90 | }); 91 | }); 92 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_call: 3 | inputs: 4 | APP_NAME: 5 | required: true 6 | type: string 7 | CF_ORGANIZATION_NAME: 8 | required: true 9 | type: string 10 | CF_SPACE_NAME: 11 | required: true 12 | type: string 13 | DB_SERVICE_NAME: 14 | required: true 15 | type: string 16 | NEW_RELIC_APP_NAME: 17 | type: string 18 | secrets: 19 | API_DATA_GOV_SECRET: 20 | required: true 21 | CF_USERNAME: 22 | required: true 23 | CF_PASSWORD: 24 | required: true 25 | NEW_RELIC_LICENSE_KEY: 26 | 27 | env: 28 | API_DATA_GOV_SECRET: ${{ secrets.API_DATA_GOV_SECRET }} 29 | APP_NAME: ${{ inputs.APP_NAME }} 30 | DB_SERVICE_NAME: ${{ inputs.DB_SERVICE_NAME }} 31 | CF_USERNAME: ${{ secrets.CF_USERNAME }} 32 | CF_PASSWORD: ${{ secrets.CF_PASSWORD }} 33 | CF_ORGANIZATION_NAME: ${{ inputs.CF_ORGANIZATION_NAME }} 34 | CF_SPACE_NAME: ${{ inputs.CF_SPACE_NAME }} 35 | NEW_RELIC_APP_NAME: ${{ inputs.NEW_RELIC_APP_NAME }} 36 | NEW_RELIC_LICENSE_KEY: ${{ secrets.NEW_RELIC_LICENSE_KEY }} 37 | 38 | jobs: 39 | deploy_api: 40 | runs-on: ubuntu-latest 41 | steps: 42 | - name: Code checkout 43 | uses: actions/checkout@v4 44 | - name: Install node 45 | uses: actions/setup-node@v4 46 | with: 47 | 
node-version-file: ".nvmrc" 48 | cache: 'npm' 49 | - name: Install node dependencies 50 | # This causes npm install to omit dev dependencies per NPM docs. 51 | env: 52 | NODE_ENV: production 53 | run: npm ci 54 | - name: Install cloud foundry CLI for interacting with cloud.gov 55 | run: | 56 | sudo curl -v -L -o cf8-cli-installer_8.7.4_x86-64.deb 'https://packages.cloudfoundry.org/stable?release=debian64&version=8.7.4' 57 | sudo dpkg -i cf8-cli-installer_8.7.4_x86-64.deb 58 | - name: Run envsubst on manifest.yml to set environment specific values 59 | run: | 60 | mv manifest.yml manifest.yml.src 61 | envsubst < manifest.yml.src > manifest.yml 62 | cat manifest.yml 63 | - name: Replace config.js and knexfile.js with .cloudgov versions of those files 64 | run: | 65 | rm ./src/config.js 66 | mv ./src/config.cloudgov.js ./src/config.js 67 | rm knexfile.js 68 | mv knexfile.cloudgov.js knexfile.js 69 | - name: Login to cloud.gov and deploy 70 | run: | 71 | set -e 72 | # Log into cloud.gov 73 | cf api api.fr.cloud.gov 74 | cf login -u $CF_USERNAME -p $CF_PASSWORD -o $CF_ORGANIZATION_NAME -s $CF_SPACE_NAME 75 | # Deploy app 76 | cf push -f "./manifest.yml" 77 | cf logout 78 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | pull_request: 4 | 5 | jobs: 6 | lint: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Code checkout 10 | uses: actions/checkout@v4 11 | - name: Install node 12 | uses: actions/setup-node@v4 13 | with: 14 | node-version-file: ".nvmrc" 15 | cache: 'npm' 16 | - name: Install node dependencies 17 | run: npm ci 18 | - name: Lint javascript 19 | run: npm run lint 20 | audit_dependencies: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - name: Code checkout 24 | uses: actions/checkout@v4 25 | - name: Install node 26 | uses: actions/setup-node@v4 27 | with: 28 | node-version-file: ".nvmrc" 29 | cache: 
'npm' 30 | - name: Install node dependencies 31 | run: npm ci 32 | - name: Validate npm package signatures 33 | run: npm audit signatures 34 | test: 35 | needs: 36 | - lint 37 | - audit_dependencies 38 | runs-on: ubuntu-latest 39 | # Start Postgres as a service, wait until healthy. Uses latest Postgres version. 40 | services: 41 | postgres: 42 | image: postgres:latest 43 | env: 44 | POSTGRES_DB: analytics_reporter_test 45 | POSTGRES_USER: analytics 46 | POSTGRES_PASSWORD: 123abc 47 | ports: 48 | - 5431:5432 49 | options: 50 | --health-cmd pg_isready 51 | --health-interval 10s 52 | --health-timeout 5s 53 | --health-retries 5 54 | steps: 55 | - name: Code checkout 56 | uses: actions/checkout@v4 57 | - name: Install node 58 | uses: actions/setup-node@v4 59 | with: 60 | node-version-file: ".nvmrc" 61 | cache: 'npm' 62 | - name: Install node dependencies 63 | run: npm ci 64 | - name: Run tests 65 | run: npm test 66 | deploy_dev: 67 | needs: 68 | - lint 69 | - audit_dependencies 70 | - test 71 | if: github.ref == 'refs/heads/develop' 72 | uses: 18F/analytics-reporter-api/.github/workflows/deploy.yml@develop 73 | with: 74 | APP_NAME: ${{ vars.APP_NAME_DEV }} 75 | CF_ORGANIZATION_NAME: ${{ vars.CF_ORGANIZATION_NAME }} 76 | CF_SPACE_NAME: ${{ vars.CF_SPACE_NAME_DEV }} 77 | DB_SERVICE_NAME: ${{ vars.DB_SERVICE_NAME_DEV }} 78 | NEW_RELIC_APP_NAME: ${{ vars.NEW_RELIC_APP_NAME_DEV }} 79 | secrets: 80 | API_DATA_GOV_SECRET: ${{ secrets.API_DATA_GOV_SECRET_DEV }} 81 | CF_USERNAME: ${{ secrets.CF_USERNAME_DEV }} 82 | CF_PASSWORD: ${{ secrets.CF_PASSWORD_DEV }} 83 | NEW_RELIC_LICENSE_KEY: ${{ secrets.NEW_RELIC_LICENSE_KEY_DEV }} 84 | deploy_stg: 85 | needs: 86 | - lint 87 | - audit_dependencies 88 | - test 89 | if: github.ref == 'refs/heads/staging' 90 | uses: 18F/analytics-reporter-api/.github/workflows/deploy.yml@develop 91 | with: 92 | APP_NAME: ${{ vars.APP_NAME_STG }} 93 | CF_ORGANIZATION_NAME: ${{ vars.CF_ORGANIZATION_NAME }} 94 | CF_SPACE_NAME: ${{ vars.CF_SPACE_NAME_STG }} 
95 | DB_SERVICE_NAME: ${{ vars.DB_SERVICE_NAME_STG }} 96 | NEW_RELIC_APP_NAME: ${{ vars.NEW_RELIC_APP_NAME_STG }} 97 | secrets: 98 | API_DATA_GOV_SECRET: ${{ secrets.API_DATA_GOV_SECRET_STG }} 99 | CF_USERNAME: ${{ secrets.CF_USERNAME_STG }} 100 | CF_PASSWORD: ${{ secrets.CF_PASSWORD_STG }} 101 | NEW_RELIC_LICENSE_KEY: ${{ secrets.NEW_RELIC_LICENSE_KEY_STG }} 102 | deploy_prd: 103 | needs: 104 | - lint 105 | - audit_dependencies 106 | - test 107 | if: github.ref == 'refs/heads/master' 108 | uses: 18F/analytics-reporter-api/.github/workflows/deploy.yml@develop 109 | with: 110 | APP_NAME: ${{ vars.APP_NAME_PRD }} 111 | CF_ORGANIZATION_NAME: ${{ vars.CF_ORGANIZATION_NAME }} 112 | CF_SPACE_NAME: ${{ vars.CF_SPACE_NAME_PRD }} 113 | DB_SERVICE_NAME: ${{ vars.DB_SERVICE_NAME_PRD }} 114 | NEW_RELIC_APP_NAME: ${{ vars.NEW_RELIC_APP_NAME_PRD }} 115 | secrets: 116 | API_DATA_GOV_SECRET: ${{ secrets.API_DATA_GOV_SECRET_PRD }} 117 | CF_USERNAME: ${{ secrets.CF_USERNAME_PRD }} 118 | CF_PASSWORD: ${{ secrets.CF_PASSWORD_PRD }} 119 | NEW_RELIC_LICENSE_KEY: ${{ secrets.NEW_RELIC_LICENSE_KEY_PRD }} 120 | -------------------------------------------------------------------------------- /docs/development_and_deployment_process.md: -------------------------------------------------------------------------------- 1 | # Development and deployment process 2 | 3 | The analytics.usa.gov system components follow a Continuous Delivery (CD) model 4 | for feature development and deployment. 5 | 6 | In this project's implementation of CD, the basic idea is the following: 7 | 8 | - The "develop" git branch represents the current state of the deployed 9 | development environment. 10 | - The "staging" git branch represents the current state of the deployed 11 | staging environment. 12 | - The "master" git branch represents the current state of the deployed 13 | production environment. 
14 | 15 | Commits added to the branches described above automatically deploy to the 16 | corresponding application environment. 17 | 18 | The process for creating new features and promoting them through the application 19 | environments should follow the steps below: 20 | 21 | - [New feature request](#new-feature-request) 22 | - [Feature development and testing](#feature-development-and-testing) 23 | - [Feature review and acceptance](#feature-review-and-acceptance) 24 | - [Deploying the feature to production](#deploying-the-feature-to-production) 25 | 26 | ## New feature request 27 | 28 | An Issue describing or requesting a change in functionality is created in 29 | GitHub or another project tracking tool and the System Owner triages to ensure 30 | that it is a well-formed User Story. 31 | 32 | When the story is determined to be well formed and has been prioritized, then a 33 | developer is assigned to or assigns themselves to implement the feature. 34 | 35 | ## Feature development and testing 36 | 37 | A developer creates a new branch off of “develop”. This branch is typically 38 | named "feature/descriptive_name_of_the_feature". During implementation of the 39 | feature, this branch should be pushed to the remote repository and a Pull 40 | Request (PR) should be created to request merging the feature to the develop 41 | branch. The PR may be marked as a draft to ensure that it is not merged until 42 | the feature implementation is complete. 43 | 44 | The PR creation allows for another developer to review the changes in progress 45 | and make suggestions as development continues. 46 | 47 | The feature branch may also contain a commit which allows the feature to be 48 | deployed to the dev environment via CI for testing purposes. This commit 49 | should be removed from the branch before merging to develop. 
(This is necessary 50 | because GitHub Actions don't provide a nice way to run the deployment workflow 51 | manually from the repository web UI, so you must set the workflow to deploy the 52 | feature branch automatically instead of develop and then revert the change 53 | later.) 54 | 55 | The developer completes initial implementation, ensures the changes pass CI 56 | quality scans, tests their changes in the development environment, and requests 57 | another developer to review/approve the PR. 58 | 59 | ## Feature review and acceptance 60 | 61 | A developer conducts code review of the feature, and may make suggestions for 62 | changes on the PR. When the PR has been approved by the reviewer, then the 63 | feature is ready to submit to the System Owner for acceptance. 64 | 65 | The feature is deployed to the development environment and the System Owner 66 | performs acceptance testing in this environment. If changes are requested by the 67 | System Owner, then the feature moves back to [Feature development and testing](#feature-development-and-testing). 68 | 69 | When the System Owner accepts the feature, then the feature branch is merged to 70 | the develop branch (if a commit on the branch exists for deploying the feature 71 | branch to the dev environment, it should be removed before merging to develop). 72 | The System's Owner acceptance of the feature signifies that the 73 | staging/production deployment process can begin for the feature. 74 | 75 | ## Deploying the feature to production 76 | 77 | In preparation for deployment for production, testing must be done in the 78 | staging environment (which is meant to be a replica of the production 79 | environment). 80 | 81 | A PR should be created to merge the develop branch into the staging branch. When 82 | the feature has been deployed to the development environment and smoke tested 83 | there successfully, the PR to merge the develop branch into the staging branch 84 | can be merged. 
85 | 86 | Merging to the staging branch will cause the feature to be deployed to the 87 | staging environment for final testing of the feature. When testing in staging is 88 | complete, a PR is created to merge the staging branch into the master branch. 89 | 90 | Merging to the master branch will cause the feature to be deployed to the 91 | production environment. Smoke testing should be done in production to ensure 92 | there were no issues with deployment. Afterwards, the feature is complete and 93 | live. The issue describing the feature should now be marked as completed. 94 | 95 | -------------------------------------------------------------------------------- /src/app.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const apiDataGovFilter = require("./api-data-gov-filter"); 3 | const db = require("./db"); 4 | const logger = require("./logger"); 5 | const router = express.Router(); 6 | const routesVersioning = require("express-routes-versioning")(); 7 | const yup = require("yup"); 8 | 9 | const app = express(); 10 | 11 | if (process.env.NODE_ENV != "test") { 12 | app.use(logger.middleware()); 13 | } 14 | 15 | app.use(apiDataGovFilter); 16 | app.use(router); 17 | app.use(logger.errorLoggingMiddleware()); 18 | 19 | /** 20 | * Converts date object to an ISO date string without time and zone. 21 | * @param dataPoint 22 | */ 23 | const formatDateForDataPoint = (dataPoint) => { 24 | if (dataPoint.date) { 25 | return dataPoint.date.toISOString().slice(0, 10); 26 | } 27 | return null; 28 | }; 29 | 30 | const acceptableDomainReports = [ 31 | "site", 32 | "domain", 33 | "download", 34 | "second-level-domain", 35 | ]; 36 | 37 | /** 38 | * Currently the only regex match in request validation is on date string 39 | * formatting. Hard code that the yup.string().matches() validation returns a 40 | * helpful error message when dates are not formatted correctly. 
41 | */ 42 | yup.setLocale({ 43 | string: { 44 | matches: "must be a date in format 'YYYY-MM-DD'", 45 | }, 46 | }); 47 | 48 | const checkDomainFilter = (req, res) => { 49 | if ( 50 | acceptableDomainReports.includes(req.params.reportName) && 51 | req.params.domain 52 | ) { 53 | return fetchData(req, res); 54 | } 55 | const tryReportText = acceptableDomainReports.join(", "); 56 | res.status(400); 57 | return res.json({ 58 | message: `You are requesting a report that cannot be filtered on domain. Please try one of the following reports: ${tryReportText}.`, 59 | status: 400, 60 | }); 61 | }; 62 | 63 | const fetchData = (req, res) => { 64 | try { 65 | validateRequest(req); 66 | } catch (err) { 67 | res.status(400); 68 | return res.json({ 69 | message: `Invalid request params: ${err}`, 70 | status: 400, 71 | }); 72 | } 73 | const params = Object.assign(req.query, req.params); 74 | return db 75 | .query(params) 76 | .then((result) => { 77 | const response = result.map((dataPoint) => 78 | Object.assign( 79 | { 80 | notice: 81 | req.version === "1.1" 82 | ? "v1 is being deprecated. Use v2 instead. See https://analytics.usa.gov/developer" 83 | : undefined, 84 | id: dataPoint.id, 85 | date: formatDateForDataPoint(dataPoint), 86 | report_name: dataPoint.report_name, 87 | report_agency: dataPoint.report_agency, 88 | }, 89 | dataPoint.data, 90 | ), 91 | ); 92 | 93 | res.json(response); 94 | }) 95 | .catch((err) => { 96 | console.error("Unexpected Error:", err); 97 | res.status(500); 98 | return res.json({ 99 | message: "An error occurred. 
Please check the application logs.", 100 | status: 500, 101 | }); 102 | }); 103 | }; 104 | 105 | const validateRequest = (req) => { 106 | const isoDateRegex = /^\d{4}-([0][1-9]|1[0-2])-([0][1-9]|[1-2]\d|3[01])$/; 107 | const requestSchema = yup.object({ 108 | query: yup.object({ 109 | before: yup.string().matches(isoDateRegex), 110 | after: yup.string().matches(isoDateRegex), 111 | limit: yup.number().positive().integer().max(10000), 112 | page: yup.number().positive().integer(), 113 | }), 114 | params: yup.object({ 115 | domain: yup.string(), 116 | reportAgency: yup.string(), 117 | reportName: yup.string(), 118 | version: yup.string(), 119 | }), 120 | }); 121 | return requestSchema.validateSync(req); 122 | }; 123 | 124 | app.get("/", (req, res) => { 125 | res.json({ 126 | current_time: new Date(), 127 | }); 128 | }); 129 | 130 | // middleware 131 | router.use("/v:version/", function (req, res, next) { 132 | const version = req.params.version; 133 | req.version = version; 134 | next(); 135 | }); 136 | 137 | router.get( 138 | "/v:version/reports/:reportName/data", 139 | routesVersioning( 140 | { 141 | "1.1.0": fetchData, 142 | "~2.0.0": fetchData, 143 | }, 144 | NoMatchFoundCallback, 145 | ), 146 | ); 147 | 148 | router.get( 149 | "/v:version/agencies/:reportAgency/reports/:reportName/data", 150 | routesVersioning( 151 | { 152 | "1.1.0": fetchData, 153 | "~2.0.0": fetchData, 154 | }, 155 | NoMatchFoundCallback, 156 | ), 157 | ); 158 | 159 | router.get( 160 | "/v:version/domain/:domain/reports/:reportName/data", 161 | routesVersioning( 162 | { 163 | "1.1.0": checkDomainFilter, 164 | "~2.0.0": checkDomainFilter, 165 | }, 166 | NoMatchFoundCallback, 167 | ), 168 | ); 169 | 170 | function NoMatchFoundCallback(req, res) { 171 | res 172 | .status(404) 173 | .json( 174 | "Version not found. 
Visit https://analytics.usa.gov/developer for information on the latest supported version.", 175 | ); 176 | } 177 | 178 | module.exports = app; 179 | -------------------------------------------------------------------------------- /src/db.js: -------------------------------------------------------------------------------- 1 | const knex = require("knex"); 2 | 3 | const config = require("./config"); 4 | 5 | const db = knex({ client: "pg", connection: config.postgres }); 6 | 7 | const parseLimitParam = (limitParam) => { 8 | const limit = parseInt(limitParam, 10); 9 | 10 | if (limit > 10000 || limit <= 0) { 11 | return 10000; 12 | } 13 | return limit; 14 | }; 15 | 16 | const parsePageParam = (pageParam) => { 17 | const page = parseInt(pageParam, 10); 18 | return Math.max(1, page); 19 | }; 20 | 21 | /** 22 | * @param {string} before the maximum date for the query in ISO format 23 | * @param {string} after the minimum date for the query in ISO format 24 | * @returns {string} the constructed where clause for the query 25 | */ 26 | const buildTimeQuery = (before, after) => { 27 | if (before && after) { 28 | return ['"date" <= ?::date AND "date" >= ?::date', [before, after]]; 29 | } 30 | if (before) { 31 | return ['"date" <= ?::date', [before]]; 32 | } 33 | if (after) { 34 | return ['"date" >= ?::date', [after]]; 35 | } 36 | return [true]; 37 | }; 38 | 39 | /** 40 | * @param {string} domain the domain to be queried 41 | * @param {string} reportName the report to be queried 42 | * @param {number} limitParam the maximum number of results to return 43 | * @param {number} pageParam the page of data to return based on the limit 44 | * @param {string} before the maximum date for the query in ISO format 45 | * @param {string} after the minimum date for the query in ISO format 46 | * @param {string} dbTable the table name to query 47 | * @returns {Promise} resolves with the result of the database query 48 | */ 49 | const queryDomain = ( 50 | domain, 51 | reportName, 52 | 
limitParam, 53 | pageParam, 54 | before, 55 | after, 56 | dbTable, 57 | ) => { 58 | const timeQuery = buildTimeQuery(before, after); 59 | 60 | const mainQuery = db(dbTable).where({ report_name: reportName }); 61 | 62 | if (reportName == "download") { 63 | mainQuery.whereRaw("data->> 'page' like ?", [`%${domain}%`]); 64 | } else { 65 | mainQuery.whereRaw( 66 | `data @> '${JSON.stringify({ domain }).replaceAll("'", "''").replaceAll("?", "\\?")}'::jsonb`, 67 | ); 68 | } 69 | 70 | return ( 71 | mainQuery 72 | .whereRaw(...timeQuery) 73 | // Using `orderByRaw` in order to specifcy NULLS LAST, see: 74 | // https://github.com/knex/knex/issues/282 75 | .orderByRaw("date desc NULLS LAST") 76 | // Previously, this was ordered by data-->total_events and data-->visits. Those queries 77 | // were very slow, and from what I can tell, it's not possible to add the proper multi-field 78 | // index on (date, data-->total_events, data-->visits) to speed up the queries, because `data` 79 | // is a JSON field. See this (rather wordy, sorry) thread for more details: 80 | // https://github.com/18F/analytics-reporter-api/issues/161#issuecomment-874860764 81 | // 82 | // Ordering by `id` here does _not_ guarantee ordering based on total_events or visits. However, 83 | // the order in which data is inserted into the table (by code in the analytics-reporter repo, which 84 | // pulls from Google Analytics) happens to be in order by visits or total_events, so ordering by 85 | // IDs may in practice keep the same ordering as before - but it would be best not to rely on this. 86 | // A longer term fix would be to move the total_events and visits fields to their own columns. 
87 | .orderBy("id", "asc") 88 | .limit(limitParam) 89 | .offset((pageParam - 1) * limitParam) 90 | ); 91 | }; 92 | 93 | /** 94 | * @param {object} queryConfig the config values for the database query 95 | * @param {string} queryConfig.reportName the report to be queried 96 | * @param {string} queryConfig.reportAgency the agency to be queried 97 | * @param {number} queryConfig.limit the max results to return 98 | * @param {number} queryConfig.page the page of data to return based on the limit 99 | * @param {string} queryConfig.domain the domain to be queried 100 | * @param {string} queryConfig.after the minimum date for the query in ISO format 101 | * @param {string} queryConfig.before the maximum date for the query in ISO format 102 | * @param {string} queryConfig.version the API version 103 | * @returns {Promise} resolves with the result of the database query 104 | */ 105 | const query = ({ 106 | reportName, 107 | reportAgency = null, 108 | limit = 1000, 109 | page = 1, 110 | domain = null, 111 | after = null, 112 | before = null, 113 | version, 114 | }) => { 115 | // we have different tables for new ga4 116 | // TODO: once UA has sunset we can remove this 117 | const dbTable = version === "1.1" ? 
"analytics_data" : "analytics_data_ga4"; 118 | const limitParam = parseLimitParam(limit); 119 | const pageParam = parsePageParam(page); 120 | if (domain) { 121 | return queryDomain( 122 | domain, 123 | reportName, 124 | limitParam, 125 | pageParam, 126 | before, 127 | after, 128 | dbTable, 129 | ); 130 | } 131 | const recordQuery = Object.assign({ 132 | report_name: reportName, 133 | report_agency: reportAgency, 134 | }); 135 | const timeQuery = buildTimeQuery(before, after); 136 | 137 | return ( 138 | db(dbTable) 139 | .where(recordQuery) 140 | .whereRaw(...timeQuery) 141 | // Using `orderByRaw` in order to specifcy NULLS LAST, see: 142 | // https://github.com/knex/knex/issues/282 143 | .orderByRaw("date desc NULLS LAST") 144 | // Previously, this was ordered by data-->total_events and data-->visits. Those queries 145 | // were very slow, and from what I can tell, it's not possible to add the proper multi-field 146 | // index on (date, data-->total_events, data-->visits) to speed up the queries, because `data` 147 | // is a JSON field. See this (rather wordy, sorry) thread for more details: 148 | // https://github.com/18F/analytics-reporter-api/issues/161#issuecomment-874860764 149 | // 150 | // Ordering by `id` here does _not_ guarantee ordering based on total_events or visits. However, 151 | // the order in which data is inserted into the table (by code in the analytics-reporter repo, which 152 | // pulls from Google Analytics) happens to be in order by visits or total_events, so ordering by 153 | // IDs may in practice keep the same ordering as before - but it would be best not to rely on this. 154 | // A longer term fix would be to move the total_events and visits fields to their own columns. 
155 | .orderBy("id", "asc") 156 | .limit(limitParam) 157 | .offset((pageParam - 1) * limitParam) 158 | ); 159 | }; 160 | 161 | module.exports = { query, queryDomain, buildTimeQuery, dbClient: db }; 162 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Build Status](https://github.com/18F/analytics-reporter-api/actions/workflows/ci.yml/badge.svg?branch=master) 2 | [![Snyk](https://snyk.io/test/github/18F/analytics-reporter-api/badge.svg)](https://snyk.io/test/github/18F/analytics-reporter-api) 3 | 4 | # Analytics API 5 | 6 | A system for publishing data retrieved from the Google Analytics API by the 7 | [Analytics Reporter](https://github.com/18F/analytics-reporter). 8 | This Analytics API serves data written to a PostgreSQL database by the Analytics 9 | Reporter, in response to HTTP requests. 10 | 11 | This project's data is provided by the Analytics Reporter using the [Google Analytics Data API v1](https://developers.google.com/analytics/devguides/reporting/data/v1/rest). 12 | The analytics data is processed into a flat data structure by the reporter and 13 | stored in the database which is then served by this API. 14 | 15 | The project previously used the [Google Analytics Core Reporting API v3](https://developers.google.com/analytics/devguides/reporting/core/v3/) 16 | and the [Google Analytics Real Time API v3](https://developers.google.com/analytics/devguides/reporting/realtime/v3/), 17 | also known as Universal Analytics, which has slightly different data points. 18 | 19 | Analytics API v1 serves the Universal Analytics data and Analytics API v2 serves 20 | the new GA4 data. See [Migrating from API V1 to API V2](#migrating-from-api-v1-to-api-v2) 21 | for more details. The Universal Analytics API will be deprecated on July 1, 2024 22 | and the Analytics API v1 will no longer receive new data after that date. 
23 | 24 | The process for adding features to this project is described in 25 | [Development and deployment process](docs/development_and_deployment_process.md). 26 | 27 | ## Setup 28 | 29 | This Analytics API maintains the schema for the database that the 30 | [Analytics Reporter](https://github.com/18F/analytics-reporter) 31 | writes to. Thus, the Analytics API must be setup and 32 | configured before the Analytics Reporter starts writing data. 33 | 34 | ### Prerequisites 35 | 36 | * NodeJS > v20.x 37 | * Docker (used to run Postgres) 38 | 39 | ### Clone the code and install dependencies 40 | 41 | ```bash 42 | git clone git@github.com:18F/analytics-reporter-api.git 43 | cd analytics-reporter-api 44 | npm install 45 | ``` 46 | 47 | ### Linting 48 | 49 | This repo uses Eslint and Prettier for code static analysis and formatting. Run 50 | the linter with: 51 | 52 | ```bash 53 | npm run lint 54 | ``` 55 | 56 | Automatically fix lint issues with: 57 | 58 | ```bash 59 | npm run lint:fix 60 | ``` 61 | 62 | ### Install git hooks 63 | 64 | There are some git hooks provided in the `./hooks` directory to help with 65 | common development tasks. These will checkout current NPM packages on branch 66 | change events, and run the linter on pre-commit. 67 | 68 | Install the provided hooks with the following command: 69 | 70 | ```bash 71 | npm run install-git-hooks 72 | ``` 73 | 74 | ### Running the unit tests 75 | 76 | The unit tests for this repo require a local PostgreSQL database. 77 | 78 | Start the test DB in Docker: 79 | 80 | ```bash 81 | docker compose -f docker-compose.test.yml up 82 | ``` 83 | 84 | The test DB connection in knexfile.js has some default connection config which works out-of-the-box with the docker-compose test db. 
85 | 86 | Run the tests (pre-test hook runs DB migrations): 87 | 88 | ```bash 89 | npm test 90 | ``` 91 | 92 | #### Running the unit tests with code coverage reporting 93 | 94 | If you wish to see a code coverage report after running the tests, use the 95 | following command. This runs the DB migrations, tests, and the NYC code coverage 96 | tool: 97 | 98 | ```bash 99 | npm run coverage 100 | ``` 101 | 102 | ### Run the application 103 | 104 | Start up the dev database and run the migrations. 105 | 106 | ```bash 107 | # Start database in Docker 108 | docker compose up -d 109 | 110 | # Run migrations 111 | npm run migrate 112 | ``` 113 | 114 | Now the app can be started. 115 | 116 | ```bash 117 | npm start 118 | ``` 119 | 120 | The API should now be available at http://localhost:4444/. Note that the API will not render any data because the database is empty. 121 | 122 | ### Load data with analytics-reporter 123 | Data for the API is loaded into the database by the [Analytics Reporter](https://github.com/18F/analytics-reporter). For dev environments, 124 | the default database configuration for both the `analytics-reporter` repo and the `analytics-reporter-api` repo point to the same database. 125 | 126 | Follow the instructions in the `analytics-reporter` README to set up the reporter and configure an agency to collect data for. 127 | Ignore any instructions about starting up a database - you'll use the database you already have running. 128 | Once setup is done, run the reporter with the `--write-to-database` option. 129 | 130 | Now you should be able to retrieve data for the agency you selected. For instance, if you configured 131 | the reporter to load data for `general-services-administration`, then JSON data about browser demographics should be available at 132 | http://localhost:4444/v2.0.0/agencies/general-services-administration/reports/browsers/data. 
133 | 134 | ## Using the API 135 | 136 | Full API docs can be found here: https://open.gsa.gov/api/dap/ 137 | 138 | ### Environments 139 | 140 | The base URLs for the 3 API environments: 141 | - development: https://api.gsa.gov/analytics/dap/develop/ 142 | - staging: https://api.gsa.gov/analytics/dap/staging/ 143 | - production: https://api.gsa.gov/analytics/dap/ 144 | 145 | ### Overview 146 | 147 | The Analytics API exposes 3 API endpoints: 148 | 149 | - `/v{api_version}/reports/{report_name}/data` 150 | - `/v{api_version}/agencies/{agency_name}/reports/{report_name}/data` 151 | - `/v{api_version}/domain/{domain}/reports/{report_name}/data` 152 | 153 | Each endpoint renders a JSON array with the most recent 1000 records that the 154 | Analytics Reporter has generated for the given agency and report. If no records 155 | are found, an empty array is returned. 156 | 157 | Records are sorted according to the associated date. 158 | 159 | #### Limit query parameter 160 | 161 | If a different number of records is desired, the `limit` query parameter can be 162 | set to specify the desired number of records. 163 | 164 | ``` 165 | /v2/reports/realtime/data?limit=500 166 | ``` 167 | 168 | The maximum number of records that can be rendered for any given request is 169 | 10,000. 170 | 171 | #### Page query parameter 172 | 173 | If the desired record does not appear for the current request, the `page` query 174 | parameter can be used to get the next series of data points. Since the data is 175 | ordered by date, this parameter effectively allows older data to be queried. 176 | 177 | ``` 178 | /v2/reports/realtime/data?page=2 179 | ``` 180 | 181 | ## Migrating from API V1 to API V2 182 | 183 | ### Background 184 | 185 | Analytics API V1 returns data from Google Analytics V3, also known as Universal 186 | Analytics (UA). 187 | 188 | Google is retiring UA and is encouraging users to move to their new 189 | version Google Analytics V4 (GA4) in 2024. 
190 | 191 | Analytics API V2 returns data from GA4. 192 | 193 | ### Migration details 194 | 195 | #### Requests 196 | 197 | The Analytics API endpoints are the same between V1 and V2, the only difference 198 | for API requests is the API version string. 199 | 200 | #### Responses 201 | 202 | Response data is slightly different in Analytics API V2. This change is due to 203 | the data provided by Google Analytics. Some data fields were retired in GA4, and 204 | some other useful data fields were added. The changes follow: 205 | 206 | ##### Deprecated fields 207 | 208 | - browser_version 209 | - has_social_referral 210 | - exits 211 | - exit_page 212 | 213 | ##### New fields 214 | 215 | ###### bounce_rate 216 | 217 | The percentage of sessions that were not engaged. GA4 defines engaged as a 218 | session that lasts longer than 10 seconds or has multiple pageviews. 219 | 220 | ###### file_name 221 | 222 | The page path of a downloaded file. 223 | 224 | ###### language_code 225 | 226 | The ISO 639 language setting of the user's device. e.g. 'en-us' 227 | 228 | ###### session_default_channel_group 229 | 230 | An enum which describes the session. Possible values: 231 | 232 | 'Direct', 'Organic Search', 'Paid Social', 'Organic Social', 'Email', 233 | 'Affiliates', 'Referral', 'Paid Search', 'Video', and 'Display' 234 | 235 | ## Creating a new database migration 236 | If you need to migrate the database, you can create a new migration via `knex`, which will create the migration file for you based in part on the migration name you provide. From the root of this repo, run: 237 | ``` 238 | npx knex migrate:make <migration_name> 239 | ``` 240 | 241 | See [knex documentation](https://knexjs.org/#Installation-migrations) for more details. 242 | 243 | ## Running database migrations 244 | 245 | ### Locally 246 | 247 | `npm run migrate` 248 | 249 | ### In production 250 | 251 | In production, you can run database migrations via `cf run-task`. 
As with anything in production, be careful when doing this! First, try checking the current status of migrations using the `migrate:status` command 252 | 253 | ``` 254 | cf run-task analytics-reporter-api --command "knex migrate:status" --name check_migration_status 255 | ``` 256 | 257 | This will kick off a task - you can see the output by running: 258 | 259 | ``` 260 | cf logs analytics-reporter-api --recent 261 | # the output will look something like... 262 | 2021-07-19T14:31:39.89-0400 [APP/TASK/check_migration_status/0] OUT Using environment: production 263 | 2021-07-19T14:31:40.16-0400 [APP/TASK/check_migration_status/0] OUT Found 3 Completed Migration file/files. 264 | 2021-07-19T14:31:40.16-0400 [APP/TASK/check_migration_status/0] OUT 20170308164751_create_analytics_data.js 265 | 2021-07-19T14:31:40.16-0400 [APP/TASK/check_migration_status/0] OUT 20170316115145_add_analytics_data_indexes.js 266 | 2021-07-19T14:31:40.16-0400 [APP/TASK/check_migration_status/0] OUT 20170522094056_rename_date_time_to_date.js 267 | 2021-07-19T14:31:40.16-0400 [APP/TASK/check_migration_status/0] OUT No Pending Migration files Found. 268 | 2021-07-19T14:31:40.17-0400 [APP/TASK/check_migration_status/0] OUT Exit status 0 269 | ``` 270 | 271 | To actually run the migration, you would run: 272 | 273 | ``` 274 | cf run-task analytics-reporter-api --command "knex migrate:latest" --name run_db_migrations 275 | ``` 276 | 277 | See [knex documentation](https://knexjs.org/#Installation-migrations) for more details and options on the `migrate` command. 278 | 279 | ## Public domain 280 | 281 | This project is in the worldwide [public domain](LICENSE.md). As stated in 282 | [CONTRIBUTING](CONTRIBUTING.md): 283 | 284 | > This project is in the public domain within the United States, and copyright and related rights in the work worldwide are waived through the [CC0 1.0 Universal public domain dedication](https://creativecommons.org/publicdomain/zero/1.0/). 
285 | > 286 | > All contributions to this project will be released under the CC0 dedication. By submitting a pull request, you are agreeing to comply with this waiver of copyright interest. 287 | -------------------------------------------------------------------------------- /test/db.test.js: -------------------------------------------------------------------------------- 1 | const expect = require("chai").expect; 2 | const proxyquire = require("proxyquire"); 3 | const database = require("./support/db"); 4 | 5 | const db = proxyquire("../src/db", { 6 | "./config": require("../src/config"), 7 | }); 8 | 9 | describe("db", () => { 10 | const apiVersions = ["v1.1", "v2"]; 11 | 12 | before(async () => { 13 | // Setup the test database client 14 | await database.createClient(); 15 | }); 16 | 17 | after(async () => { 18 | // Clean up the test database client and the application database client 19 | await database.destroyClient().then(() => { 20 | return db.dbClient.destroy(); 21 | }); 22 | }); 23 | 24 | apiVersions.forEach((apiVersion) => { 25 | describe(`for API version ${apiVersion}`, () => { 26 | const table = 27 | apiVersion === "v1.1" ? "analytics_data" : "analytics_data_ga4"; 28 | const queryVersion = apiVersion === `v1.1` ? 
"1.1" : "2"; 29 | 30 | beforeEach(async () => { 31 | await database.resetSchema(table); 32 | }); 33 | 34 | describe(".query(params)", () => { 35 | it("should return all rows for the given agency and report", async () => { 36 | await database 37 | .client(table) 38 | .insert([ 39 | { report_name: "my-report", report_agency: "my-agency" }, 40 | { report_name: "not-my-report", report_agency: "my-agency" }, 41 | { report_name: "my-report", report_agency: "not-my-agency" }, 42 | { report_name: "my-report", report_agency: null }, 43 | ]) 44 | .then(() => { 45 | return db.query({ 46 | reportName: "my-report", 47 | reportAgency: "my-agency", 48 | version: queryVersion, 49 | }); 50 | }) 51 | .then((results) => { 52 | expect(results).to.have.length(1); 53 | expect(results[0].report_name).to.equal("my-report"); 54 | expect(results[0].report_agency).to.equal("my-agency"); 55 | }); 56 | }); 57 | 58 | it("should return all rows without an agency if no agency name is given", async () => { 59 | await database 60 | .client(table) 61 | .insert([ 62 | { report_name: "my-report", report_agency: "not-my-agency" }, 63 | { report_name: "my-report", report_agency: null }, 64 | ]) 65 | .then(() => { 66 | return db.query({ 67 | reportName: "my-report", 68 | version: queryVersion, 69 | }); 70 | }) 71 | .then((results) => { 72 | expect(results).to.have.length(1); 73 | expect(results[0].report_name).to.equal("my-report"); 74 | expect(results[0].report_agency).to.be.null; 75 | }); 76 | }); 77 | 78 | it("should sort the rows according to the date column", async () => { 79 | await database 80 | .client(table) 81 | .insert([ 82 | { report_name: "report", date: "2017-01-02" }, 83 | { report_name: "report", date: "2017-01-01" }, 84 | { report_name: "report", date: "2017-01-03" }, 85 | ]) 86 | .then(() => { 87 | return db.query({ reportName: "report", version: queryVersion }); 88 | }) 89 | .then((results) => { 90 | expect(results).to.have.length(3); 91 | results.forEach((result, index) => { 92 | 
const resultDate = result.date.toISOString().slice(0, 10); 93 | const expectedDate = `2017-01-0${3 - index}`; 94 | expect(resultDate).to.equal(expectedDate); 95 | }); 96 | }); 97 | }); 98 | 99 | it("should limit the rows according to the limit param", async () => { 100 | const rows = Array(5) 101 | .fill(0) 102 | .map(() => { 103 | return { report_name: "report", date: "2017-01-01" }; 104 | }); 105 | await database 106 | .client(table) 107 | .insert(rows) 108 | .then(() => { 109 | return db.query({ 110 | reportName: "report", 111 | limit: 4, 112 | version: queryVersion, 113 | }); 114 | }) 115 | .then((results) => { 116 | expect(results).to.have.length(4); 117 | }); 118 | }); 119 | 120 | it("should default to a limit of 1000", async () => { 121 | const rows = Array(1001) 122 | .fill(0) 123 | .map(() => { 124 | return { report_name: "report", date: "2017-01-01" }; 125 | }); 126 | await database 127 | .client(table) 128 | .insert(rows) 129 | .then(() => { 130 | return db.query({ reportName: "report", version: queryVersion }); 131 | }) 132 | .then((results) => { 133 | expect(results).to.have.length(1000); 134 | }); 135 | }); 136 | 137 | it("should have a maximum limit of 10,000", async () => { 138 | const rows = Array(11000) 139 | .fill(0) 140 | .map(() => { 141 | return { report_name: "report", date: "2017-01-01" }; 142 | }); 143 | await database 144 | .client(table) 145 | .insert(rows) 146 | .then(() => { 147 | return db.query({ 148 | reportName: "report", 149 | limit: 11000, 150 | version: queryVersion, 151 | }); 152 | }) 153 | .then((results) => { 154 | expect(results).to.have.length(10000); 155 | }); 156 | }); 157 | 158 | it("should paginate on the page param", async () => { 159 | const rows = Array(6) 160 | .fill(0) 161 | .map((val, index) => { 162 | return { report_name: "report", date: `2017-01-0${index + 1}` }; 163 | }); 164 | await database 165 | .client(table) 166 | .insert(rows) 167 | .then(() => { 168 | return db.query({ 169 | reportName: "report", 170 | 
limit: 3, 171 | page: 1, 172 | version: queryVersion, 173 | }); 174 | }) 175 | .then((results) => { 176 | expect(results).to.have.length(3); 177 | expect(results[0].date.toISOString()).to.match(/^2017-01-06/); 178 | expect(results[2].date.toISOString()).to.match(/^2017-01-04/); 179 | 180 | return db.query({ 181 | reportName: "report", 182 | limit: 3, 183 | page: 2, 184 | version: queryVersion, 185 | }); 186 | }) 187 | .then((results) => { 188 | expect(results).to.have.length(3); 189 | expect(results[0].date.toISOString()).to.match(/^2017-01-03/); 190 | expect(results[2].date.toISOString()).to.match(/^2017-01-01/); 191 | }); 192 | }); 193 | }); 194 | 195 | describe(".buildTimeQuery(before, after)", () => { 196 | it("should return an array containing true if no date params are present", () => { 197 | const result = db.buildTimeQuery(null, null); 198 | expect(result).to.deep.equal([true]); 199 | }); 200 | 201 | it("should return a nested array a raw query string and an array of the dates if both a params are set", () => { 202 | const result = db.buildTimeQuery("2018-11-20", "2018-12-20"); 203 | expect(result).to.deep.equal([ 204 | '"date" <= ?::date AND "date" >= ?::date', 205 | ["2018-11-20", "2018-12-20"], 206 | ]); 207 | }); 208 | 209 | it("should return a nested array a raw query string and an array of the before if before is set", () => { 210 | const result = db.buildTimeQuery("2018-11-20", null); 211 | expect(result).to.deep.equal(['"date" <= ?::date', ["2018-11-20"]]); 212 | }); 213 | 214 | it("should return a nested array a raw query string and an array of the after if after is set", () => { 215 | const result = db.buildTimeQuery(null, "2018-11-22"); 216 | expect(result).to.deep.equal(['"date" >= ?::date', ["2018-11-22"]]); 217 | }); 218 | }); 219 | 220 | describe(".queryDomain(params)", () => { 221 | it("should only return 2 results that include site reports from the test.gov domain", async () => { 222 | await database 223 | .client(table) 224 | .insert([ 225 
| { 226 | report_name: "site", 227 | date: "2017-01-02", 228 | data: { domain: "test.gov" }, 229 | }, 230 | { 231 | report_name: "site", 232 | date: "2017-01-01", 233 | data: { domain: "test.gov" }, 234 | }, 235 | { 236 | report_name: "site", 237 | date: "2017-01-03", 238 | data: { domain: "test.gov" }, 239 | }, 240 | ]) 241 | .then(() => { 242 | return db.query({ 243 | domain: "test.gov", 244 | reportName: "site", 245 | limit: 2, 246 | page: 1, 247 | version: queryVersion, 248 | }); 249 | }) 250 | .then((results) => { 251 | expect(results).to.have.length(2); 252 | }); 253 | }); 254 | 255 | it("should only return 2 results that include site reports from the test.gov domain, when multiple reports", async () => { 256 | await database 257 | .client(table) 258 | .insert([ 259 | { 260 | report_name: "report", 261 | date: "2017-01-02", 262 | data: { domain: "test.gov" }, 263 | }, 264 | { 265 | report_name: "site", 266 | date: "2017-01-01", 267 | data: { domain: "test.gov" }, 268 | }, 269 | { 270 | report_name: "site", 271 | date: "2017-01-03", 272 | data: { domain: "test.gov" }, 273 | }, 274 | ]) 275 | .then(() => { 276 | return db.query({ 277 | domain: "test.gov", 278 | reportName: "site", 279 | limit: 1000, 280 | page: 1, 281 | version: queryVersion, 282 | }); 283 | }) 284 | .then((results) => { 285 | expect(results).to.have.length(2); 286 | expect(results[0].report_name).to.equal("site"); 287 | expect(results[0].data.domain).to.equal("test.gov"); 288 | }); 289 | }); 290 | 291 | it("should only return 2 results that include site reports from the test.gov domain, when multiple domains", async () => { 292 | await database 293 | .client(table) 294 | .insert([ 295 | { 296 | report_name: "site", 297 | date: "2017-01-02", 298 | data: { domain: "test.gov" }, 299 | }, 300 | { 301 | report_name: "site", 302 | date: "2017-01-01", 303 | data: { domain: "test.gov" }, 304 | }, 305 | { 306 | report_name: "site", 307 | date: "2017-01-03", 308 | data: { domain: "usda.gov" }, 309 | }, 
310 | ]) 311 | .then(() => { 312 | return db.query({ 313 | domain: "test.gov", 314 | reportName: "site", 315 | limit: 1000, 316 | page: 1, 317 | version: queryVersion, 318 | }); 319 | }) 320 | .then((results) => { 321 | expect(results).to.have.length(2); 322 | expect(results[0].report_name).to.equal("site"); 323 | expect(results[0].data.domain).to.equal("test.gov"); 324 | }); 325 | }); 326 | 327 | it("should only return 4 results that include download reports from the test.gov domain, when multiple domains", async () => { 328 | const testData = [ 329 | { 330 | report_name: "download", 331 | date: "2017-01-02", 332 | data: { page: "www.test.gov" }, 333 | }, 334 | { 335 | report_name: "download", 336 | date: "2017-01-02", 337 | data: { page: "test.gov" }, 338 | }, 339 | { 340 | report_name: "download", 341 | date: "2017-01-01", 342 | data: { page: "test.gov/example" }, 343 | }, 344 | { 345 | report_name: "download", 346 | date: "2017-01-01", 347 | data: { page: "www.test.gov/example" }, 348 | }, 349 | { 350 | report_name: "download", 351 | date: "2017-01-03", 352 | data: { page: "usda.gov" }, 353 | }, 354 | ]; 355 | await database 356 | .client(table) 357 | .insert(testData) 358 | .then(() => { 359 | return db.query({ 360 | domain: "test.gov", 361 | reportName: "download", 362 | limit: 1000, 363 | page: 1, 364 | version: queryVersion, 365 | }); 366 | }) 367 | .then((results) => { 368 | expect(results).to.have.length(4); 369 | results.forEach((resultItem, index) => { 370 | expect(resultItem.report_name).to.equal( 371 | testData[index].report_name, 372 | ); 373 | expect(resultItem.data.page).to.equal( 374 | testData[index].data.page, 375 | ); 376 | }); 377 | }); 378 | }); 379 | 380 | it("should only return 2 results that include site reports from the test.gov domain, when before date parameters are in", async () => { 381 | await database 382 | .client(table) 383 | .insert([ 384 | { 385 | report_name: "site", 386 | date: "2017-01-02", 387 | data: { domain: "test.gov" }, 
388 | }, 389 | { 390 | report_name: "site", 391 | date: "2017-01-01", 392 | data: { domain: "test.gov" }, 393 | }, 394 | { 395 | report_name: "site", 396 | date: "2018-01-03", 397 | data: { domain: "test.gov" }, 398 | }, 399 | { 400 | report_name: "site", 401 | date: "2018-01-03", 402 | data: { domain: "usda.gov" }, 403 | }, 404 | ]) 405 | .then(() => { 406 | return db.query({ 407 | domain: "test.gov", 408 | reportName: "site", 409 | limit: 1000, 410 | page: 1, 411 | before: "2017-10-20", 412 | version: queryVersion, 413 | }); 414 | }) 415 | .then((results) => { 416 | expect(results).to.have.length(2); 417 | expect(results[0].report_name).to.equal("site"); 418 | expect(results[0].data.domain).to.equal("test.gov"); 419 | expect(results[0].date.toISOString()).to.match(/^2017-01-02/); 420 | }); 421 | }); 422 | 423 | it("should only return 1 result that include site reports from the test.gov domain, when after date parameters are in", async () => { 424 | await database 425 | .client(table) 426 | .insert([ 427 | { 428 | report_name: "site", 429 | date: "2017-01-02", 430 | data: { domain: "test.gov" }, 431 | }, 432 | { 433 | report_name: "site", 434 | date: "2017-01-01", 435 | data: { domain: "test.gov" }, 436 | }, 437 | { 438 | report_name: "site", 439 | date: "2018-01-03", 440 | data: { domain: "test.gov" }, 441 | }, 442 | { 443 | report_name: "site", 444 | date: "2018-01-03", 445 | data: { domain: "usda.gov" }, 446 | }, 447 | ]) 448 | .then(() => { 449 | return db.query({ 450 | domain: "test.gov", 451 | reportName: "site", 452 | limit: 1000, 453 | page: 1, 454 | after: "2017-10-20", 455 | version: queryVersion, 456 | }); 457 | }) 458 | .then((results) => { 459 | expect(results).to.have.length(1); 460 | expect(results[0].report_name).to.equal("site"); 461 | expect(results[0].data.domain).to.equal("test.gov"); 462 | expect(results[0].date.toISOString()).to.match(/^2018-01-03/); 463 | }); 464 | }); 465 | 466 | it("should only return 2 result that include site reports 
from the test.gov domain, when after/before date parameters set", async () => { 467 | await database 468 | .client(table) 469 | .insert([ 470 | { 471 | report_name: "site", 472 | date: "2017-01-02", 473 | data: { domain: "test.gov" }, 474 | }, 475 | { 476 | report_name: "site", 477 | date: "2017-01-01", 478 | data: { domain: "test.gov" }, 479 | }, 480 | { 481 | report_name: "site", 482 | date: "2018-01-03", 483 | data: { domain: "test.gov" }, 484 | }, 485 | { 486 | report_name: "site", 487 | date: "2017-11-04", 488 | data: { domain: "test.gov" }, 489 | }, 490 | { 491 | report_name: "site", 492 | date: "2017-11-03", 493 | data: { domain: "test.gov" }, 494 | }, 495 | { 496 | report_name: "site", 497 | date: "2018-01-03", 498 | data: { domain: "usda.gov" }, 499 | }, 500 | ]) 501 | .then(() => { 502 | return db.query({ 503 | domain: "test.gov", 504 | reportName: "site", 505 | limit: 1000, 506 | page: 1, 507 | before: "2018-01-02", 508 | after: "2017-10-20", 509 | version: queryVersion, 510 | }); 511 | }) 512 | .then((results) => { 513 | expect(results).to.have.length(2); 514 | expect(results[0].report_name).to.equal("site"); 515 | expect(results[0].data.domain).to.equal("test.gov"); 516 | expect(results[0].date.toISOString()).to.match(/^2017-11-04/); 517 | }); 518 | }); 519 | 520 | it("should only return 2 result that include site reports from the test.gov domain, when after/before date parameters set", async () => { 521 | await database 522 | .client(table) 523 | .insert([ 524 | { 525 | report_name: "site", 526 | date: "2017-01-02", 527 | data: { domain: "test.gov" }, 528 | }, 529 | { 530 | report_name: "site", 531 | date: "2017-01-01", 532 | data: { domain: "test.gov" }, 533 | }, 534 | { 535 | report_name: "site", 536 | date: "2018-01-03", 537 | data: { domain: "test.gov" }, 538 | }, 539 | { 540 | report_name: "report", 541 | date: "2018-01-03", 542 | data: { domain: "test.gov" }, 543 | }, 544 | { 545 | report_name: "site", 546 | date: "2017-11-03", 547 | data: { 
domain: "test.gov" }, 548 | }, 549 | { 550 | report_name: "site", 551 | date: "2018-01-03", 552 | data: { domain: "usda.gov" }, 553 | }, 554 | ]) 555 | .then(() => { 556 | return db.query({ 557 | domain: "test.gov", 558 | reportName: "site", 559 | limit: 1000, 560 | page: 1, 561 | before: "2018-01-04", 562 | after: "2017-10-20", 563 | version: queryVersion, 564 | }); 565 | }) 566 | .then((results) => { 567 | expect(results).to.have.length(2); 568 | expect(results[0].report_name).to.equal("site"); 569 | expect(results[0].data.domain).to.equal("test.gov"); 570 | expect(results[0].date.toISOString()).to.match(/^2018-01-03/); 571 | }); 572 | }); 573 | }); 574 | }); 575 | }); 576 | }); 577 | -------------------------------------------------------------------------------- /test/app.test.js: -------------------------------------------------------------------------------- 1 | const logger = require("../src/logger"); 2 | 3 | logger.level = "error"; 4 | 5 | const expect = require("chai").expect; 6 | const proxyquire = require("proxyquire"); 7 | const request = require("supertest"); 8 | 9 | const db = {}; 10 | const noticeValue = 11 | "v1 is being deprecated. Use v2 instead. 
See https://analytics.usa.gov/developer"; 12 | 13 | const app = proxyquire("../src/app", { 14 | "./db": db, 15 | }); 16 | 17 | const handleIfApiVersionNotice = (apiVersion, arr) => { 18 | if (apiVersion === "v1.1") { 19 | return arr.map((object) => { 20 | return { ...object, notice: noticeValue }; 21 | }); 22 | } 23 | return arr; 24 | }; 25 | 26 | const apiVersions = ["v1.1", "v2"]; 27 | 28 | const invalidDates = [ 29 | "2020-00-00", 30 | "2024-14-01", 31 | "2025-01-33", 32 | "2020/01/02", 33 | "20202-01-01", 34 | "2020-010-01", 35 | "2020-01-010", 36 | "343542", 37 | "junk", 38 | ]; 39 | 40 | const invalidPositiveIntegers = [-1, 0, 33.33, "foobar", "foo4bar", "4foobar"]; 41 | 42 | const invalidPageNumbers = [...invalidPositiveIntegers, 10001]; 43 | 44 | describe("app", () => { 45 | let url; 46 | 47 | beforeEach(() => { 48 | url = ""; 49 | }); 50 | 51 | apiVersions.forEach((apiVersion) => { 52 | describe(`with api version: ${apiVersion}`, () => { 53 | beforeEach(() => { 54 | db.query = () => Promise.resolve(); 55 | }); 56 | 57 | describe("and with route: /reports/:reportName/data", () => { 58 | beforeEach(() => { 59 | url = `/${apiVersion}/reports/fake-report/data`; 60 | }); 61 | 62 | describe("when params are valid", () => { 63 | it("should not pass the agency param if the request does not specify an agency", async () => { 64 | db.query = (params) => { 65 | expect(params.reportAgency).to.be.undefined; 66 | expect(params.reportName).to.equal("fake-report"); 67 | const arr = handleIfApiVersionNotice(apiVersion, [ 68 | { id: 1, date: new Date("2017-01-01") }, 69 | { id: 2, date: new Date("2017-01-02") }, 70 | ]); 71 | return Promise.resolve(arr); 72 | }; 73 | 74 | const dataRequest = request(app) 75 | .get(`/${apiVersion}/reports/fake-report/data`) 76 | .expect(200); 77 | 78 | await dataRequest.then((actualResponse) => { 79 | const expectedResponseBody = handleIfApiVersionNotice( 80 | apiVersion, 81 | [ 82 | { id: 1, date: "2017-01-01" }, 83 | { id: 2, date: 
"2017-01-02" }, 84 | ], 85 | ); 86 | expect(actualResponse.body).to.deep.equal(expectedResponseBody); 87 | }); 88 | }); 89 | }); 90 | 91 | describe("when params are invalid", () => { 92 | describe("and the before param is not a valid date", () => { 93 | invalidDates.forEach((invalidDate) => { 94 | describe(`and date is ${invalidDate}`, () => { 95 | it("should respond with a 400", async () => { 96 | const apiRequest = request(app) 97 | .get(`${url}?before=${invalidDate}`) 98 | .expect(400); 99 | 100 | await apiRequest.then((actualResponse) => { 101 | const expectedResponseBody = { 102 | message: 103 | "Invalid request params: ValidationError: must be a date in format 'YYYY-MM-DD'", 104 | status: 400, 105 | }; 106 | expect(actualResponse.body).to.deep.equal( 107 | expectedResponseBody, 108 | ); 109 | }); 110 | }); 111 | }); 112 | }); 113 | }); 114 | 115 | describe("and the after param is not a valid date", () => { 116 | invalidDates.forEach((invalidDate) => { 117 | describe(`and date is ${invalidDate}`, () => { 118 | it("should respond with a 400", async () => { 119 | const apiRequest = request(app) 120 | .get(`${url}?after=${invalidDate}`) 121 | .expect(400); 122 | 123 | await apiRequest.then((actualResponse) => { 124 | const expectedResponseBody = { 125 | message: 126 | "Invalid request params: ValidationError: must be a date in format 'YYYY-MM-DD'", 127 | status: 400, 128 | }; 129 | expect(actualResponse.body).to.deep.equal( 130 | expectedResponseBody, 131 | ); 132 | }); 133 | }); 134 | }); 135 | }); 136 | }); 137 | 138 | describe("and the page param is not a valid positive integer", () => { 139 | invalidPositiveIntegers.forEach((invalidPositiveInteger) => { 140 | describe(`and page is ${invalidPositiveInteger}`, () => { 141 | it("should respond with a 400", async () => { 142 | const apiRequest = request(app) 143 | .get(`${url}?page=${invalidPositiveInteger}`) 144 | .expect(400); 145 | 146 | await apiRequest.then((actualResponse) => { 147 | const 
expectedResponseBody = { 148 | status: 400, 149 | }; 150 | expect(actualResponse.body).to.deep.include( 151 | expectedResponseBody, 152 | ); 153 | }); 154 | }); 155 | }); 156 | }); 157 | }); 158 | 159 | describe("and the limit param is not a valid positive integer with max 10000", () => { 160 | invalidPageNumbers.forEach((invalidPageNumber) => { 161 | describe(`and page is ${invalidPageNumber}`, () => { 162 | it("should respond with a 400", async () => { 163 | const apiRequest = request(app) 164 | .get(`${url}?limit=${invalidPageNumber}`) 165 | .expect(400); 166 | 167 | await apiRequest.then((actualResponse) => { 168 | const expectedResponseBody = { 169 | status: 400, 170 | }; 171 | expect(actualResponse.body).to.deep.include( 172 | expectedResponseBody, 173 | ); 174 | }); 175 | }); 176 | }); 177 | }); 178 | }); 179 | }); 180 | }); 181 | 182 | describe("and with route: /agencies/:agency/reports/:reportName/data", () => { 183 | beforeEach(() => { 184 | url = `/${apiVersion}/agencies/fake-agency/reports/fake-report/data`; 185 | }); 186 | 187 | describe("and params are valid", () => { 188 | it("should pass params from the url to db.query and render the result", async () => { 189 | db.query = (params) => { 190 | expect(params.reportAgency).to.equal("fake-agency"); 191 | expect(params.reportName).to.equal("fake-report"); 192 | const arr = handleIfApiVersionNotice(apiVersion, [ 193 | { id: 1, date: new Date("2017-01-01") }, 194 | { id: 2, date: new Date("2017-01-02") }, 195 | ]); 196 | return Promise.resolve(arr); 197 | }; 198 | 199 | const dataRequest = request(app).get(url).expect(200); 200 | 201 | await dataRequest.then((actualResponse) => { 202 | const expectedResponseBody = handleIfApiVersionNotice( 203 | apiVersion, 204 | [ 205 | { id: 1, date: "2017-01-01" }, 206 | { id: 2, date: "2017-01-02" }, 207 | ], 208 | ); 209 | expect(actualResponse.body).to.deep.equal(expectedResponseBody); 210 | }); 211 | }); 212 | 213 | it("should merge the params in the url with query 
params", async () => { 214 | db.query = (params) => { 215 | expect(params.reportAgency).to.equal("fake-agency"); 216 | expect(params.reportName).to.equal("fake-report"); 217 | expect(params.limit).to.equal("50"); 218 | const arr = handleIfApiVersionNotice(apiVersion, [ 219 | { id: 1, date: new Date("2017-01-01") }, 220 | { id: 2, date: new Date("2017-01-02") }, 221 | ]); 222 | return Promise.resolve(arr); 223 | }; 224 | 225 | const dataRequest = request(app).get(`${url}?limit=50`).expect(200); 226 | 227 | await dataRequest.then((actualResponse) => { 228 | const expectedResponseBody = handleIfApiVersionNotice( 229 | apiVersion, 230 | [ 231 | { id: 1, date: "2017-01-01" }, 232 | { id: 2, date: "2017-01-02" }, 233 | ], 234 | ); 235 | expect(actualResponse.body).to.deep.equal(expectedResponseBody); 236 | }); 237 | }); 238 | 239 | it("should respond with a 500 if db.query rejects", async () => { 240 | db.query = () => 241 | Promise.reject( 242 | "This is a test of the emergency broadcast system.", 243 | ); 244 | 245 | const dataRequest = request(app) 246 | .get( 247 | `/${apiVersion}/agencies/fake-agency/reports/fake-report/data`, 248 | ) 249 | .expect(500); 250 | 251 | await dataRequest.then((actualResponse) => { 252 | const expectedResponseBody = { 253 | message: 254 | "An error occurred. 
Please check the application logs.", 255 | status: 500, 256 | }; 257 | expect(actualResponse.body).to.deep.equal(expectedResponseBody); 258 | }); 259 | }); 260 | }); 261 | 262 | describe("and params are invalid", () => { 263 | describe("and the before param is not a valid date", () => { 264 | invalidDates.forEach((invalidDate) => { 265 | describe(`and date is ${invalidDate}`, () => { 266 | it("should respond with a 400", async () => { 267 | const apiRequest = request(app) 268 | .get(`${url}?before=${invalidDate}`) 269 | .expect(400); 270 | 271 | await apiRequest.then((actualResponse) => { 272 | const expectedResponseBody = { 273 | message: 274 | "Invalid request params: ValidationError: must be a date in format 'YYYY-MM-DD'", 275 | status: 400, 276 | }; 277 | expect(actualResponse.body).to.deep.equal( 278 | expectedResponseBody, 279 | ); 280 | }); 281 | }); 282 | }); 283 | }); 284 | }); 285 | 286 | describe("and the after param is not a valid date", () => { 287 | invalidDates.forEach((invalidDate) => { 288 | describe(`and date is ${invalidDate}`, () => { 289 | it("should respond with a 400", async () => { 290 | const apiRequest = request(app) 291 | .get(`${url}?after=${invalidDate}`) 292 | .expect(400); 293 | 294 | await apiRequest.then((actualResponse) => { 295 | const expectedResponseBody = { 296 | message: 297 | "Invalid request params: ValidationError: must be a date in format 'YYYY-MM-DD'", 298 | status: 400, 299 | }; 300 | expect(actualResponse.body).to.deep.equal( 301 | expectedResponseBody, 302 | ); 303 | }); 304 | }); 305 | }); 306 | }); 307 | }); 308 | 309 | describe("and the page param is not a valid positive integer", () => { 310 | invalidPositiveIntegers.forEach((invalidPositiveInteger) => { 311 | describe(`and page is ${invalidPositiveInteger}`, () => { 312 | it("should respond with a 400", async () => { 313 | const apiRequest = request(app) 314 | .get(`${url}?page=${invalidPositiveInteger}`) 315 | .expect(400); 316 | 317 | await 
apiRequest.then((actualResponse) => { 318 | const expectedResponseBody = { 319 | status: 400, 320 | }; 321 | expect(actualResponse.body).to.deep.include( 322 | expectedResponseBody, 323 | ); 324 | }); 325 | }); 326 | }); 327 | }); 328 | }); 329 | 330 | describe("and the limit param is not a valid positive integer with max 10000", () => { 331 | invalidPageNumbers.forEach((invalidPageNumber) => { 332 | describe(`and page is ${invalidPageNumber}`, () => { 333 | it("should respond with a 400", async () => { 334 | const apiRequest = request(app) 335 | .get(`${url}?limit=${invalidPageNumber}`) 336 | .expect(400); 337 | 338 | await apiRequest.then((actualResponse) => { 339 | const expectedResponseBody = { 340 | status: 400, 341 | }; 342 | expect(actualResponse.body).to.deep.include( 343 | expectedResponseBody, 344 | ); 345 | }); 346 | }); 347 | }); 348 | }); 349 | }); 350 | }); 351 | }); 352 | 353 | describe("and with route: /domain/:domain/reports/:reportName/data", () => { 354 | const allowedDomainReports = [ 355 | "site", 356 | "domain", 357 | "download", 358 | "second-level-domain", 359 | ]; 360 | 361 | beforeEach(() => { 362 | url = `/${apiVersion}/domain/example.gov/reports/site/data`; 363 | }); 364 | 365 | describe("and params are valid", () => { 366 | allowedDomainReports.forEach((reportName) => { 367 | describe(`and the report name is ${reportName}`, () => { 368 | beforeEach(() => { 369 | url = `/${apiVersion}/domain/example.gov/reports/${reportName}/data`; 370 | 371 | db.query = (params) => { 372 | expect(params.domain).to.equal("example.gov"); 373 | expect(params.reportName).to.equal(reportName); 374 | const arr = handleIfApiVersionNotice(apiVersion, [ 375 | { 376 | id: 1, 377 | date: new Date("2017-01-01"), 378 | report_name: reportName, 379 | data: { domain: "example.gov" }, 380 | }, 381 | ]); 382 | return Promise.resolve(arr); 383 | }; 384 | }); 385 | 386 | it(`should pass params from the url to db.query and render the result`, async () => { 387 | const 
dataRequest = request(app).get(url).expect(200); 388 | 389 | await dataRequest.then((actualResponse) => { 390 | const expectedResponseBody = handleIfApiVersionNotice( 391 | apiVersion, 392 | [ 393 | { 394 | id: 1, 395 | date: "2017-01-01", 396 | report_name: reportName, 397 | domain: "example.gov", 398 | }, 399 | ], 400 | ); 401 | expect(actualResponse.body).to.deep.equal( 402 | expectedResponseBody, 403 | ); 404 | }); 405 | }); 406 | }); 407 | }); 408 | }); 409 | 410 | describe("and params are invalid", () => { 411 | it("should respond with a 400 if the domain report is not one of the acceptable kinds of reports", async () => { 412 | db.query = (params) => { 413 | expect(params.domain).to.equal("fakeiscool.gov"); 414 | expect(params.reportName).to.equal("browser"); 415 | return Promise.resolve([ 416 | { 417 | id: 1, 418 | date: new Date("2017-01-01"), 419 | data: { domain: "fakeiscool.gov" }, 420 | }, 421 | { 422 | id: 2, 423 | date: new Date("2017-01-02"), 424 | data: { domain: "bobtown.gov" }, 425 | }, 426 | ]); 427 | }; 428 | 429 | const dataRequest = request(app) 430 | .get(`/${apiVersion}/domain/fakeiscool.gov/reports/browser/data`) 431 | .expect(400); 432 | 433 | await dataRequest.then((actualResponse) => { 434 | const expectedResponseBody = { 435 | message: 436 | "You are requesting a report that cannot be filtered on domain. 
Please try one of the following reports: site, domain, download, second-level-domain.", 437 | status: 400, 438 | }; 439 | expect(actualResponse.body).to.deep.equal(expectedResponseBody); 440 | }); 441 | }); 442 | 443 | describe("and the before param is not a valid date", () => { 444 | invalidDates.forEach((invalidDate) => { 445 | describe(`and date is ${invalidDate}`, () => { 446 | it("should respond with a 400", async () => { 447 | const apiRequest = request(app) 448 | .get(`${url}?before=${invalidDate}`) 449 | .expect(400); 450 | 451 | await apiRequest.then((actualResponse) => { 452 | const expectedResponseBody = { 453 | message: 454 | "Invalid request params: ValidationError: must be a date in format 'YYYY-MM-DD'", 455 | status: 400, 456 | }; 457 | expect(actualResponse.body).to.deep.equal( 458 | expectedResponseBody, 459 | ); 460 | }); 461 | }); 462 | }); 463 | }); 464 | }); 465 | 466 | describe("and the after param is not a valid date", () => { 467 | invalidDates.forEach((invalidDate) => { 468 | describe(`and date is ${invalidDate}`, () => { 469 | it("should respond with a 400", async () => { 470 | const apiRequest = request(app) 471 | .get(`${url}?after=${invalidDate}`) 472 | .expect(400); 473 | 474 | await apiRequest.then((actualResponse) => { 475 | const expectedResponseBody = { 476 | message: 477 | "Invalid request params: ValidationError: must be a date in format 'YYYY-MM-DD'", 478 | status: 400, 479 | }; 480 | expect(actualResponse.body).to.deep.equal( 481 | expectedResponseBody, 482 | ); 483 | }); 484 | }); 485 | }); 486 | }); 487 | }); 488 | 489 | describe("and the page param is not a valid positive integer", () => { 490 | invalidPositiveIntegers.forEach((invalidPositiveInteger) => { 491 | describe(`and page is ${invalidPositiveInteger}`, () => { 492 | it("should respond with a 400", async () => { 493 | const apiRequest = request(app) 494 | .get(`${url}?page=${invalidPositiveInteger}`) 495 | .expect(400); 496 | 497 | await 
apiRequest.then((actualResponse) => { 498 | const expectedResponseBody = { 499 | status: 400, 500 | }; 501 | expect(actualResponse.body).to.deep.include( 502 | expectedResponseBody, 503 | ); 504 | }); 505 | }); 506 | }); 507 | }); 508 | }); 509 | 510 | describe("and the limit param is not a valid positive integer with max 10000", () => { 511 | invalidPageNumbers.forEach((invalidPageNumber) => { 512 | describe(`and page is ${invalidPageNumber}`, () => { 513 | it("should respond with a 400", async () => { 514 | const apiRequest = request(app) 515 | .get(`${url}?limit=${invalidPageNumber}`) 516 | .expect(400); 517 | 518 | await apiRequest.then((actualResponse) => { 519 | const expectedResponseBody = { 520 | status: 400, 521 | }; 522 | expect(actualResponse.body).to.deep.include( 523 | expectedResponseBody, 524 | ); 525 | }); 526 | }); 527 | }); 528 | }); 529 | }); 530 | }); 531 | }); 532 | 533 | describe(`with unsupported version`, () => { 534 | beforeEach(() => { 535 | db.query = () => Promise.resolve(); 536 | }); 537 | 538 | it("should not accept unsupported versions", async () => { 539 | const unsupportedVersion = "v2.x"; 540 | 541 | db.query = (params) => { 542 | expect(params.reportAgency).to.equal("fake-agency"); 543 | expect(params.reportName).to.equal("fake-report"); 544 | const arr = handleIfApiVersionNotice(unsupportedVersion, [ 545 | { id: 1, date: new Date("2017-01-01") }, 546 | { id: 2, date: new Date("2017-01-02") }, 547 | ]); 548 | return Promise.resolve(arr); 549 | }; 550 | 551 | const expectedErrorMessage = 552 | "Version not found. 
Visit https://analytics.usa.gov/developer for information on the latest supported version."; 553 | 554 | const dataRequest = request(app) 555 | .get( 556 | `/${unsupportedVersion}/agencies/fake-agency/reports/fake-report/data`, 557 | ) 558 | .expect(404); 559 | 560 | await dataRequest.then((actualResponse) => { 561 | const expectedResponse = { 562 | _body: expectedErrorMessage, 563 | status: 404, 564 | }; 565 | expect(actualResponse).to.include(expectedResponse); 566 | }); 567 | }); 568 | }); 569 | }); 570 | }); 571 | }); 572 | --------------------------------------------------------------------------------