├── .circleci └── config.yml ├── .dockerignore ├── .github ├── ISSUE_TEMPLATE.md └── PULL_REQUEST_TEMPLATE.md ├── .gitignore ├── CONTRIBUTING.md ├── LICENSE.md ├── Makefile ├── README.md ├── backend ├── .coveragerc ├── .flake8 ├── .pep8 ├── CONTRIBUTING.md ├── Dockerfile ├── README.md ├── __init__.py ├── app │ ├── __init__.py │ ├── exceptions │ │ └── format.py │ ├── main.py │ ├── mocks │ │ └── responses.py │ └── requests │ │ ├── __init__.py │ │ ├── adequacy.py │ │ ├── caching.py │ │ ├── census.py │ │ ├── providers.py │ │ ├── representative_points.py │ │ └── service_areas.py ├── config │ ├── __init__.py │ ├── config.py │ ├── entrypoint-local.sh │ ├── entrypoint.sh │ ├── nginx.conf │ └── uwsgi.ini ├── lib │ ├── __init__.py │ ├── calculate │ │ ├── __init__.py │ │ ├── adequacy.py │ │ └── gravity.py │ ├── database │ │ ├── __init__.py │ │ ├── postgres │ │ │ ├── base.py │ │ │ ├── connect.py │ │ │ ├── maintenance.py │ │ │ ├── methods.py │ │ │ ├── postgis.py │ │ │ └── table_handling.py │ │ └── tables │ │ │ ├── address.py │ │ │ ├── provider.py │ │ │ ├── representative_point.py │ │ │ └── service_area.py │ ├── fetch │ │ ├── __init__.py │ │ ├── census.py │ │ ├── providers.py │ │ └── representative_points.py │ ├── geocoder.py │ ├── timer.py │ └── utils │ │ ├── census.py │ │ ├── config_utils.py │ │ ├── datatypes.py │ │ └── iterators.py ├── models │ ├── __init__.py │ ├── base.py │ ├── distance.py │ ├── measurers.py │ └── time.py ├── requirements.txt ├── runners │ ├── check_sample_points_exist.py │ ├── export_representative_points.py │ ├── fetch_tiger_data.py │ ├── generate_representative_points.py │ ├── initialize_postgres.py │ ├── install_postgis.py │ ├── load_addresses.py │ ├── load_age_data.py │ ├── load_census_tract_demographics.py │ ├── load_representative_points.py │ └── normalize_population_totals.py └── tests │ ├── .cache │ └── v │ │ └── cache │ │ └── lastfailed │ ├── config │ └── test_config.py │ ├── lib │ ├── calculate │ │ ├── test_calculate_adequacy.py │ │ └── 
test_calculate_gravity.py │ ├── database │ │ ├── postgres │ │ │ ├── test_connect.py │ │ │ └── test_inserts.py │ │ └── test_row_to_dict.py │ ├── fetch │ │ ├── test_census_mapping_conversion.py │ │ └── test_fetch_queries.py │ ├── test_config_utils.py │ └── test_utils.py │ ├── models │ ├── test_distance.py │ ├── test_mapbox.py │ ├── test_open_route_service.py │ ├── test_osrm.py │ └── test_time.py │ ├── requests │ ├── test_adequacy.py │ ├── test_census.py │ ├── test_providers.py │ ├── test_representative_points.py │ └── test_service_areas.py │ └── runners │ └── test_load_representative_points.py ├── data ├── California │ ├── all_ca_counties.csv │ └── all_pcps_addresses.csv ├── healthcare_gov │ ├── Machine_Readable_URL_PUF.csv │ └── Plan_Attributes_PUF.csv ├── images │ └── encompass_texas.png ├── sample │ ├── los-angeles-points.geojson │ └── mock-providers.csv └── urban_rural_codes │ └── NCHSURCodes2013.txt ├── docker-compose.local.yml ├── docker-compose.override.db.yml ├── docker-compose.remote.yml ├── docker-compose.yml ├── explorer ├── Dockerfile ├── README.md ├── __init__.py ├── explorer_requirements.txt ├── lib │ ├── __init__.py │ └── etl_helper.py ├── notebooks │ ├── ArcgisConnect.ipynb │ ├── data.pkl │ ├── geocode_providers.ipynb │ ├── gravity.ipynb │ ├── plan_analysis.ipynb │ ├── sample_datasets_exploration.ipynb │ ├── simple_osm_isochrone_exploration.ipynb │ └── simple_osm_isochrone_with_runner.ipynb └── scripts │ ├── README.md │ ├── healthcare_gov_extract.py │ ├── healthcare_gov_url_to_csv.py │ └── merge_issuer_csvs.py ├── frontend ├── .firebaserc ├── .vscode │ └── settings.json ├── ARCHITECTURE.md ├── CONTRIBUTING.md ├── Dockerfile ├── README.md ├── firebase.json ├── package-lock.json ├── package.json ├── remote │ ├── entrypoint.sh │ └── nginx │ │ └── nginx.conf ├── scripts │ ├── buildCache.ts │ └── codegen.ts ├── src │ ├── components │ │ ├── AboutDialog │ │ │ ├── AboutDialog.css │ │ │ └── AboutDialog.tsx │ │ ├── AddDatasetDrawer │ │ │ ├── 
AddDatasetDrawer.css │ │ │ └── AddDatasetDrawer.tsx │ │ ├── AdequacyDoughnut │ │ │ └── AdequacyDoughnut.tsx │ │ ├── AlertDialog │ │ │ ├── AlertDialog.css │ │ │ └── AlertDialog.tsx │ │ ├── AnalyticsDrawer │ │ │ ├── AnalyticsDrawer.css │ │ │ ├── AnalyticsDrawer.tsx │ │ │ ├── CensusAnalytics.css │ │ │ └── CensusAnalytics.tsx │ │ ├── App │ │ │ ├── App.css │ │ │ └── App.tsx │ │ ├── CSVUploader │ │ │ ├── CSVUploader.css │ │ │ └── CSVUploader.tsx │ │ ├── CensusAdequacyCharts │ │ │ └── CensusAdequacyCharts.tsx │ │ ├── CensusAdequacyTable │ │ │ └── CensusAdequacyTable.tsx │ │ ├── CensusDataChart │ │ │ └── CensusDataChart.tsx │ │ ├── ClearInputsButton │ │ │ └── ClearInputsButton.tsx │ │ ├── CountySelector │ │ │ └── CountySelector.tsx │ │ ├── DatasetsDrawer │ │ │ ├── DatasetsDrawer.css │ │ │ └── DatasetsDrawer.tsx │ │ ├── DownloadAnalysisLink │ │ │ ├── BuildCSV.ts │ │ │ ├── DownloadAnalysisLink.css │ │ │ └── DownloadAnalysisLink.tsx │ │ ├── ErrorBar │ │ │ ├── ErrorBar.css │ │ │ └── ErrorBar.tsx │ │ ├── FilterBar │ │ │ ├── FilterBar.css │ │ │ └── FilterBar.tsx │ │ ├── Header │ │ │ ├── Header.css │ │ │ └── Header.tsx │ │ ├── LeftPane │ │ │ ├── LeftPane.css │ │ │ └── LeftPane.tsx │ │ ├── Link │ │ │ ├── Link.css │ │ │ └── Link.tsx │ │ ├── MapLegend │ │ │ ├── MapLegend.css │ │ │ └── MapLegend.tsx │ │ ├── MapTooltip │ │ │ ├── MapTooltip.css │ │ │ ├── MapTooltip.tsx │ │ │ └── TableRow.tsx │ │ ├── MapView │ │ │ ├── MapView.css │ │ │ └── MapView.tsx │ │ ├── MethodologyDialog │ │ │ ├── MethodologyDialog.css │ │ │ └── MethodologyDialog.tsx │ │ ├── Selectors │ │ │ ├── CensusCategorySelector.tsx │ │ │ ├── CountyTypeSelector.tsx │ │ │ ├── FilterMethodSelector.tsx │ │ │ ├── FormatSelector.tsx │ │ │ ├── SelectorBlock.css │ │ │ ├── SelectorBlock.tsx │ │ │ ├── ServiceAreaSelector.tsx │ │ │ └── StateSelector.tsx │ │ ├── StateCountySelector │ │ │ └── StateCountySelector.tsx │ │ ├── StatsBox │ │ │ ├── StatsBox.css │ │ │ └── StatsBox.tsx │ │ ├── TilePicker │ │ │ ├── TilePicker.css │ │ │ └── 
TilePicker.tsx │ │ └── Uploader │ │ │ ├── ProvidersUploader.tsx │ │ │ ├── ServiceAreasUploader.tsx │ │ │ └── Uploader.css │ ├── config │ │ └── config.ts │ ├── constants │ │ ├── api │ │ │ ├── adequacies-request.ts │ │ │ ├── adequacies-response.ts │ │ │ ├── available-service-areas-response.ts │ │ │ ├── census-data-response.ts │ │ │ ├── geocode-request.ts │ │ │ ├── geocode-response.ts │ │ │ ├── representative-points-request.ts │ │ │ └── representative-points-response.ts │ │ ├── census.ts │ │ ├── colors.ts │ │ ├── datasets.ts │ │ ├── datasets │ │ │ ├── FL_HCSD_and_Look-Alike_FL.json │ │ │ ├── FL_fl_endocrinologists.json │ │ │ ├── MS_HCSD_and_Look-Alike_MS.json │ │ │ ├── TX_HCSD_and_Look-Alike_TX.json │ │ │ ├── TX_texas_abortion_clinics_address_mar2017.json │ │ │ ├── US_mental_health.json │ │ │ └── US_snap_farmers_markets.json │ │ ├── datatypes.ts │ │ ├── map.ts │ │ ├── states.ts │ │ ├── zipCodes.ts │ │ └── zipCodesByCountyByState.ts │ ├── images │ │ ├── favicon.png │ │ └── logo.png │ ├── index.css │ ├── index.ejs │ ├── index.prod.ejs │ ├── index.tsx │ ├── services │ │ ├── api.ts │ │ ├── effects.ts │ │ └── store.ts │ ├── types.d.ts │ └── utils │ │ ├── adequacy.ts │ │ ├── analytics.test.ts │ │ ├── analytics.ts │ │ ├── csv.test.ts │ │ ├── csv.ts │ │ ├── data.ts │ │ ├── download.ts │ │ ├── env.ts │ │ ├── formatters.test.ts │ │ ├── formatters.ts │ │ ├── geojson.test.ts │ │ ├── geojson.ts │ │ ├── lazy.ts │ │ ├── link.tsx │ │ ├── list.test.ts │ │ ├── list.ts │ │ ├── numbers.ts │ │ ├── serializers.ts │ │ ├── string.test.ts │ │ ├── string.ts │ │ └── webgl.ts ├── test │ ├── mockResponses │ │ └── adequacies.ts │ ├── mocks │ │ ├── point-as.csv │ │ ├── point-as_county_only.csv │ │ ├── point-as_duplicate_county_only.csv │ │ ├── point-as_invalid_input_file.csv │ │ ├── point-as_invalid_input_file_2.csv │ │ ├── point-as_invalid_input_file_3.csv │ │ ├── point-as_invalid_input_file_4.csv │ │ ├── point-as_no_zip_no_county.csv │ │ ├── point-as_zip_and_county.csv │ │ ├── 
point-as_zip_only.csv │ │ ├── point-bs-100k.csv │ │ ├── point-bs-10k.csv │ │ ├── point-bs-2k.csv │ │ ├── point-bs-300.csv │ │ └── point-bs-30k.csv │ └── setupFiles.js ├── tsconfig.json ├── tslint.json ├── webpack.config.js ├── webpack.config.prod.js └── yarn.lock ├── osrm ├── Dockerfile └── initialize.py ├── shared ├── api-spec │ ├── adequacies-request.json │ ├── adequacies-response.json │ ├── available-service-areas-response.json │ ├── census-data-response.json │ ├── geocode-request.json │ ├── geocode-response.json │ ├── representative-points-request.json │ └── representative-points-response.json ├── census_mapping.json └── config.json ├── terraform ├── README.md ├── environments │ ├── demo │ │ └── main.tf │ ├── prod │ │ └── main.tf │ └── qa │ │ └── main.tf └── template │ ├── main.tf │ └── variables.tf └── test └── performance ├── README.md └── basic.jmx /.dockerignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | What would you like to do? 2 | 3 | # Report a bug? 4 | 5 | [] Title - Your title is clear and to the point 6 | 7 | **Steps to reproduce:** 8 | 1. 9 | 2. 10 | 11 | **Expected behavior:** 12 | 13 | 14 | **Actual behavior:** 15 | 16 | _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 17 | 18 | # Create a new feature ticket? 19 | 20 | [] Title - Your title is clear and to the point 21 | 22 | **Description** 23 | What is the task about? Please give enough context for people to take on this task efficiently. 24 | 25 | **Files** 26 | Is this a front-end or a backend ticket? 27 | What files is this likely to affect? Help your team dive right in! 
28 | 29 | **Acceptance Criteria** 30 | 31 | [] Item 1 32 | [] Item 2 33 | 34 | _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 35 | 36 | 37 | **Labels** 38 | Once you are satisified with your Bug or Task ticket, assign the correct labels. 39 | Here are a few examples: `frontend`, `backend`, `good first issue`, `priority`, `feature`, `enhancement`. 40 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Hello, and thanks for contributing to <%= name %>! 2 | 3 | ## TL;DR 4 | 5 | There are three main goals in this document, depending on the nature of your PR: 6 | 7 | - [description](#description): tell us about your PR. 8 | - [checklist](#checklist): review the checklist that is most closely related to your PR. 9 | - [qa](#qa): update the qa checklist for your reviewers. 10 | 11 | 12 | ## Title 13 | Add a clear title using an action verb. 14 | If it closes an issue, refer to it in your PR title by stating `closes #117` at the end. 15 | 16 | ## Description 17 | To help others to quickly understand the nature of your pull request, please create a description that incorporates the following elements: 18 | 19 | - [] What is accomplished by the PR. 20 | - [] If you think some decisions may raise questions or require discussion, please state them and explain your choices. 21 | 22 | 23 | ## Checklist 24 | Please use this checklist to verify that you took all the necessary steps. 25 | 26 | - [ ] If closing an issue, all acceptance criteria are met. 27 | - [ ] All existing unit tests are still passing. 28 | - [ ] Add new passing unit tests to cover the code introduced by your PR and maintain test coverage. 29 | - [ ] Update the readme if needed. 30 | - [ ] Update or add any necessary API documentation. 
31 | 32 | 33 | ## QA 34 | When your PR is created, it is important to select a reviewer with knowledge of the code that has been changed. 35 | It is also useful to guide reviewers, to this extent, please complete this QA list with areas that could be affected by your changes. 36 | 37 | Dear reviewer, please perform the following QA steps before approving my lovely PR: 38 | - [ ] The app loads nominally. 39 | - [ ] Select service area using the county selector. 40 | - [ ] Upload a service area csv (FIX DATASET). 41 | - [ ] Upload a provider csv (FIX DATASET). 42 | - [ ] Verify that `NEW FEATRURE` works as expected, i.e. `DETAILS`. 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # DS_Store 2 | *.DS_Store 3 | 4 | # Compiled python files 5 | *.py[cod] 6 | 7 | # iPython notebook backups 8 | .ipynb_checkpoints 9 | 10 | # iPython notebook cache 11 | models/notebooks/cache 12 | 13 | # ENV 14 | .env 15 | 16 | # Build 17 | /frontend/public/ 18 | 19 | # NPM 20 | node_modules 21 | **/node_modules/ 22 | 23 | # idea 24 | .idea/ 25 | 26 | # vscode 27 | *.vscode/* 28 | 29 | # Private Data 30 | data/private/* 31 | *.cache/* 32 | 33 | # Terraform local state 34 | **/.terraform/ 35 | **/*.tfstate* 36 | 37 | # Firebase Cache 38 | **/.firebase/ 39 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Encompass 2 | 3 | [![Build Status][build]](https://circleci.com/gh/bayesimpact/encompass) [![apache2]](https://www.apache.org/licenses/LICENSE-2.0) 4 | 5 | [build]: https://img.shields.io/circleci/project/bayesimpact/encompass/master.svg 6 | [apache2]: https://img.shields.io/badge/License-Apache%202.0-blue.svg 7 | 8 | ## Introduction 9 | Encompass is an analytics and mapping tool by [Bayes Impact](http://bayesimpact.org) that 
enables policymakers, researchers, and consumer advocates to analyze how accessibility to social services varies across demographic groups. Inadequate and untimely access to health care services is a major barrier to health equity for disadvantaged communities. Existing tools used to map systems at this scale are prohibitively expensive, require significant amounts of manual data processing, and are too coarse in their analysis methods to accurately depict accessibility issues. We set out to build a solution that eliminates those barriers. 10 | 11 | This is an open-source project. We invite researchers, developers, and the public to contribute to our project. See below for details. 12 | 13 | ## [Launch Encompass](https://encompass.bayesimpact.org) 14 | 15 | [![alt text][screenshot]](https://encompass.bayesimpact.org) 16 | 17 | [screenshot]: data/images/encompass_texas.png 18 | 19 | ## How to contribute 20 | __Researchers__: We’d love to collaborate with any researchers who might find our tool useful! Please let us know what other applications or datasets you would like to analyze with Encompass. Send your inquiries to [encompass@bayesimpact.org](mailto:encompass@bayesimpact.org). 21 | 22 | __Developers__: We want to invite the developer community to contribute to our mission of promoting a culture of evidence-based and transparent policymaking. Please read [CONTRIBUTING.md](https://github.com/bayesimpact/encompass/blob/master/CONTRIBUTING.md) to learn more about how you can get involved. 
23 | -------------------------------------------------------------------------------- /backend/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | */tests/* 4 | */__init__.py 5 | */main.py 6 | */runners/* 7 | */database/postgres/maintenance.py 8 | [report] 9 | exclude_lines = 10 | def __repr__ 11 | if __name__ == .__main__.: 12 | -------------------------------------------------------------------------------- /backend/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 100 3 | exclude = ./data/* 4 | # Ignore "E731 do not assign a lambda expression, use a def" 5 | ignore = E731 6 | -------------------------------------------------------------------------------- /backend/.pep8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 100 3 | exclude = ./data/* 4 | -------------------------------------------------------------------------------- /backend/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Install and run locally 4 | 5 | ``` 6 | docker-compose up backend 7 | ``` 8 | The API then becomes available at [http://localhost:8080]() 9 | 10 | ## Stack & Libraries 11 | The backend of the app uses Flask, uWSGI and Nginx to serve the API. 12 | Prefer open-source libraries when possible. 13 | 14 | ## Docstring 15 | When contributing, please be mindful of people coming after you and write down clear docstrings. We suggest the following schema for docstrings: 16 | 17 | ``` 18 | """ 19 | Function description 20 | 21 | :param key: Variable key. 22 | :returns: The value. 23 | :raises: TypeError if key is not found. 24 | """ 25 | ``` 26 | 27 | ## Tests 28 | This repository uses CircleCI for continuous testing. When contributing, please write / update tests relating to your additions. 
29 | 30 | -------------------------------------------------------------------------------- /backend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM tiangolo/uwsgi-nginx-flask:python3.6 2 | 3 | RUN apt-get -qq update && \ 4 | apt-get install -y \ 5 | libgeos-dev \ 6 | libspatialindex-dev \ 7 | gdal-bin \ 8 | postgresql-client \ 9 | man \ 10 | unzip \ 11 | --fix-missing \ 12 | > /dev/null 13 | 14 | COPY ./app/main.py /app/main.py 15 | COPY ./lib /usr/local/lib/python3.6/site-packages/backend/lib 16 | COPY ./app /usr/local/lib/python3.6/site-packages/backend/app 17 | 18 | # UWSGI and NGINX configs 19 | COPY ./config/uwsgi.ini /app/uwsgi.ini 20 | COPY ./config/nginx.conf /etc/nginx/conf.d/nginx.conf 21 | 22 | ENTRYPOINT ["/entrypoint.sh"] 23 | 24 | CMD ["/usr/bin/supervisord"] 25 | 26 | EXPOSE 8080 27 | 28 | COPY requirements.txt ./requirements.txt 29 | RUN pip -q install -r ./requirements.txt 30 | -------------------------------------------------------------------------------- /backend/README.md: -------------------------------------------------------------------------------- 1 | # Backend Description 2 | The backend API is served by Flask through Nginx and uWSGI. 
3 | 4 | ## Backend endpoints 5 | 6 | The backend API has the following endpoints and methods: 7 | 8 | | Endpoint | Method | Options | 9 | |-------------------------- |:------------:|:------| 10 | | /available-service-areas| GET | | 11 | | /api/providers | POST | body: providers=[{"provider_address"}] | 12 | | /api/representative_points | POST | body: representative_points=[{rep_point_ids}] | 13 | | /api/adequacies | POST | body: {providers=[{provider_ids}], representative_points=[{rep_point_ids}]} | 14 | -------------------------------------------------------------------------------- /backend/__init__.py: -------------------------------------------------------------------------------- 1 | """Top level module for network adequacy webapp backend.""" 2 | -------------------------------------------------------------------------------- /backend/app/__init__.py: -------------------------------------------------------------------------------- 1 | """App functions and classes for network adequacy webapp backend.""" 2 | -------------------------------------------------------------------------------- /backend/app/exceptions/format.py: -------------------------------------------------------------------------------- 1 | """Handle request handling errors.""" 2 | 3 | 4 | class InvalidFormat(Exception): 5 | """Exception class for invalid file or data format.""" 6 | 7 | def __init__(self, message, status_code=400, payload=None): 8 | """Initialize an exception.""" 9 | Exception.__init__(self) 10 | self.message = message 11 | self.payload = payload 12 | self.status_code = status_code 13 | -------------------------------------------------------------------------------- /backend/app/main.py: -------------------------------------------------------------------------------- 1 | """Routing for backend API.""" 2 | import logging 3 | import os 4 | from logging.config import dictConfig 5 | 6 | from backend.app.requests import adequacy, census, providers, representative_points, service_areas 
7 | from backend.config import config 8 | from backend.lib.database.postgres import connect 9 | from backend.lib.timer import timed 10 | 11 | import flask 12 | 13 | from flask_cors import CORS 14 | 15 | from raven.contrib.flask import Sentry 16 | 17 | 18 | dictConfig(config.get('logging')) 19 | app = flask.Flask(__name__) 20 | app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False # Disable pretty JSON. 21 | engine = connect.create_db_engine() 22 | 23 | CORS(app, resources={r'/api/*': {'origins': '*'}}) 24 | 25 | sentry = Sentry(app, dsn=os.environ.get('SENTRY_DSN', None)) 26 | logger = logging.getLogger(__name__) 27 | 28 | 29 | @timed 30 | @app.route('/api/available-service-areas/', methods=['GET']) 31 | def fetch_service_areas(): 32 | """Fetch and return all available service areas from db.""" 33 | logger.debug('Return service areas.') 34 | response = service_areas.service_areas_request(app, flask.request, engine) 35 | return flask.jsonify(response) 36 | 37 | 38 | @timed 39 | @app.route('/api/census-data-by-service-area/', methods=['POST']) 40 | def fetch_service_area_census_data(): 41 | """Fetch and return census information for the specified service areas.""" 42 | logger.debug('Return census information for the specified service areas.') 43 | if not config.get('is_census_data_available'): 44 | return flask.Response( 45 | response='501: Census data is not enabled in the configuration file.', 46 | status=501 47 | ) 48 | else: 49 | response = census.census_info_by_service_area_request(app, flask.request, engine) 50 | return flask.jsonify(response) 51 | 52 | 53 | @timed 54 | @app.route('/api/geocode/', methods=['POST']) 55 | def geocode_providers(): 56 | """Geocode provider addresses.""" 57 | logger.debug('Fetch providers.') 58 | response = providers.providers_request(app, flask.request, engine) 59 | return flask.jsonify(response) 60 | 61 | 62 | @timed 63 | @app.route('/api/representative_points/', methods=['POST']) 64 | def fetch_representative_points(): 65 | """Fetch 
and return all representative points for the requested service areas.""" 66 | logger.debug('Fetch representative_points for the specifed service areas.') 67 | response = representative_points.representative_points_request(app, flask.request, engine) 68 | return flask.jsonify(response) 69 | 70 | 71 | @timed 72 | @app.route('/api/adequacies/', methods=['POST']) 73 | def calculate_adequacies(): 74 | """Calculate adequacy for the input service areas and providers.""" 75 | logger.debug('Calculate time distance standards.') 76 | response = adequacy.adequacy_request(app, flask.request, engine) 77 | return flask.jsonify(response) 78 | 79 | 80 | if __name__ == '__main__': 81 | app.run(host='0.0.0.0', debug=True, port=8080) 82 | -------------------------------------------------------------------------------- /backend/app/mocks/responses.py: -------------------------------------------------------------------------------- 1 | """Mock Providers.""" 2 | import random 3 | 4 | 5 | def mock_provider(provider_id, success=True): 6 | """Create a mock provider.""" 7 | if success: 8 | return { 9 | 'status': 'success', 10 | 'id': provider_id, 11 | 'lat': random_lat(), 12 | 'lng': random_lng(), 13 | } 14 | return { 15 | 'status': 'error', 16 | 'message': 'Failed to geocode address for this provider.' 
17 | } 18 | 19 | 20 | def mock_representative_point(service_area_id=0, rp_id=0): 21 | """Create a mock representative point.""" 22 | county = 'county_placeholder' 23 | zip_code = 'zip_code_placeholder' 24 | return { 25 | 'id': rp_id, 26 | 'service_area_id': service_area_id, 27 | 'lat': random_lat(), 28 | 'lng': random_lng(), 29 | 'county': county, 30 | 'population': random.randint(10, 10000), 31 | 'zip': zip_code, 32 | 'census_block_group': 105, 33 | 'census_block': 3, 34 | 'census_tract': 304, 35 | } 36 | 37 | 38 | def mock_adequacy(rp_id, provider_id): 39 | """Create a mock adequacy response.""" 40 | return { 41 | 'id': rp_id, 42 | 'closest_provider_by_distance': provider_id, 43 | 'closest_provider_by_time': provider_id, 44 | 'time_to_closest_provider': round(random.uniform(10, 60)), 45 | 'distance_to_closest_provider': random.uniform(5, 40) 46 | } 47 | 48 | 49 | # Helpers 50 | def random_coord(seed): 51 | """Generate a mock lat/lng coordinate.""" 52 | return lambda: seed + random.uniform(-0.03, 0.03) 53 | 54 | 55 | random_lat = random_coord(37.765134) 56 | random_lng = random_coord(-122.444687) 57 | -------------------------------------------------------------------------------- /backend/app/requests/__init__.py: -------------------------------------------------------------------------------- 1 | """Main requests for the api.""" 2 | -------------------------------------------------------------------------------- /backend/app/requests/census.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handle requests to the census-data-by-service-area/ endpoint. 3 | 4 | The /api/census-data-by-service-area/ endpoint returns census info for each service area. 
5 | """ 6 | import json 7 | import logging 8 | 9 | from backend.app.exceptions.format import InvalidFormat 10 | from backend.app.requests.caching import cache 11 | from backend.lib.fetch import census 12 | 13 | from retrying import retry 14 | 15 | WAIT_FIXED_MILLISECONDS = 500 16 | STOP_MAX_ATTEMPT_NUMBER = 2 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | @retry( 22 | wait_fixed=WAIT_FIXED_MILLISECONDS, 23 | stop_max_attempt_number=STOP_MAX_ATTEMPT_NUMBER 24 | ) 25 | def census_info_by_service_area_request(app, flask_request, engine): 26 | """Handle /api/census-data-by-service-area/ requests.""" 27 | logger.info('Fetching census data by service area.') 28 | try: 29 | request_json = flask_request.get_json(force=True) 30 | service_area_ids = request_json['service_area_ids'] 31 | except (json.JSONDecodeError, KeyError): 32 | raise InvalidFormat(message='Invalid JSON format.') 33 | 34 | return construct_census_info_response( 35 | service_area_ids=service_area_ids, 36 | engine=engine 37 | ) 38 | 39 | 40 | @cache(prefix='census', hint_fields=('service_area_ids',)) 41 | def construct_census_info_response(service_area_ids, engine): 42 | """Construct responses for /api/census-data-by-service-area/ requests.""" 43 | return census.fetch_census_info_by_service_area( 44 | service_area_ids=service_area_ids, 45 | engine=engine 46 | ) 47 | -------------------------------------------------------------------------------- /backend/app/requests/providers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handle requests to the provider endpoint. 3 | 4 | The /api/geocode enpoint accepts a list of addresses and return a list of 5 | associated lat, lng. 
6 | 7 | REQUEST - POST 8 | [ 9 | '1855 Mission Street, San Francisco, CA', 10 | '1855 Mission Road, San Francisco, CA', 11 | '100855 South Van Ness, San Francisco, CA' 12 | ] 13 | 14 | RESPONSE 15 | [ 16 | {'status': 'error', 'message': 'Failed to geocode address for this provider.'}, 17 | {'status': 'success', lat': 37.06660983626426, 'lng': -122.03874290199911} 18 | {'status': 'success', lat': 37.06098362, 'lng': -122.038742210} 19 | ] 20 | """ 21 | import json 22 | import logging 23 | 24 | from backend.lib.fetch import providers 25 | from backend.app.exceptions.format import InvalidFormat 26 | 27 | from retrying import retry 28 | 29 | # TODO - Use config. 30 | # TODO - Create a dedicated retry decorator to avoid duplication. 31 | WAIT_FIXED_MILLISECONDS = 500 32 | STOP_MAX_ATTEMPT_NUMBER = 2 33 | 34 | logger = logging.getLogger(__name__) 35 | 36 | 37 | @retry( 38 | wait_fixed=WAIT_FIXED_MILLISECONDS, 39 | stop_max_attempt_number=STOP_MAX_ATTEMPT_NUMBER) 40 | def providers_request(app, flask_request, engine): 41 | """Handle /api/geocode requests.""" 42 | logger.info('Geocoding providers.') 43 | try: 44 | request_json = flask_request.get_json(force=True) 45 | provider_addresses = request_json['addresses'] 46 | except json.JSONDecodeError: 47 | raise InvalidFormat(message='Invalid JSON format.') 48 | return providers.geocode_providers(provider_addresses, engine=engine) 49 | -------------------------------------------------------------------------------- /backend/app/requests/representative_points.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handle requests to the representative_points endpoint. 3 | 4 | The /api/representative_points/ endpoint accepts a list service areas and returns a list 5 | of ids and associated data. 
6 | 7 | REQUEST - POST /api/representative_points 8 | 9 | { 10 | service_area_ids: ["alameda_020202", "alameda_94100"] 11 | } 12 | 13 | RESPONSE 14 | [ 15 | { 16 | id: 17323, 17 | service_area_id: "ca_alameda_94100", 18 | lat: 74.38732, 19 | lng: -122.323331 20 | county: "Alameda", 21 | population: 2000, 22 | zip: 94105, 23 | census_block_group: 105, 24 | census_block: 3, 25 | census_tract: 304, 26 | }, 27 | ] 28 | """ 29 | import json 30 | import logging 31 | 32 | from backend.app.requests.caching import cache 33 | from backend.config import config 34 | from backend.lib.fetch import representative_points 35 | from backend.app.exceptions.format import InvalidFormat 36 | 37 | from retrying import retry 38 | 39 | WAIT_FIXED_MILLISECONDS = 500 40 | STOP_MAX_ATTEMPT_NUMBER = 2 41 | 42 | logger = logging.getLogger(__name__) 43 | 44 | 45 | @retry( 46 | wait_fixed=WAIT_FIXED_MILLISECONDS, 47 | stop_max_attempt_number=STOP_MAX_ATTEMPT_NUMBER) 48 | def representative_points_request(app, flask_request, engine): 49 | """Handle /api/representative_points requests.""" 50 | logger.debug('Fetching representative points.') 51 | try: 52 | request_json = flask_request.get_json(force=True) 53 | service_area_ids = request_json['service_area_ids'] 54 | # Only send census data if requested AND available. 
55 | include_census_data = ( 56 | request_json.get('include_census_data', False) and 57 | config.get('is_census_data_available') 58 | ) 59 | except (json.JSONDecodeError, KeyError): 60 | raise InvalidFormat(message='Invalid JSON format.') 61 | representative_point_response = construct_representative_point_response( 62 | service_area_ids=service_area_ids, 63 | include_census_data=include_census_data, 64 | engine=engine 65 | ) 66 | logger.debug('Returning %d representative points.', len(representative_point_response)) 67 | return representative_point_response 68 | 69 | 70 | @cache(prefix='representative_points', hint_fields=('service_area_ids', 'include_census_data')) 71 | def construct_representative_point_response(service_area_ids, include_census_data, engine): 72 | """Construct responses for /api/representative_points requests.""" 73 | return representative_points.fetch_representative_points( 74 | service_area_ids=service_area_ids, 75 | include_census_data=include_census_data, 76 | engine=engine 77 | ) 78 | -------------------------------------------------------------------------------- /backend/app/requests/service_areas.py: -------------------------------------------------------------------------------- 1 | """ 2 | Handle requests to the service_areas endpoint. 3 | 4 | The /api/available-service-areas/ endpoint returns all available service areas. 
5 | """ 6 | from backend.lib.fetch import representative_points 7 | import logging 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | def service_areas_request(app, flask_request, engine): 13 | """Handle /api/available-service-areas requests.""" 14 | logger.info('Fetching service areas.') 15 | return representative_points.fetch_all_service_areas(engine=engine,) 16 | -------------------------------------------------------------------------------- /backend/config/__init__.py: -------------------------------------------------------------------------------- 1 | """Config file for the Python backend.""" 2 | -------------------------------------------------------------------------------- /backend/config/entrypoint-local.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | MAX_ATTEMPT_NUM=10 3 | SLEEP_TIME=3 4 | 5 | attempt_num=0 6 | until [ ${attempt_num} -ge ${MAX_ATTEMPT_NUM} ] 7 | do 8 | pg_isready -d ${POSTGRES_URL} && break 9 | attempt_num=$((attempt_num + 1)) 10 | sleep ${SLEEP_TIME} 11 | done 12 | 13 | if [ $attempt_num -eq ${MAX_ATTEMPT_NUM} ] 14 | then 15 | exit 3 16 | fi 17 | 18 | exec "$@" 19 | -------------------------------------------------------------------------------- /backend/config/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | exec "$@" 4 | -------------------------------------------------------------------------------- /backend/config/nginx.conf: -------------------------------------------------------------------------------- 1 | # Nginx config. 2 | 3 | # Allow large URI. 4 | client_header_buffer_size 64k; 5 | large_client_header_buffers 4 64k; 6 | 7 | # Avoid timeouts for large files. 8 | proxy_connect_timeout 2000; 9 | proxy_send_timeout 2000; 10 | proxy_read_timeout 2000; 11 | send_timeout 2000; 12 | 13 | uwsgi_read_timeout 2000; 14 | uwsgi_send_timeout 2000; 15 | 16 | # Avoid 413s for large POST bodies. 
17 | client_max_body_size 30m; 18 | 19 | # Http server settings. 20 | server { 21 | 22 | listen 8080; 23 | 24 | location / { 25 | if ($request_method = "OPTIONS") { 26 | add_header "Access-Control-Allow-Origin" "*"; 27 | add_header "Access-Control-Allow-Methods" "GET, POST, OPTIONS"; 28 | # 29 | # Custom headers and headers various browsers *should* be OK with but aren"t 30 | # 31 | add_header "Access-Control-Allow-Headers" "DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Content-Range,Range"; 32 | # 33 | # Tell client that this pre-flight info is valid for 20 days 34 | # 35 | add_header "Access-Control-Max-Age" 1728000; 36 | add_header "Content-Type" "text/plain; charset=utf-8"; 37 | add_header "Content-Length" 0; 38 | return 204; 39 | } 40 | 41 | add_header "Access-Control-Allow-Methods" "GET, POST, OPTIONS"; 42 | add_header "Access-Control-Allow-Headers" "DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Content-Range,Range"; 43 | add_header "Access-Control-Expose-Headers" "DNT,X-CustomHeader,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Content-Range,Range"; 44 | 45 | proxy_pass http://127.0.0.1:8080; 46 | try_files $uri @app; 47 | } 48 | 49 | location @app { 50 | include uwsgi_params; 51 | uwsgi_pass unix:///tmp/uwsgi.sock; 52 | } 53 | 54 | location /static { 55 | alias /app/static; 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /backend/config/uwsgi.ini: -------------------------------------------------------------------------------- 1 | [uwsgi] 2 | module = main 3 | callable = app 4 | buffer-size = 65535 5 | catch-exceptions = true 6 | enable-threads = true 7 | lazy-apps = true 8 | socket = /tmp/uwsgi.sock 9 | chown-socket = nginx:nginx 10 | chmod-socket = 664 11 | cheaper = 4 12 | processes = 16 13 | 
def create_db_engine(db_url=None, echo=False):
    """Create a SQLAlchemy engine from a database URL.

    Example url: "host=postgres port=5432 user=postgres password=*** dbname=postgres".

    :param db_url: database URL; when None, the POSTGRES_URL environment
        variable is read at call time. (FIX: the previous default
        `os.getenv('POSTGRES_URL')` was evaluated once at import time, so any
        later change to the environment - e.g. in tests or entrypoint scripts -
        was silently ignored.)
    :param echo: when True, SQLAlchemy logs all emitted SQL.
    """
    if db_url is None:
        db_url = os.getenv('POSTGRES_URL')
    logger.info('Creating Postgres engine.')
    # pool_size/max_overflow cap connections at 20; echo is forwarded for debugging.
    return create_engine(db_url, pool_size=20, max_overflow=0, echo=echo)
def to_point(longitude, latitude):
    """Serialize a longitude/latitude pair into Postgres EWKT point syntax."""
    return 'SRID=4326;POINT({} {})'.format(longitude, latitude)


def to_polygon(long_lat_tuples):
    """Serialize (longitude, latitude) tuples into Postgres WKT polygon syntax."""
    coordinate_pairs = []
    for longitude, latitude in long_lat_tuples:
        coordinate_pairs.append('{} {}'.format(longitude, latitude))
    return 'POLYGON(({}))'.format(', '.join(coordinate_pairs))
def get_random_table_name(prefix='', length=12):
    """Return a random valid Postgres table name of the form '<prefix>_<suffix>'.

    :param prefix: string prepended (with an underscore) to the random suffix.
    :param length: number of random lowercase ASCII letters in the suffix.
        FIX: the previous implementation accepted `length` but ignored it,
        always generating 12 characters via a hard-coded `range(12)`.
    """
    return '{}_{}'.format(
        prefix,
        ''.join(random.choice(string.ascii_lowercase) for _ in range(length))
    )
14 | id = Column(Integer, primary_key=True, autoincrement=True) 15 | created_at = Column(DateTime, default=func.now()) 16 | updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now()) 17 | address = Column(String, nullable=False, index=True, unique=True) 18 | geocoder_name = Column(String) 19 | latitude = Column(Float) 20 | longitude = Column(Float) 21 | location = Column( 22 | Geography(geometry_type='POINT', srid=4326, spatial_index=True), 23 | nullable=False) 24 | isodistancePolygon = Column(JSON) 25 | -------------------------------------------------------------------------------- /backend/lib/database/tables/provider.py: -------------------------------------------------------------------------------- 1 | """File holding the main provider table and mapping definitions.""" 2 | from backend.lib.database.postgres.base import Base 3 | 4 | from sqlalchemy import ARRAY, Column, DateTime, ForeignKey, Integer, String, func 5 | 6 | 7 | class Provider(Base): 8 | """Definition of the service_areas table.""" 9 | 10 | __tablename__ = 'providers' 11 | id = Column(Integer, primary_key=True, autoincrement=True) 12 | address_id = Column(Integer, ForeignKey('addresses.id'), nullable=False) 13 | created_at = Column(DateTime, default=func.now()) 14 | updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now()) 15 | # Need to verify length 10 for NPI. 
class ServiceArea(Base):
    """Definition of the service_areas table.

    Each row represents one (state, county, zip_code) service area whose
    boundary is stored as a PostGIS geography polygon.
    """

    # Table name takes a configurable prefix so multiple datasets can coexist.
    __tablename__ = config.get('database.prefix') + 'service_areas'
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Unique external identifier for the service area (e.g. used by the API).
    service_area_id = Column(String, unique=True, nullable=False, index=True)
    created_at = Column(DateTime, default=func.now())
    updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())
    state = Column(String, nullable=False)
    county = Column(String, nullable=False, index=True)
    zip_code = Column(String, nullable=False, index=True)
    # WGS84 (SRID 4326) polygon with a spatial index for containment queries.
    location = Column(
        Geography(geometry_type='POLYGON', srid=4326, spatial_index=True),
        nullable=False
    )
    state_fips = Column(String, nullable=False)
    county_fips = Column(String, nullable=False)
    # NCHS urban/rural classification code; nullable - NOTE(review): confirm intended.
    nchs_urban_rural_code = Column(String)
@timed
def fetch_census_info_by_service_area(service_area_ids, engine):
    """
    Fetch census information aggregated across an entire service area.

    For each service area and census field combination, calculate the average of each
    point's census value, weighted by population. FIX: service areas whose points sum
    to zero population now fall back to an unweighted mean instead of raising
    ZeroDivisionError.

    The returned mapping for each service area has the same structure as each
    representative point. Returns {} when no points exist for the given areas.
    """
    logger.debug('Fetching census information for {} service areas'.format(len(service_area_ids)))
    # Fetch all points for the given service areas.
    all_points = representative_points.fetch_representative_points(
        service_area_ids=service_area_ids,
        include_census_data=True,
        engine=engine
    )

    # Infer the demographics fields from the first returned point.
    try:
        sample_demographic_map = all_points[0]['demographics']
    except IndexError:
        logger.debug('No points found for specified service areas.')
        return {}

    # Group points by service area.
    points_by_service_area = collections.defaultdict(list)
    for point in all_points:
        points_by_service_area[point['service_area_id']].append(point)

    response = {}
    for service_area, points in points_by_service_area.items():

        service_area_demographics = collections.defaultdict(dict)
        total_population = sum(point['population'] for point in points)

        # Set the service area value to the weighted average of its constituent points' values.
        for category in sample_demographic_map:
            for field in sample_demographic_map[category]:
                if total_population > 0:
                    value = sum(
                        point['population'] * point['demographics'][category][field]
                        for point in points
                    ) / total_population
                else:
                    # All points report zero population: a population-weighted
                    # average is undefined, so use a plain mean. len(points) >= 1
                    # is guaranteed by the grouping above.
                    value = sum(
                        point['demographics'][category][field]
                        for point in points
                    ) / len(points)
                service_area_demographics[category][field] = value

        response[service_area] = dict(service_area_demographics)

    return response
def nested_update(orig_dict, new_dict):
    """
    Method to update only the innermost elements of dictionaries.

    This function takes two nested dictionaries and updates the first one
    only with final non-nested values of the second one.
    This allows us to specify only the specific nested values to update.
    List values are concatenated rather than replaced.

    Mutates and returns orig_dict.
    """
    for key, val in new_dict.items():
        # FIX: collections.Mapping was removed in Python 3.10;
        # the abstract base class lives in collections.abc.
        if isinstance(val, collections.abc.Mapping):
            tmp = nested_update(orig_dict.get(key, {}), val)
            orig_dict[key] = tmp
        elif isinstance(val, list):
            orig_dict[key] = (orig_dict.get(key, []) + val)
        else:
            orig_dict[key] = new_dict[key]
    return orig_dict
def iterate_in_slices(iterable, batch_size):
    """Yield successive lists of at most batch_size items drawn from iterable."""
    iterator = iter(iterable)
    exhausted = False
    while not exhausted:
        batch = []
        for _ in range(batch_size):
            try:
                batch.append(next(iterator))
            except StopIteration:
                exhausted = True
                break
        # Only yield non-empty batches; a trailing partial batch is yielded as-is.
        if batch:
            yield batch
    def closest(self, origin, point_list):
        """Find closest point in a list of points and returns min_measurement, min_point."""
        # NOTE(review): the winning point is measured twice (inside min's key and
        # again below); acceptable unless measure_between_two_points is expensive.
        # Raises ValueError on an empty point_list (min of empty sequence).
        min_point = min(
            point_list,
            key=lambda p: self.measure_between_two_points(origin, p)
        )
        min_measurement = self.measure_between_two_points(origin, min_point)
        return min_measurement, min_point

    def closest_with_early_exit(self, origin, point_list, exit_distance):
        """
        Find closest point in a list of points, exiting early if exit_distance is reached.

        Returns min_measurement, min_point.
        """
        # Linear scan tracking the running minimum; unlike closest(), an empty
        # point_list returns (inf, None) instead of raising.
        min_measurement = float('inf')
        min_point = None
        for point in point_list:
            distance = self.measure_between_two_points(origin, point)
            if distance < min_measurement:
                min_measurement = distance
                min_point = point
            # A match within exit_distance is "good enough" - stop searching.
            if min_measurement <= exit_distance:
                break
        return min_measurement, min_point
def get_measurer(name, **kwargs):
    """Return an instantiated measurer class with the given name.

    `name` is matched case-insensitively against MEASURER_NAME_TO_CLASS_MAPPING;
    an unknown name raises KeyError. Extra keyword arguments are forwarded to
    the measurer class constructor.
    """
    return MEASURER_NAME_TO_CLASS_MAPPING[name.lower()](**kwargs)


# Registry of available measurers: lowercase name accepted by get_measurer
# mapped to the class that implements it.
MEASURER_NAME_TO_CLASS_MAPPING = {
    'haversine': distance.HaversineDistance,
    'open_route_service_driving': time.OpenRouteDrivingTime,
    'osrm': time.OSRMDrivingTime,
    'mapbox': time.MapBoxDrivingTime,
    'walking': time.MapBoxWalkingTime
}
def check_sample_points_exist():
    """
    Retrieve sample points for a service area from the sample data and if none exists, exit
    with nonzero code to indicate that initial sample data population should proceed.

    Intended for use in setup scripts: a zero exit status means sample data is present.
    """
    # echo=True surfaces the emitted SQL, useful during initial setup/debugging.
    engine = connect.create_db_engine(echo=True)
    service_areas = ['ca_los_angeles_county_00000']  # A service area from the base sample data.
    results = representative_points.fetch_representative_points(
        service_areas, include_census_data=False, engine=engine
    )
    if len(results) == 0:
        exit(1)  # Exit nonzero to indicate that no records exist.
8 | SELECT_REPRESENTATIVE_POINTS_QUERY = """ 9 | SELECT 10 | r.id, 11 | r.census_tract, 12 | r.county, 13 | r.latitude::NUMERIC(10, 6) AS lat, 14 | r.longitude::NUMERIC(10, 6) AS lng, 15 | r.population, 16 | r.service_area_id, 17 | r.zip_code AS zip, 18 | r.location 19 | FROM representative_points r 20 | JOIN service_areas sa 21 | ON (r.service_area_id = sa.service_area_id) 22 | WHERE 1=1 23 | ; 24 | """.replace('\n', ' ') 25 | 26 | EXPORT_COMMAND = """ 27 | ogr2ogr -f GeoJSON {output_path} "PG:{connection_string}" -sql "{extraction_query}" 28 | """ 29 | 30 | 31 | def _get_arguments(): 32 | """Build argument parser.""" 33 | parser = argparse.ArgumentParser(description=""" 34 | This script extracts representative population points from the database into GeoJSON format. 35 | """) 36 | parser.add_argument( 37 | '-o', '--output_path', 38 | help='Filepath at which to output GeoJSON data.', 39 | required=True, 40 | type=str 41 | ) 42 | parser.add_argument( 43 | '-s', '--state', 44 | help="State to extract GeoJSON data for (e.g., 'TX'.", 45 | required=False, 46 | type=str 47 | ) 48 | return parser.parse_args().__dict__ 49 | 50 | 51 | if __name__ == '__main__': 52 | kwargs = _get_arguments() 53 | 54 | state = kwargs.get('state', None) 55 | if state: 56 | query = SELECT_REPRESENTATIVE_POINTS_QUERY.replace( 57 | 'WHERE 1=1', 'WHERE state = \'{}\''.format(state.upper()) 58 | ) 59 | else: 60 | query = SELECT_REPRESENTATIVE_POINTS_QUERY 61 | 62 | command = EXPORT_COMMAND.format( 63 | output_path=kwargs['output_path'], 64 | connection_string=os.getenv('POSTGRES_URL'), 65 | extraction_query=query 66 | ) 67 | subprocess.call(args=[command], shell=True) 68 | -------------------------------------------------------------------------------- /backend/runners/initialize_postgres.py: -------------------------------------------------------------------------------- 1 | """Initialize Postgres tables.""" 2 | from backend.lib.database.postgres import connect 3 | from 
def initialize_postgres():
    """Initialize Postgres tables.

    Creates the database if it does not exist, installs the PostGIS extension,
    then creates every table registered on Base's metadata (the table modules
    are imported at module level purely for their registration side effect).
    """
    postgres_engine = connect.create_db_engine(echo=True)

    # Create DB if necessary...
    if not database_exists(postgres_engine.url):
        create_database(postgres_engine.url)

    # Install Postgis.
    install_postgis.install()

    # Create tables. checkfirst=True makes this idempotent for existing tables.
    Base.metadata.create_all(postgres_engine, checkfirst=True)
if __name__ == '__main__':
    arguments = _get_arguments()
    print('Load providers from - %s' % arguments['filepath'])
    try:
        load_csv(**arguments)
    except Exception as error:
        # FIX: the previous handler swallowed the exception silently (bound name
        # `e` was never used), hiding load failures and exiting with status 0.
        # Report the cause and re-raise so the process exits nonzero.
        print('An error occurred uploading addresses: {}'.format(error))
        raise
if __name__ == '__main__':
    # Run the county-level population normalization inside a single transaction:
    # engine.begin() commits on success and rolls back on any error.
    engine = connect.create_db_engine()
    with engine.begin() as conn:
        conn.execute(POPULATION_UPDATE_QUERY_COUNTY_LEVEL)
def area_in_square_meters(polygon):
    """Calculate area in square meters.

    Reprojects the polygon from lon/lat (EPSG:4326) to Web Mercator (EPSG:3857)
    so that shapely's planar ``.area`` is expressed in meters.
    NOTE(review): Web Mercator areas are scale-distorted away from the equator;
    presumably acceptable for the rough comparisons these tests make — confirm.
    """
    projection = partial(
        pyproj.transform, pyproj.Proj(init='epsg:4326'),
        pyproj.Proj(init='epsg:3857')
    )
    return ops.transform(projection, polygon).area


def test_find_closest_location():
    """_find_closest_location should pick the zero-distance point when present."""
    point = {
        'id': 0,
        'latitude': 37.74753421600008,
        'longitude': -122.2316317029999,
    }
    locations = [
        # First candidate has the same coordinates as `point`, so it must win
        # with a distance of 0.
        Point(**{'latitude': 37.74753421600008, 'longitude': -122.2316317029999}),
        Point(**{'latitude': 32.74753421600008, 'longitude': -122.2316317029999}),
    ]
    output = adequacy._find_closest_location(
        point=point,
        measurer=get_measurer('haversine'),
        locations=locations,
    )

    expected = {
        'id': point['id'],
        'closest_point': Point(latitude=37.74753421600008, longitude=-122.2316317029999),
        'to_closest_provider': 0
    }

    assert output == expected


def test_calculate_adequacies():
    """calculate_adequacies should report every equally-close provider id."""
    measurer_name = 'haversine'
    locations = [
        # Locations 1 and 3 are identical, so both ids should be reported as
        # closest providers.
        {'id': 1, 'latitude': 33.77500830300005, 'longitude': -118.11176916399995},
        {'id': 2, 'latitude': 32.74753421600008, 'longitude': -122.2316317029999},
        {'id': 3, 'latitude': 33.77500830300005, 'longitude': -118.11176916399995}
    ]
    adequacies = adequacy.calculate_adequacies(
        service_area_ids=['ca_los_angeles_county_00000'],
        measurer_name=measurer_name,
        locations=locations,
        engine=engine,
        radius_in_meters=1000
    )
    assert adequacies[0]['closest_providers'] == [1, 3]
    # NOTE(review): this assertion is redundant — indexing `adequacies[0]` above
    # would already have raised if the result were None.
    assert adequacies is not None


def test_get_locations_to_check_by_service_area():
    """Only locations within the radius of the service area should be returned."""
    locations = [
        Point(**{'latitude': 33.77500830300005, 'longitude': -118.11176916399995}),
        # Far-northern point expected to fall outside the LA service area radius.
        Point(**{'latitude': 85.00000000000000, 'longitude': -100.00000000000000})
    ]
    output = adequacy._get_locations_to_check_by_service_area(
        service_area_ids=['ca_los_angeles_county_00000'],
        locations=locations,
        radius_in_meters=10**4,
        engine=engine,
    )['ca_los_angeles_county_00000']
    assert output == [locations[0]]
-------------------------------------------------------------------------------- /backend/tests/lib/database/postgres/test_connect.py: --------------------------------------------------------------------------------
"""
This test class is running on a local postgres to test the basic connection features.

It will evolve to test connections to the aws db as well.
"""
from backend.lib.database.postgres import connect

import pytest

from sqlalchemy.exc import OperationalError


def test_engine_creation():
    """ Test that we can create a connection without any error."""
    db = connect.create_db_engine()
    assert db is not None


def test_engine_creation_retry_fail():
    """Test that connecting fails when a fake database URL is provided."""
    # SQLAlchemy engine creation is lazy; the OperationalError only surfaces
    # when connect() actually opens a connection, which is why both calls sit
    # inside the pytest.raises block.
    with pytest.raises(OperationalError):
        db_engine = connect.create_db_engine(db_url='postgresql://user:pwd@fake_url:5432/postgres')
        db_engine.connect()
-------------------------------------------------------------------------------- /backend/tests/lib/database/postgres/test_inserts.py: --------------------------------------------------------------------------------
"""Test Postgres inserts."""
from backend.lib.database.postgres import connect, postgis
from backend.lib.database.postgres import methods
from backend.lib.database.tables import address, provider


def test_core_insert():
    """Insert an address, then a provider referencing it, then delete both."""
    engine = connect.create_db_engine()
    address_data = [
        {
            'address': "Aaaat Brian's House",
            'latitude': 23,
            'longitude': 35,
            # NOTE(review): to_point is called with (35, 22) but latitude above
            # is 23 — looks like a fixture typo; confirm the intended values.
            'location': postgis.to_point(35, 22)
        }
    ]

    addresss_inserted_primary_key = methods.core_insert(
        engine,
        sql_class=address.Address,
        data=address_data,
        return_insert_ids=True,
        unique_column='address'
    )

    provider_data = [
        {
            'address_id': addresss_inserted_primary_key[0],
            'languages': ['english', 'spanish'],
            'npi': 'aaa_npi_hello',
            'specialty': 'doctor_for_teddies'
        }
    ]

    provider_inserted_primary_key = methods.core_insert(
        engine,
        sql_class=provider.Provider,
        data=provider_data,
        return_insert_ids=True
    )

    # Clean up: the provider is deleted first because it references the address.
    # NOTE(review): cleanup is not wrapped in try/finally, so a failure above
    # leaves test rows behind in the database.
    methods.delete(
        engine=engine,
        sql_class=provider.Provider,
        ids=provider_inserted_primary_key
    )

    methods.delete(
        engine=engine,
        sql_class=address.Address,
        ids=addresss_inserted_primary_key
    )
-------------------------------------------------------------------------------- /backend/tests/lib/fetch/test_census_mapping_conversion.py: --------------------------------------------------------------------------------
from backend.lib.utils.census import readable_columns_from_census_mapping

# Two categories with two groups each, mirroring the shape of the real census
# mapping: {category: {column: {joined_column_name, human_readable_name}}}.
MOCK_CENSUS_MAPPING = {
    'category1': {
        'c1g1': {
            'joined_column_name': 'category_one_group_one',
            'human_readable_name': 'Category One, Group One'
        },
        'c1g2': {
            'joined_column_name': 'category_one_group_two',
            'human_readable_name': 'Category One, Group Two'
        }
    },
    'category2': {
        'c2g1': {
            'joined_column_name': 'category_two_group_one',
            'human_readable_name': 'Category Two, Group One'
        },
        'c2g2': {
            'joined_column_name': 'category_two_group_two',
            'human_readable_name': 'Category Two, Group Two'
        }
    }
}


class TestReadableColumnsFromCensusMapping:

    @staticmethod
    def test_readable_columns_from_census_mapping():
        """Each mapping entry should become a 'column AS alias' SQL fragment."""
        census_columns = readable_columns_from_census_mapping(MOCK_CENSUS_MAPPING)

        expected_census_columns = [
            'c1g1 AS category_one_group_one',
            'c1g2 AS category_one_group_two',
            'c2g1 AS category_two_group_one',
            'c2g2 AS category_two_group_two'
        ]

        # NOTE(review): zip() stops at the shorter sequence, so this passes even
        # if census_columns is missing trailing entries; a direct list equality
        # check would be stricter.
        assert all([a == b for a, b in zip(census_columns, expected_census_columns)])
-------------------------------------------------------------------------------- /backend/tests/lib/test_config_utils.py: --------------------------------------------------------------------------------
"""Test config_utils."""
from backend.lib.utils import config_utils


def test_list_to_dict():
    """Test list_to_dict."""
    input_list = ['1', '2', '3']
    mapping = {0: 1, 1: 2, 2: 3}
    result = config_utils.list_to_dict(input_list, mapping)
    # Each list index is translated through `mapping` to produce the new keys.
    assert result == {1: '1', 2: '2', 3: '3'}


def test_nested_update_independent():
    """Test nested update with independent dicts."""
    orig_dict = {'a': 'a'}
    new_dict = {'b': 'b'}
    updated_dict = config_utils.nested_update(orig_dict, new_dict)
    assert updated_dict == {'a': 'a', 'b': 'b'}


def test_nested_update_dependent():
    """Test nested update with dependent dicts."""
    orig_dict = {'a': 'a'}
    new_dict = {'a': 'b'}
    updated_dict = config_utils.nested_update(orig_dict, new_dict)
    assert updated_dict == {'a': 'b'}


def test_nested_update_dependent_second_layer():
    """Test nested update with dependent dicts on second layer."""
    orig_dict = {'a': {'a': 'a'}, 'b': {'a': 'a'}}
    new_dict = {'a': {'a': 'b'}}
    updated_dict = config_utils.nested_update(orig_dict, new_dict)
    # Only the overlapping inner key is replaced; sibling 'b' is untouched.
    assert updated_dict == {'a': {'a': 'b'}, 'b': {'a': 'a'}}


def test_nested_update_list_values():
    """Test nested update with list values: lists are concatenated, not replaced."""
    orig_dict = {'a': [0, 1], 'b': [2]}
    new_dict = {'a': [3], 'c': [4]}
    updated_dict = config_utils.nested_update(orig_dict, new_dict)
    assert updated_dict == {'a': [0, 1, 3], 'b': [2], 'c': [4]}


def test_merge_dictionaries_with_list_values():
    """Test that merge_dictionaries_with_list_values works as intended."""
    dicts = [
        {'a': [0, 1, 2]},
        {'b': [10, 11]},
        {'a': [3, 4, 5], 'c': [7, 8, 9]}
    ]
    output = config_utils.merge_dictionaries_with_list_values(dicts)
    # Values under a repeated key are concatenated in input order.
    expected = {
        'a': [0, 1, 2, 3, 4, 5],
        'b': [10, 11],
        'c': [7, 8, 9]
    }
    assert output == expected
-------------------------------------------------------------------------------- /backend/tests/lib/test_utils.py: --------------------------------------------------------------------------------
"""Tests for util methods."""
import math

from backend.lib.utils import iterators


def test_iterate_in_slices_odd_fit():
    """
    Test that iterate_in_slices always stays below the given output size.

    Specifically tests the case when the batch size does not fit evenly into the
    total size. In addition, test that no extra iterations are required.
    """
    total_size = 10
    output_size = 3

    iterable = iter(range(total_size))

    output = []
    for idx, slice_ in enumerate(iterators.iterate_in_slices(iterable, output_size)):
        # Every slice is non-empty and no larger than the requested size.
        assert 0 < len(slice_) <= output_size
        # ceil(10 / 3) == 4 slices at most — no wasted iterations.
        assert idx < math.ceil(total_size / output_size)
        output.extend(slice_)

    assert len(output) == total_size


def test_iterate_in_slices_even_fit():
    """
    Test that iterate_in_slices handles the end of a list properly.

    Specifically tests the case when the batch size fits evenly into the total size.
    """
    total_size = 10
    batch_size = 2
    iterable = iter(range(total_size))

    output = []
    for idx, slice_ in enumerate(iterators.iterate_in_slices(iterable, batch_size)):
        assert 0 < len(slice_) <= batch_size
        assert idx < math.ceil(total_size / batch_size)
        output.extend(slice_)

    assert len(output) == total_size
-------------------------------------------------------------------------------- /backend/tests/models/test_distance.py: --------------------------------------------------------------------------------
"""Tests for methods measuring distance between two series of points."""
from backend.lib.utils.datatypes import Point
from backend.models import base
from backend.models import distance

import pytest


# Fixed reference points; the distances between them are known, which lets the
# tests below assert against hard-coded expected values.
NEWPORT_RI = Point(**{'longitude': -71.312796, 'latitude': 41.49008})
CLEVELAND_OH = Point(**{'longitude': -81.695391, 'latitude': 41.499498})
EUCLID_OH = Point(**{'longitude': -81.526787, 'latitude': 41.593105})
NASSAU = Point(**{'longitude': -77.3554, 'latitude': 25.0480})
MIAMI_FL = Point(**{'longitude': -80.1918, 'latitude': 25.7617})


class TestMetrics():
    """Test base metrics in the distance module."""

    def setup(self):
        """Initialize a measurer for use in the test cases."""
        self.measurer = distance.HaversineDistance()

    def test_haversine_distance_class(self):
        """Check that the haversine distance matches expectations."""
        d = self.measurer.measure_between_two_points(NEWPORT_RI, CLEVELAND_OH)
        # Known Newport–Cleveland distance: ~863.731 km, measured in meters.
        assert abs(d - 863.731 * 10**3) < 10**-2

    def test_haversine_distance_returns_none_when_a_point_is_missing(self):
        """Check that the distance is None when either endpoint is missing."""
        d1 = self.measurer.measure_between_two_points(NEWPORT_RI, None)
        d2 = self.measurer.measure_between_two_points(None, CLEVELAND_OH)
        assert d1 is None
        assert d2 is None

    def test_measure_distance_class_raises_not_implemented_error(self):
        """The base interface for measurement should raise a NotImplementedError."""
        measurer = base.Measurer()
        with pytest.raises(NotImplementedError):
            measurer.measure_between_two_points(NEWPORT_RI, CLEVELAND_OH)

    def test_haversine_distance_closest(self):
        """Check that the haversine closest distance works."""
        closest_distance, closest_town = self.measurer.closest(
            origin=NASSAU,
            point_list=[NEWPORT_RI, CLEVELAND_OH]
        )
        # Of the two candidates, Cleveland is nearer to Nassau than Newport.
        assert closest_town == CLEVELAND_OH

    def test_haversine_distance_closest_with_early_exit(self):
        """Check that the haversine closest_with_early_exit distance works."""
        # With a generous exit distance (320 km), the first point within range
        # (Miami) is returned even though Nassau itself is also in the list.
        closest_distance, closest_town = self.measurer.closest_with_early_exit(
            origin=NASSAU,
            point_list=[MIAMI_FL, NASSAU, NEWPORT_RI, CLEVELAND_OH],
            exit_distance=320 * 10**3
        )
        assert closest_town == MIAMI_FL

        # With a tighter exit distance (80 km), Miami no longer triggers the
        # early exit and the exact origin point wins.
        closest_distance, closest_town = self.measurer.closest_with_early_exit(
            origin=NASSAU,
            point_list=[MIAMI_FL, NASSAU, NEWPORT_RI, CLEVELAND_OH],
            exit_distance=80 * 10**3
        )
        assert closest_town == NASSAU
-------------------------------------------------------------------------------- /backend/tests/models/test_open_route_service.py: --------------------------------------------------------------------------------
"""Test Open Route Service API."""
import os

from backend.lib.utils.datatypes import Point
from backend.models import time

import mock

NEWPORT_RI = Point(**{'longitude': -71.312796, 'latitude': 41.49008})
CLEVELAND_OH = Point(**{'longitude': -81.695391, 'latitude': 41.499498})


class TestOpenRouteDrivingTime():
    """Test methods related to the Open Route Service API."""

    def setup(self):
        """Initialize a measurer for use in the test cases."""
        # Fall back to a 'fake' sentinel token so the mocked tests can run
        # without ORS credentials; the live test below checks for it.
        self.access_token = os.environ.get('ORS_TOKEN', 'fake')
        self.measurer = time.OpenRouteDrivingTime(access_token=self.access_token)

    @mock.patch('backend.models.time.distance_matrix')
    def test_get_matrix(self, mock_distance_matrix):
        """Check that the get_matrix function sends back the complete 'durations' element."""
        mock_distance_matrix.return_value = {'durations': [[35799.4, 35799.4]]}
        matrix_durations = self.measurer._get_matrix(
            source_points=[NEWPORT_RI],
            destination_points=[CLEVELAND_OH, CLEVELAND_OH]
        )

        assert abs(matrix_durations[0][0] - 35799.4) < 20.0 * 60

    def test_distance_matrix(self):
        """Check that the distance_matrix function works when a real API key is provided."""
        # Only exercises the live API when a real ORS_TOKEN was set in the env.
        if self.access_token != 'fake':
            matrix_durations = self.measurer._get_matrix(
                source_points=[NEWPORT_RI],
                destination_points=[CLEVELAND_OH, CLEVELAND_OH]
            )

            assert len(matrix_durations[0]) == 2
            assert matrix_durations[0][0] is not None
        else:
            print('OpenRouteDrivingTime not fully tested. No API key provided. $ORS_TOKEN.')
-------------------------------------------------------------------------------- /backend/tests/requests/test_census.py: --------------------------------------------------------------------------------
"""Test census information requests for the Encompass API."""
from backend.app.requests import census
from backend.lib.database.postgres import connect

import flask

from flask_testing import LiveServerTestCase

import mock

import pytest

engine = connect.create_db_engine()


class TestCensusByServiceAreaRequest(LiveServerTestCase):
    """Test class for census info requests."""

    def create_app(self):
        """Start a new flask app for testing."""
        app = flask.Flask(__name__)
        app.config['TESTING'] = True
        return app

    @mock.patch('backend.lib.fetch.census.fetch_census_info_by_service_area')
    def test_census_info_by_service_area_request(self, mock_fetch):
        """Test census by service area request in a simple case."""
        service_area_ids = ['ca_alameda_94601']
        mock_fetch.return_value = {'ca_alameda_94601': {'age': {'0-18 Years': 10.0}}}

        # Stand-in for flask.Request: only get_json() is consumed by the handler.
        def _mock_get_json(force=True):
            return {'service_area_ids': service_area_ids}

        mock_request = mock.MagicMock()
        mock_request.get_json = _mock_get_json

        try:
            output = census.census_info_by_service_area_request(
                app=self.app, flask_request=mock_request, engine=engine
            )
        except TypeError:
            pytest.fail('Could not retrieve census information.')

        # The handler should pass the mocked fetch result through unchanged.
        assert output == {'ca_alameda_94601': {'age': {'0-18 Years': 10.0}}}
-------------------------------------------------------------------------------- /backend/tests/requests/test_providers.py: --------------------------------------------------------------------------------
"""Test providers requests for Time-Distance API."""
from backend.app.requests import providers
from backend.lib.database.postgres import connect

import flask

from flask_testing import LiveServerTestCase

import mock


engine = connect.create_db_engine()


class TestProvidersRequest(LiveServerTestCase):
    """Test class for providers request file."""

    def create_app(self):
        """Start a new flask app for testing."""
        app = flask.Flask(__name__)
        app.config['TESTING'] = True
        return app

    def test_provider_request(self):
        """Test provider requests in a simple case."""
        request_providers = {
            'addresses': ['1855 Mission Street, San Francisco, CA 94110']
        }

        def _mock_get_json(force=True):
            return request_providers
        mock_request = mock.MagicMock()
        mock_request.get_json = _mock_get_json
        response = providers.providers_request(self.app, mock_request, engine)
        # A real, geocodable address should yield a success entry.
        assert response[0]['status'] == 'success'

    def test_provider_request_db_error(self):
        """Test provider requests in a simple case with badly formatted addresses."""
        request_providers = {
            'addresses': ['provider_address_1', 'provider_address_1']
        }

        def _mock_get_json(force=True):
            return request_providers
        mock_request = mock.MagicMock()
        mock_request.get_json = _mock_get_json
        response = providers.providers_request(self.app, mock_request, engine)
        # Un-geocodable addresses produce one error entry per input address.
        error_message = {
            'message': 'Failed to geocode address for this provider.', 'status': 'error'
        }
        assert response == [error_message, error_message]
-------------------------------------------------------------------------------- /backend/tests/requests/test_representative_points.py: --------------------------------------------------------------------------------
"""Test providers requests for Time-Distance API."""
from backend.app.exceptions.format import InvalidFormat
from backend.app.requests import representative_points
from backend.lib.database.postgres import connect
from backend.lib.fetch.representative_points import (
    fetch_representative_points
)

import flask

from flask_testing import LiveServerTestCase

import mock

import pytest

engine = connect.create_db_engine()


class TestRepresentativePointsRequest(LiveServerTestCase):
    """Test class for providers request file."""

    def create_app(self):
        """Start a new flask app for testing."""
        app = flask.Flask(__name__)
        app.config['TESTING'] = True
        return app

    @mock.patch('backend.lib.fetch.representative_points.fetch_representative_points')
    def test_rp_request(self, mock_fetch):
        """Test provider requests in a simple case."""
        request_service_areas = {'service_area_ids': ['ca_los_angeles_county_00000', 'not_valid']}

        def _mock_get_json(force=True):
            return request_service_areas
        mock_request = mock.MagicMock()
        mock_request.get_json = _mock_get_json

        # NOTE(review): the mock's return value is built by calling the REAL
        # fetch_representative_points, so this test pins the request-handling
        # layer while still requiring a working database underneath.
        mock_fetch.return_value = fetch_representative_points(
            service_area_ids=request_service_areas['service_area_ids'],
            include_census_data=False,
            engine=engine
        )

        try:
            results = representative_points.representative_points_request(
                self.app, mock_request, engine
            )
        except Exception:
            pytest.fail('Could not fetch representative_points.')

        assert len(results) > 1000
        # Invalid service area ids are silently dropped; every returned point
        # must belong to one of the requested areas.
        assert all(
            result['service_area_id'] in request_service_areas['service_area_ids']
            for result in results
        )

    def test_rp_request_missing_service_areas(self):
        """Test that a request without service_area_ids raises InvalidFormat."""
        request_service_areas = {
            'no_service_area_ids': ['ca_los_angeles_county_00000', 'not_valid']
        }

        def _mock_get_json(force=True):
            return request_service_areas
        mock_request = mock.MagicMock()
        mock_request.get_json = _mock_get_json

        with pytest.raises(InvalidFormat):
            representative_points.representative_points_request(self.app, mock_request, engine)
-------------------------------------------------------------------------------- /backend/tests/requests/test_service_areas.py: --------------------------------------------------------------------------------
"""Test service area requests for the Encompass API."""
from backend.app.requests import service_areas
from backend.lib.database.postgres import connect

import flask

from flask_testing import LiveServerTestCase

import mock

import pytest

engine = connect.create_db_engine()


class TestServiceAreaRequest(LiveServerTestCase):
    """Test class for service areas requests."""

    def create_app(self):
        """Start a new flask app for testing."""
        app = flask.Flask(__name__)
        app.config['TESTING'] = True
        return app

    @mock.patch('backend.lib.fetch.representative_points.fetch_all_service_areas')
    def test_request_service_areas(self, mock_fetch_service_areas):
        """Test service area requests in a simple case."""
        complete_list_of_service_areas = ['ca_alameda_94601']
        mock_fetch_service_areas.return_value = complete_list_of_service_areas
        try:
            output = service_areas.service_areas_request(self.app, {}, engine)
        except TypeError:
            pytest.fail('Could not retrieve service areas.')

        # The handler should pass the fetched list through unchanged.
        assert output == complete_list_of_service_areas
-------------------------------------------------------------------------------- /data/California/all_ca_counties.csv: --------------------------------------------------------------------------------
CountyName
Alameda County
Alpine County
Amador County
Butte County
Calaveras County
Colusa County
Contra Costa County
Del Norte County
El Dorado County
Fresno County
Glenn County
Humboldt County
Imperial County
Inyo County
16 | Kern County 17 | Kings County 18 | Lake County 19 | Lassen County 20 | Los Angeles County 21 | Madera County 22 | Marin County 23 | Mariposa County 24 | Mendocino County 25 | Merced County 26 | Modoc County 27 | Mono County 28 | Monterey County 29 | Napa County 30 | Nevada County 31 | Orange County 32 | Placer County 33 | Plumas County 34 | Riverside County 35 | Sacramento County 36 | San Benito County 37 | San Bernardino County 38 | San Diego County 39 | San Francisco County 40 | San Joaquin County 41 | San Luis Obispo County 42 | San Mateo County 43 | Santa Barbara County 44 | Santa Clara County 45 | Santa Cruz County 46 | Shasta County 47 | Sierra County 48 | Siskiyou County 49 | Solano County 50 | Sonoma County 51 | Stanislaus County 52 | Sutter County 53 | Tehama County 54 | Trinity County 55 | Tulare County 56 | Tuolumne County 57 | Ventura County 58 | Yolo County 59 | Yuba County 60 | -------------------------------------------------------------------------------- /data/images/encompass_texas.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bayesimpact/encompass/a4f47e384ef4fe4dc43c30423a1713c2c93dc87f/data/images/encompass_texas.png -------------------------------------------------------------------------------- /data/sample/mock-providers.csv: -------------------------------------------------------------------------------- 1 | address,latitude,longitude 2 | "testing1",37.74753421600008,-122.2316317029999 3 | "testing2",40.74753421600008,-80.2316317029999 4 | -------------------------------------------------------------------------------- /data/urban_rural_codes/NCHSURCodes2013.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bayesimpact/encompass/a4f47e384ef4fe4dc43c30423a1713c2c93dc87f/data/urban_rural_codes/NCHSURCodes2013.txt -------------------------------------------------------------------------------- 
/docker-compose.local.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | services: 4 | frontend: 5 | ports: 6 | - 80:8081 7 | -------------------------------------------------------------------------------- /docker-compose.override.db.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | volumes: 4 | postgres-data: 5 | driver: local 6 | 7 | services: 8 | 9 | backend: 10 | depends_on: 11 | - db 12 | links: 13 | # Link with the DB container as `db.local`. 14 | - db:db.local 15 | volumes: 16 | - ./backend/config/entrypoint-local.sh:/entrypoint.sh:ro 17 | environment: 18 | - POSTGRES_URL=postgres://postgres:example@db.local:5432/network_adequacy 19 | - POSTGRES_URL_POSTGRES=postgres://postgres:example@db.local:5432/postgres 20 | 21 | db: 22 | image: mdillon/postgis 23 | ports: 24 | # Expose port for convenient local access. 25 | - 5432:5432 26 | volumes: 27 | - postgres-data:/var/lib/postgresql/data 28 | environment: 29 | POSTGRES_PASSWORD: example 30 | POSTGRES_DB: network_adequacy 31 | -------------------------------------------------------------------------------- /docker-compose.remote.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | services: 4 | frontend: 5 | volumes: 6 | # Mount the nginx configuration. 7 | - ./frontend/remote/nginx/nginx.conf:/etc/nginx/conf.d/nginx.conf:rw 8 | # Mount the entrypoint. 
9 | - ./frontend/remote/entrypoint.sh:/tmp/entrypoint.sh 10 | entrypoint: ["/tmp/entrypoint.sh"] 11 | command: ["yarn run start:prod"] 12 | ports: 13 | - 80:80 14 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | volumes: 4 | postgres-data: 5 | driver: local 6 | 7 | services: 8 | 9 | backend: 10 | build: 11 | context: ./backend 12 | dockerfile: Dockerfile 13 | volumes: 14 | # App files and config 15 | - ./backend/config/uwsgi.ini:/etc/uwsgi/uwsgi.ini:ro 16 | - ./backend/config/nginx.conf:/etc/nginx/conf.d/nginx.conf:ro 17 | - ./backend/app/main.py:/app/main.py:ro 18 | # Cache 19 | - ./.cache/:/app/cache:rw 20 | # Library 21 | - ./backend:/usr/local/lib/python3.6/site-packages/backend:ro 22 | # Data 23 | - ./data/:/app/data/:rw 24 | - ./shared/:/app/shared:rw 25 | # Runners 26 | - ./backend/runners/:/app/runners:ro 27 | # Linters and Tests 28 | - ./backend/tests/:/app/tests:ro 29 | - ./backend/.flake8:/app/.flake8:ro 30 | - ./backend/.coveragerc:/app/.coveragerc:ro 31 | - ./backend/lib/:/app/lib:ro 32 | # Entrypoint 33 | - ./backend/config/entrypoint.sh:/entrypoint.sh:ro 34 | env_file: 35 | - .env 36 | ports: 37 | - 8080:8080 38 | 39 | frontend: 40 | volumes: 41 | - ./frontend/src:/usr/src/app/src:rw 42 | - ./frontend/public:/usr/src/app/public:rw 43 | - ./shared:/usr/src/app/src/shared:ro 44 | - ./shared:/usr/src/shared:ro 45 | build: 46 | args: 47 | - GA_ID 48 | - MAPBOX_TOKEN 49 | context: . 50 | dockerfile: ./frontend/Dockerfile 51 | ports: 52 | - 8081:8081 53 | env_file: 54 | - .env 55 | 56 | osrm: 57 | build: 58 | context: ./osrm 59 | dockerfile: Dockerfile 60 | image: osrm 61 | volumes: 62 | - ./data/:/opt/data/:rw 63 | - ./osrm:/osrm/:ro 64 | command: sh -c "/osrm/initialize.py" 65 | ports: 66 | - 80:5000 67 | 68 | explorer: 69 | build: 70 | context: . 
71 | dockerfile: ./explorer/Dockerfile 72 | volumes: 73 | - ./backend:/home/jovyan/work/backend:ro 74 | - ./shared:/app/shared:ro 75 | - ./explorer:/home/jovyan/work:rw 76 | - ./data/:/home/jovyan/work/data/:rw 77 | - ./frontend/src:/usr/src/app/src:rw 78 | env_file: 79 | - .env 80 | command: bash -c "start-notebook.sh --NotebookApp.token=''" 81 | ports: 82 | - 8888:8888 83 | environment: 84 | - PYTHONPATH=/home/jovyan/work 85 | -------------------------------------------------------------------------------- /explorer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jupyter/base-notebook:latest 2 | 3 | RUN conda install --yes --no-pin \ 4 | geopandas \ 5 | statsmodels \ 6 | seaborn 7 | 8 | RUN conda install --yes -c esri arcgis 9 | 10 | USER root 11 | RUN apt-get -qq update 12 | RUN apt-get install -y \ 13 | libgeos-dev \ 14 | libspatialindex-dev \ 15 | gdal-bin \ 16 | man \ 17 | gcc \ 18 | --fix-missing 19 | USER jovyan 20 | 21 | WORKDIR /home/jovyan/work 22 | RUN cd /home/jovyan/work 23 | 24 | COPY ./backend/requirements.txt . 25 | COPY ./explorer/explorer_requirements.txt . 26 | RUN pip install -r requirements.txt 27 | RUN pip install -r explorer_requirements.txt 28 | 29 | EXPOSE 8888 30 | ENV NODE_ENV development 31 | -------------------------------------------------------------------------------- /explorer/README.md: -------------------------------------------------------------------------------- 1 | # Explorer 2 | 3 | This container is provided for performing TDS-related analysis. 4 | 5 | ```sh 6 | make explorer 7 | ``` 8 | 9 | - Open a browser and navigate to `localhost:8888`. 
10 | 11 | In addition to methods from the backend, this environment gives you the following: 12 | - geopandas 13 | - scikit-learn 14 | - statsmodels 15 | - seaborn 16 | -------------------------------------------------------------------------------- /explorer/__init__.py: -------------------------------------------------------------------------------- 1 | """Files for data exploration.""" 2 | -------------------------------------------------------------------------------- /explorer/explorer_requirements.txt: -------------------------------------------------------------------------------- 1 | descartes>=1.1.0 2 | seaborn>=0.8.1 3 | pandas>=0.22.0 4 | numpy>=1.14.0 5 | ijson==2.3 6 | -------------------------------------------------------------------------------- /explorer/lib/__init__.py: -------------------------------------------------------------------------------- 1 | """Library functions for explorer notebooks and scripts.""" 2 | -------------------------------------------------------------------------------- /explorer/lib/etl_helper.py: -------------------------------------------------------------------------------- 1 | """Functions utilized in more than one explorer script.""" 2 | import logging 3 | import pandas as pd 4 | import requests 5 | 6 | HEALTHCARE_GOV_PATH = '/home/jovyan/work/data/healthcare_gov' 7 | PATH_TO_PUFS = HEALTHCARE_GOV_PATH + '/Machine_Readable_URL_PUF.csv' 8 | PATH_TO_PLANS = HEALTHCARE_GOV_PATH + '/Plan_Attributes_PUF.csv' 9 | 10 | 11 | def get_issuer_plan_ids(issuer): 12 | """Given an issuer id, return all of the plan ids registered to that issuer.""" 13 | df = pd.read_csv(PATH_TO_PLANS) 14 | df = df[df.IssuerId.astype(str) == issuer] 15 | return set(df.StandardComponentId.unique()) 16 | 17 | 18 | def extract_plans(state): 19 | """ 20 | Extract issuer ids and URLs from the puf file. 

    Returns list of tuples of (issuer id, plan url).
    """
    df = pd.read_csv(PATH_TO_PUFS)
    df = df[df.State == state]
    return list(zip(df['Issuer ID'].astype(str), df['URL Submitted']))


def fetch_provider_urls(plan_url):
    """Fetch all provider urls listed on a plan's url.

    Returns None (after logging a warning) when the plan JSON has no
    'provider_urls' key; raise_for_status propagates HTTP errors.
    """
    response = requests.get(plan_url)
    response.raise_for_status()
    response_json = response.json()
    if 'provider_urls' in response_json:
        return response_json['provider_urls']
    logging.warning('No provider URLs available for plan url: {}'.format(plan_url))


def clean_paths(url):
    """Translate urls into human-readable filenames."""
    # Order matters: scheme prefixes and known suffixes are stripped first;
    # the trailing '.' entry then removes any remaining dots.
    to_remove = ['http://', 'https://', 'www.', '.com', '.json', '.']
    for item in to_remove:
        url = url.replace(item, '')

    return url.replace('/', '_')


def clean_plan_name(plan_name):
    """Standardize plan name: spaces become underscores, result is lower-cased."""
    return str.lower(plan_name.replace(' ', '_'))


def query_yes_no(message, default='y'):
    """Query user for a yes/no choice.

    `default` controls which option is capitalized in the prompt and whether an
    empty answer counts as yes (it does only when the default is yes).
    """
    choices = 'Y/n' if default.lower() in ('y', 'yes') else 'y/N'
    choice = str(input('%s (%s) ' % (message, choices)))
    values = ('y', 'yes', '') if choices == 'Y/n' else ('y', 'yes')
    return choice.strip().lower() in values
-------------------------------------------------------------------------------- /explorer/notebooks/ArcgisConnect.ipynb: --------------------------------------------------------------------------------
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "import arcgis\n",
    "from arcgis.gis import GIS\n",
    "\n",
    "from IPython.display import HTML"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text":
[ 26 | "Enter password: ········\n" 27 | ] 28 | } 29 | ], 30 | "source": [ 31 | "# Initialize GIS\n", 32 | "gis = GIS(url='http://dmhc.esri.com/portal', username='ericBayes', password=os.environ.get('ARCGIS_PASSWORD', None))" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 13, 38 | "metadata": {}, 39 | "outputs": [], 40 | "source": [ 41 | "# List Toolboxes\n", 42 | "# Link - https://developers.arcgis.com/python/guide/accessing-geoprocessing-tools/\n", 43 | "toolboxes = gis.content.search('travel', 'Geoprocessing Toolbox', \n", 44 | " outside_org=True, max_items=3)" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 14, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "for toolbox in toolboxes:\n", 54 | " display(toolbox)" 55 | ] 56 | } 57 | ], 58 | "metadata": { 59 | "kernelspec": { 60 | "display_name": "Python 3", 61 | "language": "python", 62 | "name": "python3" 63 | }, 64 | "language_info": { 65 | "codemirror_mode": { 66 | "name": "ipython", 67 | "version": 3 68 | }, 69 | "file_extension": ".py", 70 | "mimetype": "text/x-python", 71 | "name": "python", 72 | "nbconvert_exporter": "python", 73 | "pygments_lexer": "ipython3", 74 | "version": "3.6.3" 75 | } 76 | }, 77 | "nbformat": 4, 78 | "nbformat_minor": 2 79 | } 80 | -------------------------------------------------------------------------------- /explorer/notebooks/data.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bayesimpact/encompass/a4f47e384ef4fe4dc43c30423a1713c2c93dc87f/explorer/notebooks/data.pkl -------------------------------------------------------------------------------- /explorer/scripts/README.md: -------------------------------------------------------------------------------- 1 | ## Instructions for Running 2 | 3 | This directory contains scripts that are best run using the `explorer` Docker image. 
Before running any of them, make sure you have an up-to-date version of the `explorer` Docker image. 4 | 5 | ### healthcare\_gov\_url\_to_csv.py 6 | 7 | This script uses a Public Use File (PUF) containing `healthcare.gov` issuer IDs and associated plan URLs and extracts the results as CSV. The following commands run the script for the state of Florida: 8 | 9 | `docker-compose run explorer bash` 10 | `mkdir data/healthcare_gov/FL/` 11 | `python scripts/healthcare_gov_url_to_csv.py --state 'FL' --neighboring_states 'GA' 'AL' 'MS' 'SC'` 12 | 13 | You will be prompted to ignore dental plans, Humana plans (Humana is not participating in the individual marketplace for 2018), and pharmacy URLs. 14 | 15 | For each issuer ID the PUF contains one or more links to large JSON files containing providers' information. This information can either be streamed from the web or from disk (using the `--from_file` flag) using the `ijson` library. Only provider addresses that are both individuals (as opposed to facilities) and that are in the given state (or a neighboring one) are considered. These filtered JSON objects are flattened to CSVs. Each record in the CSV represents a distinct address, plan, specialty, provider name combination. 16 | 17 | If a CSV already exists for a given issuer ID, that issuer ID is skipped, allowing the script to be safely interrupted between the processing of two issuer IDs. To re-run for a particular issuer, make sure that the existing CSV file has been deleted. 18 | 19 | ### merge\_issuer\_csvs.py 20 | 21 | This script merges the many CSVs across multiple issuer IDs into a single file. For now, it must be edited manually in order to run successfully. 
22 | -------------------------------------------------------------------------------- /frontend/.firebaserc: -------------------------------------------------------------------------------- 1 | { 2 | "projects": { 3 | "default": "encompass-bayes" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /frontend/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": "node_modules/typescript/lib" 3 | } -------------------------------------------------------------------------------- /frontend/ARCHITECTURE.md: -------------------------------------------------------------------------------- 1 | # Time Distance Standards (frontend) Architecture 2 | 3 | ## Design principles 4 | 5 | 1. **Safety** 6 | - TypeScript + React give us a high level of compile-time safety and confidence that if the app compiles, it probably works as expected 7 | - A linter prevents other common errors, stylistic ambiguity 8 | 2. **Debuggability** 9 | - Using CSS classes instead of inline styles makes CSS bugs easy to debug 10 | - Source maps + [Babydux](https://github.com/bcherny/babydux)'s logger make JavaScript bugs easy to debug 11 | 3. **Simplicity** 12 | - React simplifies the view layer (prefer pure components over stateful ones) 13 | - Babydux instead of Redux simplifies state management 14 | - Material-UI provides most CSS out of the box 15 | 4. 
**Reusability** 16 | - Components have generic APIs, and can be easily reused across projects 17 | 18 | ## Structure 19 | 20 | - src/ 21 | - *components/* 22 | - *constants/* 23 | - *services/* 24 | - *utils/* 25 | 26 | ## Architecture 27 | 28 | - [*services/store.ts*] All global state is stored in the singleton `store` 29 | - State changes are reactive event streams, folded over the initial state to update it 30 | - Consumers use the `store.get` and `store.set` APIs 31 | - React components subscribe to `store` updates with the `withStore` API 32 | - No explicit Actions, Reducers usually not needed 33 | - [*services/effects.ts*] Responses to state changes outside of components (aka. reducers) 34 | - [*services/api.ts*] All functions that interact with the network 35 | -------------------------------------------------------------------------------- /frontend/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | ## Install and run locally 3 | 4 | ```bash 5 | cd frontend 6 | # Install dependencies 7 | yarn 8 | # Recompile when anything changes 9 | yarn watch 10 | ``` 11 | 12 | Then, open [http://localhost:8081]() in your browser. 13 | 14 | ## Editor 15 | 16 | I suggest editing this project using [VSCode](https://code.visualstudio.com/). VSCode will automatically suggest a small set of extensions that will surface type and lint errors directly in your editor. It will also auto-format files on save, reducing the need to manually fix lint errors. 17 | 18 | ## Tips 19 | 20 | - To debug React performance issues: 21 | 22 | 1. Install the [React Chrome extension](https://chrome.google.com/webstore/detail/react-developer-tools/fmkadmapgofadopljbjfkapdkoienihi), and check *Highlight Updates* to visualize which components are re-rendering 23 | 2. 
Add `?react_perf` to the browser URL, run a Performance profile, and expand User Timings to see a flame graph of exactly what work is being done while re-rendering 24 | 3. Yarn install `why-did-you-update` and add it to index.tsx to visualize why components are re-rendering 25 | 26 | - To debug data changes in the Babydux store, add the [`withLogger` decorator](https://github.com/bayesimpact/encompass/blob/7c3c91b/src/services/store.ts#L102) to the store and open your devtools console 27 | 28 | - To debug tricky unit test failures: 29 | 1. Set a breakpoint in your code with the `debugger` pragma 30 | 2. Run `yarn test:debug` 31 | -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:9.11.1 2 | 3 | ARG MAPBOX_TOKEN 4 | ENV MAPBOX_TOKEN "$MAPBOX_TOKEN" 5 | ARG GA_ID 6 | ENV GA_ID "$GA_ID" 7 | 8 | # Set up nginx to proxy requests. 9 | # Should probably switch to express since we are in a node container anyway. 10 | RUN apt-get update; apt-get -y install nginx 11 | 12 | RUN mkdir -p /usr/src/app 13 | WORKDIR /usr/src/app 14 | 15 | COPY ./frontend/. /usr/src/app 16 | 17 | COPY ./shared /usr/src/shared 18 | 19 | EXPOSE 8081 20 | 21 | RUN yarn 22 | RUN yarn run build:prod 23 | 24 | CMD ["yarn", "run", "start:prod"] 25 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | # Encompass (Frontend) 2 | 3 | ## Install 4 | 5 | Clone this repo: 6 | 7 | ```sh 8 | git clone git@github.com:bayesimpact/encompass.git 9 | ``` 10 | 11 | ## Build & Run 12 | 13 | ```sh 14 | docker-compose build frontend 15 | docker-compose run frontend 16 | ``` 17 | 18 | ## Build & Run (for Local Development) 19 | 20 | ### Install Dependencies 21 | 22 | 1. Install [NVM](https://github.com/creationix/nvm#installation) 23 | 2. 
Install Node 8: `nvm install v8.1.3` 24 | 3. Install [Yarn](https://yarnpkg.com/en/docs/install) 25 | 26 | ### Build & Run 27 | 28 | ```sh 29 | yarn 30 | yarn build 31 | yarn start 32 | ``` 33 | 34 | Then, open [localhost:8081](http://localhost:8081) in your browser. 35 | 36 | ## Codegen API typings and Service Areas 37 | 38 | When the shape of an API endpoint changes, or when available service areas change: 39 | 40 | 1. Update the endpoint's JSON-Schema in [shared/api-spec](../shared/api-spec) 41 | 2. Regenerate code with `yarn codegen` 42 | 43 | ## Tests 44 | 45 | ```sh 46 | yarn test 47 | ``` 48 | 49 | ## Firebase 50 | 51 | To deploy it as a standalone frontend on Firebase: 52 | 53 | ```sh 54 | docker-compose build frontend 55 | docker-compose run --rm frontend bash -c "cp dist/* public/" 56 | cd frontend 57 | firebase deploy 58 | cd - 59 | ``` 60 | -------------------------------------------------------------------------------- /frontend/firebase.json: -------------------------------------------------------------------------------- 1 | { 2 | "hosting": { 3 | "public": "public", 4 | "ignore": [ 5 | "firebase.json", 6 | "**/.*", 7 | "**/node_modules/**" 8 | ], 9 | "rewrites": [ 10 | { 11 | "source": "**", 12 | "destination": "/index.html" 13 | } 14 | ] 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /frontend/remote/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # In the remote environments, we need to start up nginx before running yarn. 4 | # We need to do this in two steps because docker filesystem is weird. 
# Substitute the real domain into the nginx config template and install it
# in one step. Plain `sed -e` writes to stdout and leaves the input file
# untouched, so the result must be redirected into place.
# (The original ran sed without -i and then `cp`'d the *unmodified*
# template, so ${ROOT_DOMAIN} never actually replaced CHANGEME.)
sed -e "s/CHANGEME/${ROOT_DOMAIN}/g" /tmp/nginx.conf > /etc/nginx/conf.d/nginx.conf
service nginx start
eval "$@"
import Dialog from 'material-ui/Dialog' 3 | import IconButton from 'material-ui/IconButton' 4 | import NavigationClose from 'material-ui/svg-icons/navigation/close' 5 | import * as React from 'react' 6 | import './AlertDialog.css' 7 | 8 | type Props = { 9 | message: string | null 10 | onCloseClick: () => void 11 | } 12 | 13 | export let AlertDialog: React.StatelessComponent = ({ 14 | message, onCloseClick 15 | }) => { 16 | return ]} 21 | onRequestClose={onCloseClick} 22 | title={ 23 |
24 | 25 |
26 | } 27 | > 28 |
29 |

30 | {message} 31 |

32 |
33 |
34 | } 35 | -------------------------------------------------------------------------------- /frontend/src/components/AnalyticsDrawer/AnalyticsDrawer.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .AnalyticsDrawer .Description { 4 | position: relative; 5 | } 6 | 7 | .AnalyticsDrawer .Description > h4 { 8 | width: var(--analytics-title-width); 9 | display: inline-block; 10 | vertical-align: top; 11 | } 12 | 13 | .AnalyticsDrawer .Description > div { 14 | padding: 0 0 0 24px; 15 | position: relative; 16 | display: inline-block; 17 | width: calc(100% - var(--analytics-title-width) - 24px); 18 | } 19 | -------------------------------------------------------------------------------- /frontend/src/components/AnalyticsDrawer/AnalyticsDrawer.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import { withStore } from '../../services/store' 3 | import { SecureLink } from '../../utils/link' 4 | import { BackLink } from '../Link/Link' 5 | import { SelectorBlock } from '../Selectors/SelectorBlock' 6 | import './AnalyticsDrawer.css' 7 | import { CensusAnalytics } from './CensusAnalytics' 8 | 9 | /** 10 | * TODO: Show loading indicator while necessary data is being fetched. 11 | */ 12 | export let AnalyticsDrawer = withStore(({ store }) => { 13 | 14 | let selectedDataset = store.get('selectedDataset') 15 | 16 | if (!selectedDataset) { 17 | return
18 | 19 |

Error - Please reload the page

20 |
21 | } 22 | 23 | return
24 | 25 |

{selectedDataset.name}

26 |
27 |

Description

28 |
29 |
30 |
31 | 32 |
33 |
34 | 35 |
36 |
37 |

Data Sources

38 |
39 |
40 |
41 | Population Data: European Commission, Joint Research Centre (JRC); Columbia University, Center for International Earth Science Information Network 42 | - CIESIN (2015): GHS population grid, derived from GPW4, multitemporal (1975, 1990, 2000, 2015). 43 | European Commission, Joint Research Centre (JRC) {SecureLink('http://data.europa.eu/89h/jrc-ghsl-ghs_pop_gpw4_globe_r2015a', '[Dataset]')}. 44 |

45 | Demographic Information: 2012-2016 American Community Survey 5-year estimates. {SecureLink('https://www.census.gov/programs-surveys/acs/news/data-releases/2016/release.html', 'U.S. Census Bureau')}. 46 |
47 |
48 |
49 | }) 50 | -------------------------------------------------------------------------------- /frontend/src/components/AnalyticsDrawer/CensusAnalytics.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .CensusAnalytics .p { 4 | margin-top: 2px; 5 | } 6 | 7 | .CensusAnalytics .DownloadLink { 8 | height: 36px; 9 | padding-top: 10px; 10 | } 11 | 12 | .CensusAnalytics .DownloadAnalysisLink { 13 | position: absolute !important; /* override material-ui inline style */ 14 | right: 20px; 15 | } 16 | 17 | .CircularProgress { 18 | margin-bottom: 20px; 19 | } 20 | 21 | .CensusAnalytics .CensusDataChart { 22 | margin-bottom: 10px; 23 | } 24 | 25 | .CensusAnalytics .HighLevelStats { 26 | margin-bottom: 15px; 27 | } 28 | 29 | .TableHelpText { 30 | font-size: var(--small-font-size); 31 | color: var(--light); 32 | font-style: italic; 33 | } -------------------------------------------------------------------------------- /frontend/src/components/AnalyticsDrawer/CensusAnalytics.tsx: -------------------------------------------------------------------------------- 1 | import { isEmpty } from 'lodash' 2 | import CircularProgress from 'material-ui/CircularProgress' 3 | import * as React from 'react' 4 | import { ADEQUACY_COLORS } from '../../constants/colors' 5 | import { AdequacyMode } from '../../constants/datatypes' 6 | import { withStore } from '../../services/store' 7 | import { summaryStatisticsByServiceArea } from '../../utils/data' 8 | import { formatNumber } from '../../utils/formatters' 9 | import { CensusAdequacyCharts } from '../CensusAdequacyCharts/CensusAdequacyCharts' 10 | import { CensusAdequacyTable } from '../CensusAdequacyTable/CensusAdequacyTable' 11 | import { DownloadAnalysisLink } from '../DownloadAnalysisLink/DownloadAnalysisLink' 12 | import { StatsBox } from '../StatsBox/StatsBox' 13 | import './CensusAnalytics.css' 14 | 15 | export let CensusAnalytics = withStore(({ store }) => { 16 | 17 
| let selectedServiceAreas = store.get('selectedServiceAreas') ? store.get('selectedServiceAreas')! : store.get('serviceAreas') 18 | let selectedCensusCategory = store.get('selectedCensusCategory') 19 | let populationByAdequacy = summaryStatisticsByServiceArea(selectedServiceAreas, store.get('adequacies'), store.get('representativePoints')) 20 | let totalPopulation = populationByAdequacy.reduce(function (a, b) { return a + b }, 0) 21 | let totalProviders = store.get('providers').length 22 | 23 | if (isEmpty(store.get('adequacies'))) { 24 | return
25 | 30 |
31 | } 32 | return
33 | 34 | 35 | Total Population 36 | {formatNumber(totalPopulation)} 37 | Providers 38 | {formatNumber(totalProviders)} 39 | 40 | 41 |
Click on a row to filter data on the map.
42 | 43 | < div className='DownloadLink'> 44 | 45 |
46 | 47 |
48 | }) 49 | -------------------------------------------------------------------------------- /frontend/src/components/App/App.tsx: -------------------------------------------------------------------------------- 1 | import MuiThemeProvider from 'material-ui/styles/MuiThemeProvider' 2 | import * as React from 'react' 3 | import '../../services/effects' 4 | import { withStore } from '../../services/store' 5 | import { isWebGLEnabled } from '../../utils/webgl' 6 | import { AlertDialog } from '../AlertDialog/AlertDialog' 7 | import { ErrorBar } from '../ErrorBar/ErrorBar' 8 | import { SuccessBar } from '../ErrorBar/ErrorBar' 9 | import { FilterBar } from '../FilterBar/FilterBar' 10 | import { Header } from '../Header/Header' 11 | import { LeftPane } from '../LeftPane/LeftPane' 12 | import { MapView } from '../MapView/MapView' 13 | import './App.css' 14 | 15 | /** 16 | * Check to see if the client is mobile and display a warning if they are. 17 | */ 18 | 19 | /** 20 | * Check to see if WebGL is supported and if it isn't, offer the user the opportunity to be 21 | * redirected to instructions for how to enable it. 22 | */ 23 | if (!isWebGLEnabled()){ 24 | if (window.confirm('Unfortunately, WebGL is not enabled in your browser and you will not be able to display the map. Click "OK" to learn more, or "Cancel" to load Encompass without the map.')){ 25 | window.location.href = 'https://get.webgl.org/' 26 | } 27 | } 28 | 29 | export let App = withStore(({ store }) => 30 | 31 |
32 |
33 | 34 | 35 | 36 | store.set('error')(null)} 39 | /> 40 | store.set('success')(null)} 43 | /> 44 | store.set('alert')(null)} 47 | /> : null} 48 |
49 |
50 | ) 51 | App.displayName = 'App' 52 | -------------------------------------------------------------------------------- /frontend/src/components/CSVUploader/CSVUploader.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .CSVUploader input[type="file"] { 4 | display: none; 5 | margin: auto; 6 | } 7 | 8 | .CSVUploader { 9 | width: 190px; 10 | text-align: left !important; 11 | } 12 | 13 | .CSVUploader .Button { 14 | color: var(--primary-dark); 15 | margin: auto; 16 | } 17 | -------------------------------------------------------------------------------- /frontend/src/components/CSVUploader/CSVUploader.tsx: -------------------------------------------------------------------------------- 1 | import FlatButton from 'material-ui/FlatButton' 2 | import * as React from 'react' 3 | import { CONFIG } from '../../config/config' 4 | import { withStore } from '../../services/store' 5 | import './CSVUploader.css' 6 | 7 | type Props = { 8 | label: string 9 | onUpload(file: File): void 10 | } 11 | 12 | export let CSVUploader = withStore(({ label, onUpload, store }) => 13 |
14 | 20 | { 22 | if (e.target.files) { 23 | const fileSize = e.target.files[0].size / 1024 / 1024 // in MB 24 | if (CONFIG.limit_upload_file_size && fileSize > 15) { 25 | store.set('alert')('File size exceeds 15 MB. Please try a smaller file.') 26 | } else { 27 | onUpload(e.target.files[0]) 28 | } 29 | e.currentTarget.value = '' 30 | } 31 | }} 32 | type='file' 33 | accept='.csv' 34 | /> 35 | 36 |
37 | ) 38 | CSVUploader.displayName = 'CSVUploader' 39 | -------------------------------------------------------------------------------- /frontend/src/components/CensusAdequacyCharts/CensusAdequacyCharts.tsx: -------------------------------------------------------------------------------- 1 | import 'chart.piecelabel.js' 2 | import 'chartjs-plugin-stacked100' 3 | import { tail } from 'lodash' 4 | import * as React from 'react' 5 | import { CONFIG } from '../../config/config' 6 | import { CENSUS_MAPPING } from '../../constants/census' 7 | import { withStore } from '../../services/store' 8 | import { summaryStatisticsByServiceAreaAndCensus } from '../../utils/data' 9 | import { CensusDataChart } from '../CensusDataChart/CensusDataChart' 10 | 11 | type Props = { 12 | serviceAreas: string[], 13 | censusCategory: string 14 | } 15 | 16 | /** 17 | * Use circular legend patches instead of the default rectangles. 18 | * 19 | * TODO: Fix typings upstream in DefinitelyTyped/chart.js 20 | */ 21 | 22 | export let CensusAdequacyCharts = withStore(({ serviceAreas, censusCategory, store }) => { 23 | let method = store.get('method') 24 | 25 | // Calculate summaryStatistics for each group. 26 | let populationByAdequacyByGroup = summaryStatisticsByServiceAreaAndCensus(serviceAreas, censusCategory, store) 27 | let censusGroups = ['Total Population'].concat(CENSUS_MAPPING[censusCategory]) 28 | 29 | return
30 | 36 | {CONFIG.is_census_data_available ? 37 | : null 43 | } 44 |
45 | }) 46 | -------------------------------------------------------------------------------- /frontend/src/components/ClearInputsButton/ClearInputsButton.tsx: -------------------------------------------------------------------------------- 1 | import FlatButton from 'material-ui/FlatButton' 2 | import * as React from 'react' 3 | 4 | type Props = { 5 | className?: string 6 | onClearInputs(): void 7 | small?: true 8 | } 9 | 10 | export let ClearInputsButton: React.StatelessComponent = ({ className, onClearInputs, small }) => 11 | 17 | ClearInputsButton.displayName = 'ClearInputsButton' 18 | -------------------------------------------------------------------------------- /frontend/src/components/CountySelector/CountySelector.tsx: -------------------------------------------------------------------------------- 1 | import { without } from 'lodash' 2 | import Checkbox from 'material-ui/Checkbox' 3 | import List from 'material-ui/List' 4 | import ListItem from 'material-ui/List/ListItem' 5 | import * as React from 'react' 6 | import { State } from '../../constants/states' 7 | import { COUNTIES_BY_STATE } from '../../constants/zipCodes' 8 | 9 | type Props = { 10 | onChange(value: string[]): void 11 | state: State 12 | value: string[] 13 | } 14 | 15 | export class CountySelector extends React.Component { 16 | areAllCountiesSelected() { 17 | return COUNTIES_BY_STATE[this.props.state].length === this.props.value.length 18 | } 19 | onCheckCounty = (event: React.MouseEvent, isChecked: boolean) => { 20 | let county = event.currentTarget.value 21 | this.props.onChange(isChecked 22 | ? [...this.props.value, county] 23 | : without(this.props.value, county) 24 | ) 25 | } 26 | onCheckSelectAll = () => 27 | this.props.onChange(this.areAllCountiesSelected() 28 | ? [] 29 | : COUNTIES_BY_STATE[this.props.state] 30 | ) 31 | render() { 32 | return
33 | 34 | 43 | } 44 | /> 45 | {COUNTIES_BY_STATE[this.props.state].map(county => 46 | 56 | } 57 | /> 58 | )} 59 | 60 |
61 | } 62 | } 63 | -------------------------------------------------------------------------------- /frontend/src/components/DatasetsDrawer/DatasetsDrawer.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .DatasetsDrawer .TilePicker { 4 | margin: 20px 0; 5 | } 6 | 7 | .DatasetsDrawer a { 8 | color: var(--secondary); 9 | } 10 | -------------------------------------------------------------------------------- /frontend/src/components/DatasetsDrawer/DatasetsDrawer.tsx: -------------------------------------------------------------------------------- 1 | import { memoize } from 'lodash' 2 | // import Add from 'material-ui/svg-icons/content/add-circle' 3 | import * as React from 'react' 4 | import { CONFIG } from '../../config/config' 5 | import { DATASET_COLORS } from '../../constants/colors' 6 | import { DATASETS } from '../../constants/datasets' 7 | import { Dataset } from '../../constants/datatypes' 8 | import { Store, withStore } from '../../services/store' 9 | import { SecureLink } from '../../utils/link' 10 | import { Tile, TilePicker } from '../TilePicker/TilePicker' 11 | import './DatasetsDrawer.css' 12 | 13 | export const UPLOAD_NEW_DATASET_TILE: Tile = { 14 | color: '#237900', 15 | description: 'Analyze your own dataset of providers, facilities, or services', 16 | data: { 17 | dataSources: '', 18 | description: '', 19 | state: 'ca', 20 | name: '', 21 | hint: '', 22 | subtitle: '', 23 | providers: [], 24 | serviceAreaIds: [] 25 | }, 26 | name: Analyze Your Own Data 27 | } 28 | 29 | let toTiles = memoize((datasets: Dataset[]): Tile[] => 30 | datasets.map((_, n) => ({ 31 | color: DATASET_COLORS[n], 32 | data: _, 33 | description: _.subtitle, 34 | name: _.name 35 | }) 36 | ) 37 | ) 38 | 39 | let tiles = [ 40 | ...toTiles(DATASETS), 41 | ...(CONFIG.dataset.enable_upload_your_own ? 
[UPLOAD_NEW_DATASET_TILE] : []) 42 | ] 43 | let DatasetTilePicker = TilePicker() 44 | 45 | export let DatasetsDrawer = withStore(({ store }) => 46 |
47 |

Welcome to Encompass, a geographic analysis tool built by {SecureLink('http://www.bayesimpact.org', 'Bayes Impact')}. Choose one of the datasets below to begin exploring the accessibility of health care services in different regions of the U.S.

48 | _.data === store.get('selectedDataset')) || null} 52 | /> 53 |

Don't see the dataset you want? To send us feedback or ideas for new datasets, email us at {SecureLink('mailto:encompass@bayesimpact.org?subject=Request a dataset', 'encompass@bayesimpact.org')}.

54 |
55 | ) 56 | 57 | function onChange(store: Store) { 58 | return (tile: Tile) => { 59 | if (tile === UPLOAD_NEW_DATASET_TILE) { 60 | // TODO: Show Analyze Your Own Dataset Modal modal 61 | store.set('route')('/add-data') 62 | return 63 | } 64 | store.set('selectedDataset')(tile.data) 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /frontend/src/components/DownloadAnalysisLink/DownloadAnalysisLink.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .DownloadAnalysisLink { 4 | color: var(--primary-dark); 5 | } 6 | -------------------------------------------------------------------------------- /frontend/src/components/DownloadAnalysisLink/DownloadAnalysisLink.tsx: -------------------------------------------------------------------------------- 1 | import FlatButton from 'material-ui/FlatButton' 2 | import DownloadIcon from 'material-ui/svg-icons/file/file-download' 3 | import * as React from 'react' 4 | import * as ReactGA from 'react-ga' 5 | import { CONFIG } from '../../config/config' 6 | import { Store, withStore } from '../../services/store' 7 | import { download } from '../../utils/download' 8 | import { buildCsvFromData, getCsvName, getStaticCsvUrl } from './BuildCSV' 9 | 10 | import './DownloadAnalysisLink.css' 11 | 12 | const useStaticCsvs: boolean = CONFIG.staticAssets.csv.useStaticCsvs 13 | 14 | export let DownloadAnalysisLink = withStore(({ store }) => 15 | } 18 | label='Download' 19 | labelPosition='before' 20 | onClick={() => onClick(store)} 21 | /> 22 | ) 23 | 24 | DownloadAnalysisLink.displayName = 'DownloadAnalysisLink' 25 | 26 | function onClick(store: Store) { 27 | // Get which dataset/method to produce CSV for. 28 | const method = store.get('method') 29 | const selectedDataset = store.get('selectedDataset') 30 | 31 | // Send GA event. 
32 | ReactGA.event({ 33 | category: 'Analysis', 34 | action: 'Downloaded analysis results', 35 | label: selectedDataset ? selectedDataset.name : 'Unknown Dataset' 36 | }) 37 | 38 | const csvName = getCsvName(selectedDataset, method) 39 | if (useStaticCsvs) { // If in production, use the cached static CSVs. 40 | const staticCsvUrl = getStaticCsvUrl(csvName) 41 | window.open(staticCsvUrl) 42 | } else { // Otherwise, generate the CSV. 43 | if (store.get('serviceAreas').length > 100 && !confirm('Preparing the file for this state may take a couple of minutes. \n\nPress OK to continue.')) { 44 | return 45 | } 46 | let csv = buildCsvFromData( 47 | method, 48 | store.get('serviceAreas'), 49 | store.get('adequacies'), 50 | store.get('representativePoints') 51 | ) 52 | download(csv, 'text/csv', csvName) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /frontend/src/components/ErrorBar/ErrorBar.css: -------------------------------------------------------------------------------- 1 | /* Override Material-UI inline styles. 
*/ 2 | .ErrorBar > div { 3 | background: rgba(191, 4, 4, 0.88) !important; 4 | max-width: 800px !important; 5 | text-align: center; 6 | } 7 | 8 | .SuccessBar > div { 9 | background: rgba(0, 204, 102, 0.88) !important; 10 | max-width: 800px !important; 11 | text-align: center; 12 | } 13 | -------------------------------------------------------------------------------- /frontend/src/components/ErrorBar/ErrorBar.tsx: -------------------------------------------------------------------------------- 1 | import Snackbar from 'material-ui/Snackbar' 2 | import * as React from 'react' 3 | import './ErrorBar.css' 4 | 5 | type Props = { 6 | message: string | null 7 | onClose(): void 8 | } 9 | 10 | const THIRTY_SECONDS = 30000 11 | 12 | export let ErrorBar: React.StatelessComponent = ({ message, onClose }) => 13 | 20 | 21 | export let SuccessBar: React.StatelessComponent = ({ message, onClose }) => 22 | 29 | -------------------------------------------------------------------------------- /frontend/src/components/FilterBar/FilterBar.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .FilterBar { 4 | display: flex; 5 | padding-bottom: 10px; 6 | position: fixed; 7 | right: 20px; 8 | top: calc(var(--header-height) + 20px); 9 | z-index: 1; 10 | } 11 | 12 | .FilterBar .Filter { 13 | color: var(--muted); 14 | display: flex; 15 | flex-direction: column; 16 | } 17 | 18 | /* So filter bar doesn't suddenly resize when user changes dropdown */ 19 | .FilterBar .Filter.-FixedWidthBig { 20 | width: 222px; 21 | } 22 | 23 | /* So filter bar doesn't suddenly resize when user changes dropdown */ 24 | .FilterBar .Filter.-FixedWidthSmall { 25 | width: 160px; 26 | } 27 | 28 | .FilterBar .Filter > span { 29 | margin: 20px 0 -17px 24px; 30 | } 31 | -------------------------------------------------------------------------------- /frontend/src/components/FilterBar/FilterBar.tsx: 
-------------------------------------------------------------------------------- 1 | import DropDownMenu from 'material-ui/DropDownMenu' 2 | import MenuItem from 'material-ui/MenuItem' 3 | import Paper from 'material-ui/Paper' 4 | import * as React from 'react' 5 | import * as ReactGA from 'react-ga' 6 | import { CONFIG } from '../../config/config' 7 | import { withStore } from '../../services/store' 8 | import './FilterBar.css' 9 | 10 | export let FilterBar = withStore(({ store }) => { 11 | return 12 |
13 | Measure 14 | { 17 | ReactGA.event({ 18 | category: 'Adequacy', 19 | action: 'Selected an adequacy type', 20 | label: value 21 | }) 22 | store.set('method')(value) 23 | }} 24 | value={store.get('method')} 25 | > 26 | 27 | 28 | 29 | 30 |
31 |
32 | }) 33 | FilterBar.displayName = 'FilterBar' 34 | -------------------------------------------------------------------------------- /frontend/src/components/Header/Header.css: -------------------------------------------------------------------------------- 1 | .Header { 2 | background: #2b3e80 !important; /* Override material-ui style */ 3 | z-index: 1400 !important; 4 | } 5 | 6 | .Header a:link { 7 | text-decoration: none; 8 | color: white; 9 | } 10 | 11 | .Header a:visited { 12 | text-decoration: none; 13 | color: white; 14 | } 15 | -------------------------------------------------------------------------------- /frontend/src/components/Header/Header.tsx: -------------------------------------------------------------------------------- 1 | import AppBar from 'material-ui/AppBar' 2 | import FlatButton from 'material-ui/FlatButton' 3 | import IconButton from 'material-ui/IconButton' 4 | import { white } from 'material-ui/styles/colors' 5 | import GithubIcon from 'mui-icons/cmdi/github' 6 | import * as React from 'react' 7 | import { CONFIG } from '../../config/config' 8 | import { withStore } from '../../services/store' 9 | import { SecureLink } from '../../utils/link' 10 | import { AboutDialog } from '../AboutDialog/AboutDialog' 11 | import { MethodologyDialog } from '../MethodologyDialog/MethodologyDialog' 12 | import './Header.css' 13 | 14 | let title = 'Encompass' + CONFIG.title_suffix 15 | 16 | export let Header = withStore(({ store }) => 17 | < AppBar 18 | className='Header' 19 | title={SecureLink('.', title, '_self')} 20 | iconElementRight={ 21 |
22 | store.set('modal')(null)} /> 25 | store.set('modal')(null)} /> 28 | store.set('modal')('Methodology')} 31 | style={{ color: '#fff' }} /> 32 | store.set('modal')('About')} 35 | style={{ color: '#fff' }} /> 36 | 43 |
44 | } 45 | showMenuIconButton={false} 46 | /> 47 | ) 48 | 49 | Header.displayName = 'Header' 50 | -------------------------------------------------------------------------------- /frontend/src/components/LeftPane/LeftPane.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | 4 | .LeftPane hr { 5 | margin: 0 -20px; 6 | } 7 | 8 | .LeftPane h3 { 9 | line-height: 68px; 10 | } 11 | 12 | .LeftPane { 13 | overflow: visible; 14 | width: calc(var(--leftpane-width)) !important; 15 | min-width: var(--min-leftpane-width) !important; 16 | position: absolute; 17 | bottom: 0; 18 | left: 0; 19 | right: 0; 20 | top: 64px !important; 21 | } 22 | 23 | /* silly inline styles... */ 24 | .LeftPane > div { 25 | padding: 20px; 26 | position: absolute !important; 27 | width: calc(100%-40px) !important; 28 | } 29 | 30 | .LeftPane .DownloadDatasetLink { 31 | position: absolute !important; /* override material-ui inline style */ 32 | left: 20px; 33 | } 34 | 35 | .LeftPane .CountySelector { 36 | overflow-y: auto; 37 | position: absolute; 38 | bottom: 140px; 39 | right: 20px; 40 | left: 20px; 41 | top: 170px; 42 | } 43 | 44 | .LeftPane .CSVUploaderSection { 45 | position: absolute; 46 | bottom: 0; 47 | left: 20px; 48 | right: 20px; 49 | } 50 | 51 | .LeftPane .Drawer { 52 | padding-bottom: 100px; 53 | } 54 | -------------------------------------------------------------------------------- /frontend/src/components/LeftPane/LeftPane.tsx: -------------------------------------------------------------------------------- 1 | import { map } from 'lodash' 2 | import Drawer from 'material-ui-next/Drawer' 3 | import * as React from 'react' 4 | import { withStore } from '../../services/store' 5 | import { AddDatasetDrawer } from '../AddDatasetDrawer/AddDatasetDrawer' 6 | import { AnalyticsDrawer } from '../AnalyticsDrawer/AnalyticsDrawer' 7 | import { DatasetsDrawer } from '../DatasetsDrawer/DatasetsDrawer' 8 | import './LeftPane.css' 9 
| 10 | let drawers = { 11 | '/analytics': AnalyticsDrawer, 12 | '/datasets': DatasetsDrawer, 13 | '/add-data': AddDatasetDrawer 14 | } 15 | 16 | export let LeftPane = withStore(({ store }) => 17 | <> 18 | {map(drawers, (Component, route) => 19 | 25 | 26 | 27 | )} 28 | 29 | ) 30 | LeftPane.displayName = 'LeftPane' 31 | -------------------------------------------------------------------------------- /frontend/src/components/Link/Link.css: -------------------------------------------------------------------------------- 1 | .Link { 2 | cursor: pointer; 3 | text-decoration: none; 4 | } -------------------------------------------------------------------------------- /frontend/src/components/Link/Link.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import { Route } from '../../constants/datatypes' 3 | import { withStore } from '../../services/store' 4 | import './Link.css' 5 | 6 | type Props = { 7 | className?: string 8 | onClick?(e: React.MouseEvent): void 9 | to: Route 10 | } 11 | 12 | export let Link = withStore(({ children, className, onClick, store, to }) => 13 | { 16 | e.preventDefault() 17 | if (onClick) { 18 | onClick(e) 19 | } 20 | store.set('route')(to) 21 | }} 22 | >{children} 23 | ) 24 | Link.displayName = 'Link' 25 | 26 | export let BackLink = withStore(({ store }) => 27 | { 30 | store.set('uploadedServiceAreasFilename')(null) 31 | store.set('uploadedProvidersFilename')(null) 32 | store.set('selectedDataset')(null) 33 | } 34 | } 35 | to='/datasets' 36 | > ⟵ Back to datasets 37 | ) 38 | -------------------------------------------------------------------------------- /frontend/src/components/MapLegend/MapLegend.css: -------------------------------------------------------------------------------- 1 | .MapLegend { 2 | background: #fff; 3 | border-radius: 2px; 4 | box-shadow: rgba(0, 0, 0, 0.12) 0px 1px 6px, rgba(0, 0, 0, 0.12) 0px 1px 4px; 5 | padding: 10px; 6 | } 7 | 8 | .MapLegend > li 
{ 9 | line-height: 24px; 10 | list-style: none; 11 | } 12 | 13 | .MapLegend .Splotch { 14 | display: inline-block; 15 | height: 16px; 16 | margin-right: 5px; 17 | position: relative; 18 | bottom: -3px; 19 | width: 16px; 20 | border-radius: 50%; 21 | opacity: 0.8; 22 | } 23 | -------------------------------------------------------------------------------- /frontend/src/components/MapLegend/MapLegend.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import { ADEQUACY_COLORS } from '../../constants/colors' 3 | import { AdequacyMode } from '../../constants/datatypes' 4 | import { withStore } from '../../services/store' 5 | import { getLegend } from '../../utils/adequacy' 6 | import './MapLegend.css' 7 | 8 | export let MapLegend = withStore(({ store }) => 9 |
    10 | Population by Access 11 |
  • {store.get('selectedCensusGroup')}
  • 12 |
  • {getLegend(store.get('method'), AdequacyMode.ADEQUATE_0)}
  • 16 |
  • {getLegend(store.get('method'), AdequacyMode.ADEQUATE_1)}
  • 20 |
  • {getLegend(store.get('method'), AdequacyMode.ADEQUATE_2)}
  • 24 |
  • {getLegend(store.get('method'), AdequacyMode.INADEQUATE)}
  • 28 |
29 | ) 30 | -------------------------------------------------------------------------------- /frontend/src/components/MapTooltip/MapTooltip.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | :root { 4 | --background: rgba(0, 0, 0, .8); 5 | --text: rgba(255, 255, 255, .8); 6 | } 7 | 8 | .MapTooltip .mapboxgl-popup-content { 9 | background-color: var(--background); 10 | } 11 | 12 | .MapTooltip .mapboxgl-popup-tip { 13 | border-top-color: var(--background) !important; 14 | } 15 | 16 | .MapTooltip table { 17 | font-size: var(--medium-font-size); 18 | color: var(--text); 19 | } 20 | 21 | .MapTooltip td { 22 | padding: 0 10px; 23 | } 24 | 25 | .MapTooltip a { 26 | font-weight: var(--medium-weight) !important; 27 | color: var(--text); 28 | } 29 | 30 | .MapTooltip tr :first-child { 31 | font-weight: var(--heavy-weight); 32 | } 33 | 34 | .MapTooltip .controls { 35 | font-size: var(--medium-font-size); 36 | color: var(--text); 37 | } 38 | 39 | .MapTooltip .icon-button { 40 | color: var(--text) !important; 41 | } 42 | -------------------------------------------------------------------------------- /frontend/src/components/MapTooltip/TableRow.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import { formatNumber } from '../../utils/formatters' 3 | import { SecureLink } from '../../utils/link' 4 | import { capitalizeWords } from '../../utils/string' 5 | 6 | type Props = { 7 | name: string 8 | value: string 9 | } 10 | 11 | export let TableRow: React.StatelessComponent = ({ name, value }) => { 12 | if (!value) { 13 | return null 14 | } 15 | if (value.toString().includes('https://www.google.com/maps')) { 16 | return 17 | {capitalizeWords(name)} 18 | {SecureLink(value, 'View on Google Maps')} 19 | 20 | } 21 | value = isNaN(Number(value)) ? 
value : formatNumber(Number(value)) 22 | return 23 | {capitalizeWords(name)} 24 | {value} 25 | 26 | } 27 | -------------------------------------------------------------------------------- /frontend/src/components/MapView/MapView.css: -------------------------------------------------------------------------------- 1 | @import "mapbox-gl/dist/mapbox-gl.css"; 2 | @import "../../index.css"; 3 | 4 | .MapView { 5 | background: var(--map-background); 6 | position: fixed; 7 | bottom: 0; 8 | left: var(--leftpane-width); 9 | right: 0; 10 | top: var(--header-height); 11 | } 12 | 13 | .MapView .MapLegend { 14 | position: absolute; 15 | right: 20px; 16 | top: 112px; 17 | } 18 | 19 | .MapView .mapboxgl-map { 20 | height: 100%; 21 | width: 100%; 22 | } 23 | 24 | .popup-container .mapboxgl-popup-content { 25 | background-color: rgba(0, 0, 0, 0.7); 26 | } 27 | 28 | .table-value { 29 | padding-left: 15; 30 | } 31 | 32 | .popup-table { 33 | font-size: var(--medium-font-size); 34 | color: rgba(255, 255, 255, 0.8); 35 | } 36 | -------------------------------------------------------------------------------- /frontend/src/components/MethodologyDialog/MethodologyDialog.css: -------------------------------------------------------------------------------- 1 | .Methodology p { 2 | padding: 0px 0px 12px; 3 | } -------------------------------------------------------------------------------- /frontend/src/components/Selectors/CensusCategorySelector.tsx: -------------------------------------------------------------------------------- 1 | import { chain } from 'lodash' 2 | import { DropDownMenu } from 'material-ui/DropDownMenu' 3 | import MenuItem from 'material-ui/MenuItem' 4 | import * as React from 'react' 5 | import * as ReactGA from 'react-ga' 6 | import { CENSUS_MAPPING } from '../../constants/census' 7 | import { capitalizeWords, snakeCase } from '../../utils/string' 8 | 9 | type Props = { 10 | onChange(censusCategory: string | null): void 11 | value: string | null 12 | className?: string 
13 | } 14 | 15 | let menuItems = chain(CENSUS_MAPPING) 16 | .keys() 17 | .map(_ => ) 18 | .value() 19 | 20 | export let CensusCategorySelector: React.StatelessComponent = ({ onChange, value, className }) => 21 | { 24 | ReactGA.event({ 25 | category: 'Census Category', 26 | action: 'Selected a demographic', 27 | label: value 28 | }) 29 | onChange(snakeCase(value)) 30 | }} 31 | value={value}> 32 | {menuItems} 33 | 34 | 35 | CensusCategorySelector.displayName = 'CensusCategorySelector' 36 | -------------------------------------------------------------------------------- /frontend/src/components/Selectors/CountyTypeSelector.tsx: -------------------------------------------------------------------------------- 1 | import { chain } from 'lodash' 2 | import { DropDownMenu } from 'material-ui/DropDownMenu' 3 | import MenuItem from 'material-ui/MenuItem' 4 | import * as React from 'react' 5 | import { CountyType } from '../../constants/datatypes' 6 | 7 | type Props = { 8 | className?: string 9 | onChange(format: CountyType): void 10 | value: CountyType | null 11 | } 12 | 13 | const styles = { 14 | customWidth: { 15 | width: 120 16 | } 17 | } 18 | 19 | let options: CountyType[] = ['All', 'Rural', 'Urban'] 20 | let menuItems = chain(options).map( 21 | _ => 22 | ).value() 23 | 24 | export let CountyTypeSelector: React.StatelessComponent = ({ className, onChange, value }) => 25 | onChange(value)} 28 | value={value} 29 | style={styles.customWidth}> 30 | {menuItems} 31 | 32 | 33 | CountyTypeSelector.displayName = 'CountyTypeSelector' 34 | -------------------------------------------------------------------------------- /frontend/src/components/Selectors/FilterMethodSelector.tsx: -------------------------------------------------------------------------------- 1 | import { chain } from 'lodash' 2 | import { DropDownMenu } from 'material-ui/DropDownMenu' 3 | import MenuItem from 'material-ui/MenuItem' 4 | import * as React from 'react' 5 | import * as ReactGA from 'react-ga' 6 | 
import { FilterMethod } from '../../constants/datatypes' 7 | 8 | type Props = { 9 | className?: string 10 | onChange(format: FilterMethod): void 11 | value: FilterMethod 12 | } 13 | 14 | let options: FilterMethod[] = ['All', 'County Name', 'County Type'] 15 | let menuItems = chain(options).map( 16 | _ => 17 | ).value() 18 | 19 | export let FilterMethodSelector: React.StatelessComponent = ({ className, onChange, value }) => 20 | { 23 | ReactGA.event({ 24 | category: 'Filter', 25 | action: 'Selected a filter type', 26 | label: value 27 | }) 28 | onChange(value) 29 | }} 30 | value={value}> 31 | {menuItems} 32 | 33 | 34 | FilterMethodSelector.displayName = 'FilterMethodSelector' 35 | -------------------------------------------------------------------------------- /frontend/src/components/Selectors/FormatSelector.tsx: -------------------------------------------------------------------------------- 1 | import { chain } from 'lodash' 2 | import { DropDownMenu } from 'material-ui/DropDownMenu' 3 | import MenuItem from 'material-ui/MenuItem' 4 | import * as React from 'react' 5 | import { Format } from '../../constants/datatypes' 6 | 7 | type Props = { 8 | onChange(format: Format): void 9 | value: Format 10 | className?: string 11 | } 12 | 13 | let options: Format[] = ['Percentage', 'Number'] 14 | let menuItems = chain(options).map( 15 | _ => 16 | ).value() 17 | 18 | export let FormatSelector: React.StatelessComponent = ({ onChange, value, className }) => 19 | onChange(value)} 22 | value={value}> 23 | {menuItems} 24 | 25 | 26 | FormatSelector.displayName = 'FormatSelector' 27 | -------------------------------------------------------------------------------- /frontend/src/components/Selectors/SelectorBlock.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .Selectors { 4 | margin: 0px 0 30px; 5 | } 6 | 7 | .Selectors .Menu { 8 | font: var(--font) !important; 9 | vertical-align: bottom; 10 | height: 
40px !important; 11 | } 12 | 13 | .Selectors h4 { 14 | width: var(--analytics-title-width); 15 | vertical-align: middle !important; 16 | } 17 | 18 | .SelectorRow * { 19 | display: inline-block !important; 20 | } 21 | 22 | .Menu.MultiSelect > div > div + div { 23 | width: 55% !important; 24 | } 25 | -------------------------------------------------------------------------------- /frontend/src/components/Selectors/SelectorBlock.tsx: -------------------------------------------------------------------------------- 1 | 2 | import * as React from 'react' 3 | import { CONFIG } from '../../config/config' 4 | import { FilterMethod } from '../../constants/datatypes' 5 | import { Store, withStore } from '../../services/store' 6 | import { CensusCategorySelector } from './CensusCategorySelector' 7 | import { CountyTypeSelector } from './CountyTypeSelector' 8 | import { FilterMethodSelector } from './FilterMethodSelector' 9 | import { FormatSelector } from './FormatSelector' 10 | import './SelectorBlock.css' 11 | import { ServiceAreaSelector } from './ServiceAreaSelector' 12 | import { StateSelector } from './StateSelector' 13 | 14 | // TODO - Consider abstracting the Selector class to avoid duplications. 15 | export let SelectorBlock = withStore(({ store }) => { 16 | return ( 17 |
18 | {store.get('selectedDataset') && store.get('selectedDataset')!.usaWide ?
19 |

State

20 | 24 |
: null} 25 |
26 |

County Filter

27 | 32 | {selectorByMethod(store.get('selectedFilterMethod'), store)} 33 |
34 | {CONFIG.is_census_data_available ?
35 |

Demographic

36 | 41 |
: null} 42 |
43 |

Values

44 | 49 |
50 |
51 | ) 52 | }) 53 | 54 | function selectorByMethod(method: FilterMethod, store: Store) { 55 | if (method === 'County Name') { 56 | return 61 | } 62 | if (method === 'County Type') { 63 | return 68 | } 69 | return null 70 | } 71 | 72 | SelectorBlock.displayName = 'SelectorBlock' 73 | -------------------------------------------------------------------------------- /frontend/src/components/Selectors/ServiceAreaSelector.tsx: -------------------------------------------------------------------------------- 1 | import { chain } from 'lodash' 2 | import { DropDownMenu } from 'material-ui/DropDownMenu' 3 | import MenuItem from 'material-ui/MenuItem' 4 | import * as React from 'react' 5 | import { withStore } from '../../services/store' 6 | import { capitalizeWords, snakeCase } from '../../utils/string' 7 | 8 | type Props = { 9 | className?: string 10 | onChange(values: string[] | null): void 11 | values: string[] | null 12 | } 13 | 14 | const styles = { 15 | customWidth: { 16 | width: 200 17 | } 18 | } 19 | 20 | export let ServiceAreaSelector = withStore(({ className, onChange, store, values }) => { 21 | let menuItems = chain(store.get('counties')) 22 | .map(_ => ) 23 | .value() 24 | return onChange(value.map((_: string) => snakeCase(_)))} 28 | value={values} 29 | maxHeight={200} 30 | style={styles.customWidth} 31 | autoWidth={false} 32 | > 33 | {menuItems} 34 | 35 | }) 36 | 37 | ServiceAreaSelector.displayName = 'ServiceAreaSelector' 38 | -------------------------------------------------------------------------------- /frontend/src/components/Selectors/StateSelector.tsx: -------------------------------------------------------------------------------- 1 | import { DropDownMenu } from 'material-ui/DropDownMenu' 2 | import MenuItem from 'material-ui/MenuItem' 3 | import * as React from 'react' 4 | import * as ReactGA from 'react-ga' 5 | import { State, STATES } from '../../constants/states' 6 | import { Store } from '../../services/store' 7 | 8 | type StateSelectorProps = { 9 
| className?: string 10 | value: State | null 11 | store: Store 12 | } 13 | 14 | /** 15 | * This State Selector is used to select a state to view for US-wide datasets. These datasets 16 | * include points for the entire US and allow the user to select which states to view. 17 | */ 18 | export let StateSelector: React.StatelessComponent = ({ className, store, value }) => 19 | onStateChange(value, store)} 22 | value={value} 23 | autoWidth={true}> 24 | {STATES.map(_ => 25 | 26 | )} 27 | 28 | 29 | /** 30 | * As part of State selection, send an event to GA. 31 | */ 32 | function onStateChange(value: State, store: Store) { 33 | ReactGA.event({ 34 | category: 'Filter', 35 | action: 'Selected a state in a US-wide dataset', 36 | label: value.toString() 37 | }) 38 | store.set('selectedState')(value) 39 | store.set('useCustomCountyUpload')(false) 40 | store.set('selectedCounties')(null) 41 | } 42 | 43 | StateSelector.displayName = 'StateSelector' 44 | -------------------------------------------------------------------------------- /frontend/src/components/StateCountySelector/StateCountySelector.tsx: -------------------------------------------------------------------------------- 1 | import MenuItem from 'material-ui/MenuItem' 2 | import SelectField from 'material-ui/SelectField' 3 | import * as React from 'react' 4 | import { CONFIG } from '../../config/config' 5 | import { State, STATES } from '../../constants/states' 6 | import { Store, withStore } from '../../services/store' 7 | 8 | export let StateCountySelector = withStore(({ store }) => 9 |
10 | onStateChange(state, store)} 12 | value={store.get('selectedState')} 13 | /> 14 | store.set('useCustomCountyUpload')(value)} 16 | value={store.get('useCustomCountyUpload')} 17 | /> 18 |
19 | ) 20 | 21 | function onStateChange(state: State, store: Store) { 22 | store.set('selectedState')(state) 23 | store.set('uploadedServiceAreasFilename')(null) 24 | } 25 | 26 | type CountySelectorProps = { 27 | onChange(value: boolean | null): void 28 | value: boolean | null 29 | } 30 | 31 | let CountySelector: React.StatelessComponent = ({ onChange, value }) => 32 | onChange(value)} 34 | value={value} 35 | floatingLabelText='Counties' 36 | hintText='Select a county' 37 | floatingLabelFixed={true} 38 | autoWidth={false} 39 | style={{ width: 150, paddingLeft: 10 }} 40 | > 41 | 42 | 43 | 44 | 45 | StateCountySelector.displayName = 'StateCountySelector' 46 | 47 | type StateSelectorProps = { 48 | onChange(value: State): void 49 | value: string 50 | } 51 | 52 | let StateSelector: React.StatelessComponent = ({ onChange, value }) => 53 | onChange(value)} 55 | value={value} 56 | floatingLabelText='State' 57 | floatingLabelFixed={true} 58 | autoWidth={false} 59 | style={{ width: 200 }} 60 | > 61 | {STATES.map(_ => 62 | 63 | )} 64 | 65 | 66 | StateSelector.displayName = 'StateSelector' 67 | -------------------------------------------------------------------------------- /frontend/src/components/StatsBox/StatsBox.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .StatsBox { 4 | border-collapse: collapse; 5 | width: 100%; 6 | } 7 | 8 | .StatsBox td, 9 | .StatsBox th { 10 | padding-left: 7px; 11 | padding-right: 7px; 12 | height: 48px; 13 | } 14 | 15 | .StatsBox td { 16 | font-size: var(--medium-font-size); 17 | } 18 | 19 | .StatsBox th { 20 | color: var(--muted); 21 | font-weight: var(--medium-weight); 22 | text-align: left; 23 | } 24 | 25 | .StatsBox.-withBorders { 26 | border: 1px solid var(--verylight); 27 | /* margin: 10px 0 0px; */ 28 | /* padding: 10px 0; */ 29 | } 30 | 31 | .StatsBox.-withFixedColumns { 32 | table-layout: fixed; 33 | } 34 | 35 | .StatsBox.-withBorders td, 36 | 
.StatsBox.-withBorders th { 37 | border-right: 1px solid var(--verylight); 38 | } 39 | 40 | .StatsBox.-withBorders td, 41 | .StatsBox.-withBorders th { 42 | border-right: 1px solid var(--verylight); 43 | } 44 | 45 | .StatsBox.-withHorizontalLines td { 46 | border-top: 1px solid var(--verylight); 47 | border-right: 0; 48 | padding: 5px 7px; 49 | } 50 | 51 | .StatsBox.-withSingleRow th { 52 | border-right: none; 53 | } 54 | -------------------------------------------------------------------------------- /frontend/src/components/StatsBox/StatsBox.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import './StatsBox.css' 3 | 4 | type Props = { 5 | className?: string 6 | withFixedColumns?: true 7 | withBorders?: true 8 | withHorizontalLines?: true 9 | withSingleRow?: true 10 | } 11 | 12 | export let StatsBox: React.StatelessComponent = ({ 13 | children, className, withFixedColumns, withBorders, withHorizontalLines, withSingleRow 14 | }) => 15 | 24 | {children} 25 |
26 | -------------------------------------------------------------------------------- /frontend/src/components/TilePicker/TilePicker.css: -------------------------------------------------------------------------------- 1 | @import "../../index.css"; 2 | 3 | .TilePicker { 4 | display: flex; 5 | flex-wrap: wrap; 6 | justify-content: space-between; 7 | } 8 | 9 | .TilePicker > .Tile { 10 | flex: 0 49%; 11 | list-style: none; 12 | } 13 | 14 | .Tile { 15 | box-shadow: 0 5px 3px rgba(0, 0, 0, .2); 16 | cursor: pointer; 17 | filter: grayscale(.7) brightness(1.5); 18 | margin-bottom: 15px; 19 | transform: scale(.96, .96); 20 | transition: all .2s cubic-bezier(0.175, 0.885, 0.32, 1.275); 21 | } 22 | .Tile:hover, 23 | .Tile.-Active { 24 | filter: grayscale(0) brightness(1); 25 | transform: scale(1, 1); 26 | } 27 | .Tile:active { 28 | filter: grayscale(0) brightness(.9); 29 | } 30 | 31 | .Tile h2 { 32 | background: #666; 33 | color: #fff; 34 | line-height: 24px; 35 | font-size: var(--giant-font-size); 36 | font-weight: var(--medium-weight); 37 | padding: 80px 10px 10px; 38 | text-transform: none; 39 | height: 52px; 40 | } 41 | 42 | .Tile p { 43 | font-size: var(--medium-font-size); 44 | padding: 10px; 45 | } 46 | 47 | .Tile svg { 48 | height: 40px !important; 49 | position: relative; 50 | bottom: -12px; 51 | width: 40px !important; 52 | } 53 | 54 | .Tile path { 55 | fill: #fff; 56 | } 57 | -------------------------------------------------------------------------------- /frontend/src/components/TilePicker/TilePicker.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import * as ReactGA from 'react-ga' 3 | import './TilePicker.css' 4 | 5 | export type Tile = { 6 | color: string 7 | description: string 8 | data: T 9 | name: string | JSX.Element 10 | } 11 | 12 | type Props = { 13 | onChange(tile: Tile): void 14 | tiles: Tile[] 15 | value: Tile | null 16 | } 17 | 18 | export function TilePicker() { 19 | 
return class TilePicker extends React.Component> { 20 | render() { 21 | return
    22 | {this.props.tiles.map(tile => 23 |
  • { 27 | // We need to do a check here, because the tile for uploading a custom dataset doesn't have a stringy name attribute. 28 | let eventLabel = typeof tile.name === 'string' ? tile.name : 'Analyze Your Own Data' 29 | ReactGA.event({ 30 | category: 'Dataset', 31 | action: 'Selected a dataset', 32 | label: eventLabel 33 | }) 34 | this.props.onChange(tile) 35 | }} 36 | > 37 |

    {tile.name}

    38 |

    {tile.description}

    39 |
  • 40 | )} 41 |
42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /frontend/src/components/Uploader/Uploader.css: -------------------------------------------------------------------------------- 1 | .Uploader span{ 2 | max-width: 150px; 3 | } -------------------------------------------------------------------------------- /frontend/src/config/config.ts: -------------------------------------------------------------------------------- 1 | // import * as fs from 'fs' 2 | import { Config } from '../constants/datatypes' 3 | 4 | export let CONFIG: Config 5 | 6 | // tslint:disable:no-var-requires 7 | CONFIG = require('../../../shared/config.json') 8 | if (process.env.ENV !== undefined) { 9 | CONFIG.title_suffix = (process.env.ENV !== 'PRD' ? ' - ' + process.env.ENV : '') 10 | } 11 | 12 | CONFIG.api.backend_root = process.env.API_ROOT || CONFIG.api.backend_root 13 | -------------------------------------------------------------------------------- /frontend/src/constants/api/adequacies-request.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 5 | */ 6 | 7 | /** 8 | * Request shape for POST /api/adequacies/ 9 | */ 10 | export interface PostAdequaciesRequest { 11 | /** 12 | * Method used to calculate times and distances 13 | */ 14 | method: 'driving_time' | 'straight_line' | 'walking_time' 15 | providers: { 16 | /** 17 | * An arbitrary ID that is locally unique within this request 18 | */ 19 | id: number 20 | latitude: number 21 | longitude: number 22 | }[] 23 | /** 24 | * Service area IDs in the format "state_city_zip". 
25 | * For example, ["ca_san_francisco_94014", "ca_san_francisco_94015"] 26 | */ 27 | service_area_ids: string[] 28 | /** 29 | * Hint to help the backend use cached adequacy results. 30 | */ 31 | dataset_hint?: string 32 | } 33 | -------------------------------------------------------------------------------- /frontend/src/constants/api/adequacies-response.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 5 | */ 6 | 7 | /** 8 | * Response shape for POST /api/adequacies/ 9 | */ 10 | export type PostAdequaciesResponse = { 11 | id: number 12 | /** 13 | * Measure (in meters or minutes) to the closest provider 14 | */ 15 | to_closest_provider: number 16 | closest_providers: number[] 17 | closest_location: { 18 | latitude: number 19 | longitude: number 20 | } 21 | }[] 22 | -------------------------------------------------------------------------------- /frontend/src/constants/api/available-service-areas-response.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 5 | */ 6 | 7 | /** 8 | * Response shape for GET /api/available-service-areas/ 9 | */ 10 | export type GetAvailableServiceAreasResponse = [string, string, string, string, string][] 11 | -------------------------------------------------------------------------------- /frontend/src/constants/api/census-data-response.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. 
Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 5 | */ 6 | 7 | export type CensusCategoryInfo = { 8 | [k: string]: number 9 | }[] 10 | 11 | /** 12 | * Response shape for POST /api/census-data-by-service-area/ 13 | */ 14 | export interface PostCensusDataResponse { 15 | [k: string]: { 16 | [k: string]: CensusCategoryInfo 17 | }[] 18 | } 19 | -------------------------------------------------------------------------------- /frontend/src/constants/api/geocode-request.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 5 | */ 6 | 7 | /** 8 | * Request shape for POST /api/geocode/ 9 | */ 10 | export interface PostGeocodeRequest { 11 | addresses: string[] 12 | } 13 | -------------------------------------------------------------------------------- /frontend/src/constants/api/geocode-response.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 
5 | */ 6 | 7 | /** 8 | * Response shape for POST /api/geocode/ 9 | */ 10 | export type PostGeocodeResponse = (Success | Error)[] 11 | 12 | export interface Success { 13 | status: 'success' 14 | lat: number 15 | lng: number 16 | } 17 | export interface Error { 18 | status: 'error' 19 | message: string 20 | } 21 | -------------------------------------------------------------------------------- /frontend/src/constants/api/representative-points-request.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 5 | */ 6 | 7 | /** 8 | * Request shape for POST /api/representative_points/ 9 | */ 10 | export interface PostRepresentativePointsRequest { 11 | /** 12 | * Service area IDs in the format "state_city_zip". 13 | * For example, ["ca_san_francisco_94014", "ca_san_francisco_94015"] 14 | */ 15 | service_area_ids: string[] 16 | /** 17 | * Defines if frontend requests census data at the representative point level. 18 | */ 19 | include_census_data?: boolean 20 | } 21 | -------------------------------------------------------------------------------- /frontend/src/constants/api/representative-points-response.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated by json-schema-to-typescript. 3 | * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, 4 | * and run json-schema-to-typescript to regenerate this file. 
5 | */ 6 | 7 | export type Bucket = { 8 | name: string 9 | values: number[] 10 | }[] 11 | /** 12 | * Response shape for POST /api/representative_points/ 13 | */ 14 | export type PostRepresentativePointsResponse = { 15 | county: string 16 | demographics?: { 17 | buckets: Bucket 18 | category: string 19 | }[] 20 | id: number 21 | lat: number 22 | lng: number 23 | population: number 24 | service_area_id: string 25 | zip: string 26 | }[] 27 | -------------------------------------------------------------------------------- /frontend/src/constants/census.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file was automatically generated. 3 | * DO NOT MODIFY IT BY HAND. Instead, run "yarn codegen" to regenerate it. 4 | */ 5 | import { CONFIG } from '../config/config' 6 | 7 | type censusMapping = { 8 | [category: string]: string[] 9 | } 10 | 11 | export const CENSUS_MAPPING_ERROR = 'No Census Mapping Detected' 12 | 13 | export let CENSUS_MAPPING: censusMapping = { 14 | age: [ 15 | '0-18 years', 16 | '19-25 years', 17 | '26-34 years', 18 | '35-54 years', 19 | '55-64 years', 20 | '65+ years' 21 | ], 22 | sex: ['Male', 'Female'], 23 | race: [ 24 | 'Hispanic or Latino (any race)', 25 | 'White', 26 | 'Black', 27 | 'American Indian & Alaska Native', 28 | 'Asian', 29 | 'Native Hawaiian & other Pacific Islander', 30 | 'Multiracial or Other' 31 | ], 32 | insurance: [ 33 | 'Private Health Insurance', 34 | 'Public Health Insurance', 35 | 'No Health Insurance' 36 | ], 37 | income: [ 38 | '$0 - $15k', 39 | '$15k - $50k', 40 | '$50k - $100k', 41 | '$100k - $150k', 42 | '$150k - $200k', 43 | '$200k+' 44 | ] 45 | } 46 | 47 | if (!CONFIG.is_census_data_available) { 48 | CENSUS_MAPPING = { unvailable: [] } 49 | } 50 | -------------------------------------------------------------------------------- /frontend/src/constants/colors.ts: -------------------------------------------------------------------------------- 1 | import { AdequacyMode } 
from './datatypes' 2 | 3 | export const ADEQUACY_COLORS: Record = { 4 | ADEQUATE_0: '#2b3e80', 5 | ADEQUATE_1: '#577dff', 6 | ADEQUATE_2: '#27c0cf', 7 | INADEQUATE: '#e9be5e', 8 | OUT_OF_SCOPE: 'transparent' 9 | } 10 | 11 | export const DATASET_COLORS = [ 12 | '#5f7c8a', 13 | '#47488e', 14 | '#478e8c', 15 | '#478e51', 16 | '#8e7947', 17 | '#8e4747', 18 | '#8e4770' 19 | ] 20 | -------------------------------------------------------------------------------- /frontend/src/constants/datasets.ts: -------------------------------------------------------------------------------- 1 | import { Dataset } from './datatypes' 2 | import { SERVICE_AREAS_BY_STATE } from './zipCodes' 3 | 4 | export let DATASETS: Dataset[] = [ 5 | // FIXME: Programatically read in datasets. 6 | // tslint:disable:no-var-requires 7 | inferServiceAreaIds(require('./datasets/US_mental_health.json')), 8 | inferServiceAreaIds(require('./datasets/US_snap_farmers_markets.json')), 9 | inferServiceAreaIds(require('./datasets/TX_texas_abortion_clinics_address_mar2017.json')), 10 | inferServiceAreaIds(require('./datasets/TX_HCSD_and_Look-Alike_TX.json')), 11 | inferServiceAreaIds(require('./datasets/MS_HCSD_and_Look-Alike_MS.json')), 12 | inferServiceAreaIds(require('./datasets/FL_HCSD_and_Look-Alike_FL.json')), 13 | inferServiceAreaIds(require('./datasets/FL_fl_endocrinologists.json')) 14 | ] 15 | 16 | export function inferServiceAreaIds(dataset: Dataset) { 17 | if (dataset.serviceAreaIds.length === 0) { 18 | dataset.serviceAreaIds = SERVICE_AREAS_BY_STATE[dataset.state] 19 | } 20 | return dataset 21 | } 22 | -------------------------------------------------------------------------------- /frontend/src/constants/map.ts: -------------------------------------------------------------------------------- 1 | // Centering the map over the US. 
2 | export const DEFAULT_MAP_CENTER = { 3 | lat: 37.765134, 4 | lng: -100.444687 5 | } 6 | 7 | export const DEFAULT_MAP_ZOOM = [3.5] 8 | -------------------------------------------------------------------------------- /frontend/src/constants/states.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * TODO: Codegen 3 | */ 4 | export const STATES = [ 5 | { shortName: 'al', longName: 'Alabama' }, 6 | { shortName: 'ak', longName: 'Alaska' }, 7 | { shortName: 'ar', longName: 'Arkansas' }, 8 | { shortName: 'az', longName: 'Arizona' }, 9 | { shortName: 'ca', longName: 'California' }, 10 | { shortName: 'co', longName: 'Colorado' }, 11 | { shortName: 'ct', longName: 'Connecticut' }, 12 | { shortName: 'de', longName: 'Delaware' }, 13 | { shortName: 'dc', longName: 'District of Columbia' }, 14 | { shortName: 'fl', longName: 'Florida' }, 15 | { shortName: 'ga', longName: 'Georgia' }, 16 | { shortName: 'hi', longName: 'Hawaii' }, 17 | { shortName: 'id', longName: 'Idaho' }, 18 | { shortName: 'il', longName: 'Illinois' }, 19 | { shortName: 'in', longName: 'Indiana' }, 20 | { shortName: 'ia', longName: 'Iowa' }, 21 | { shortName: 'ks', longName: 'Kansas' }, 22 | { shortName: 'ky', longName: 'Kentucky' }, 23 | { shortName: 'la', longName: 'Louisiana' }, 24 | { shortName: 'me', longName: 'Maine' }, 25 | { shortName: 'md', longName: 'Maryland' }, 26 | { shortName: 'ma', longName: 'Massachusetts' }, 27 | { shortName: 'mi', longName: 'Michigan' }, 28 | { shortName: 'mn', longName: 'Minnesota' }, 29 | { shortName: 'mo', longName: 'Missouri' }, 30 | { shortName: 'ms', longName: 'Mississippi' }, 31 | { shortName: 'mt', longName: 'Montana' }, 32 | { shortName: 'ne', longName: 'Nebraska' }, 33 | { shortName: 'nv', longName: 'Nevada' }, 34 | { shortName: 'nh', longName: 'New Hampshire' }, 35 | { shortName: 'nj', longName: 'New Jersey' }, 36 | { shortName: 'nm', longName: 'New Mexico' }, 37 | { shortName: 'ny', longName: 'New York' }, 38 | { 
shortName: 'nc', longName: 'North Carolina' }, 39 | { shortName: 'nd', longName: 'North Dakota' }, 40 | { shortName: 'oh', longName: 'Ohio' }, 41 | { shortName: 'ok', longName: 'Oklahoma' }, 42 | { shortName: 'or', longName: 'Oregon' }, 43 | { shortName: 'pa', longName: 'Pennsylvania' }, 44 | { shortName: 'pr', longName: 'Puerto Rico' }, 45 | { shortName: 'ri', longName: 'Rhode Island' }, 46 | { shortName: 'sc', longName: 'South Carolina' }, 47 | { shortName: 'sd', longName: 'South Dakota' }, 48 | { shortName: 'tn', longName: 'Tennessee' }, 49 | { shortName: 'tx', longName: 'Texas' }, 50 | { shortName: 'ut', longName: 'Utah' }, 51 | { shortName: 'vt', longName: 'Vermont' }, 52 | { shortName: 'va', longName: 'Virginia' }, 53 | { shortName: 'wa', longName: 'Washington' }, 54 | { shortName: 'wv', longName: 'West Virginia' }, 55 | { shortName: 'wi', longName: 'Wisconsin' }, 56 | { shortName: 'wy', longName: 'Wyoming' } 57 | ] 58 | 59 | /** 60 | * TODO: Codegen 61 | */ 62 | export type State = 'al' | 'ak' | 'ar' | 'az' | 'ca' | 'co' | 'ct' | 'de' | 'dc' | 'fl' | 'ga' | 'hi' | 'id' | 'il' | 'in' | 'ia' | 'ks' | 'ky' | 'la' | 'me' | 'md' | 'ma' | 'mi' | 'mn' | 'mo' | 'ms' | 'mt' | 'ne' | 'nh' | 'nj' | 'nm' | 'ny' | 'nc' | 'nd' | 'nv' | 'oh' | 'ok' | 'or' | 'pa' | 'ri' | 'sc' | 'sd' | 'tn' | 'tx' | 'ut' | 'vt' | 'va' | 'wa' | 'wv' | 'wi' | 'wy' | 'pr' 63 | -------------------------------------------------------------------------------- /frontend/src/constants/zipCodes.ts: -------------------------------------------------------------------------------- 1 | import { chain, flatten, flow, keys, mapValues, sortBy, values } from 'lodash' 2 | import { serializeServiceArea } from '../utils/serializers' 3 | import { State } from './states' 4 | import { ZIPS_BY_COUNTY_BY_STATE } from './zipCodesByCountyByState' 5 | 6 | export const COUNTIES_BY_STATE: Record = chain(ZIPS_BY_COUNTY_BY_STATE) 7 | .mapValues(flow(keys, sortBy)) 8 | .value() as any // TODO 9 | 10 | export const 
COUNTIES_BY_ZIP = chain(ZIPS_BY_COUNTY_BY_STATE) 11 | .map(_ => chain(_).map((zs, c) => zs.zip_codes.map(z => [z, c])).flatten().value()) 12 | .flatten() 13 | .fromPairs() 14 | .value() 15 | 16 | /** 17 | * TODO: Assign integer IDs to each county-zip tuple instead 18 | * of using these ad-hoc string keys 19 | */ 20 | export const SERVICE_AREAS_BY_COUNTY_BY_STATE: Record = 21 | mapValues( 22 | ZIPS_BY_COUNTY_BY_STATE, 23 | (cs, s: State) => mapValues( 24 | cs, 25 | (zs, c) => zs.zip_codes.map(z => serializeServiceArea(s, c, z))) 26 | ) as any // TODO 27 | 28 | export const SERVICE_AREAS_BY_STATE: Record = chain(SERVICE_AREAS_BY_COUNTY_BY_STATE) 29 | .mapValues(_ => flatten(values(_))) 30 | .value() as any // TODO 31 | -------------------------------------------------------------------------------- /frontend/src/images/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bayesimpact/encompass/a4f47e384ef4fe4dc43c30423a1713c2c93dc87f/frontend/src/images/favicon.png -------------------------------------------------------------------------------- /frontend/src/images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bayesimpact/encompass/a4f47e384ef4fe4dc43c30423a1713c2c93dc87f/frontend/src/images/logo.png -------------------------------------------------------------------------------- /frontend/src/index.css: -------------------------------------------------------------------------------- 1 | /* 2 | Vars (see https://www.figma.com/file/e05MM0u1xX6bjtTeoiY0h3Cw/NA-Analysis) 3 | */ 4 | 5 | :root { 6 | 7 | /* colors */ 8 | --primary: #2b3e80; 9 | --primary-dark: #20316a; 10 | --primary-light: #90a9ff; 11 | --secondary: #577dff; 12 | --secondary-dark: #20316a; 13 | --secondary-light: #90a9ff; 14 | 15 | --extralight: rgba(0, 0, 0, 0.06); 16 | --verylight: rgba(0, 0, 0, 0.12); 17 | --light: rgba(0, 0, 0, 0.38); 18 | --muted: 
rgba(0, 0, 0, 0.54); 19 | 20 | --map-background: #f6f6f4; 21 | 22 | /* dimensions */ 23 | --header-height: 64px; 24 | --leftpane-width: calc(40%); 25 | --min-leftpane-width: 512px; 26 | --analytics-title-width: 85px; 27 | 28 | /* fonts */ 29 | --font: 400 var(--medium-font-size)/var(--medium-font-line) Roboto, -apple-system, BlinkMacSystemFont, "Segoe UI", "Helvetica Neue", Helvetica, sans-serif; 30 | 31 | /* font-size */ 32 | --giant-font-size: 22px; 33 | --large-font-size: 16px; 34 | --mediumlarge-font-size: 15px; 35 | --medium-font-size: 14px; 36 | --small-font-size: 12px; 37 | 38 | /* line-height */ 39 | --medium-font-line: 21px; 40 | --large-font-line: 48px; 41 | 42 | /* weights */ 43 | --medium-weight: 400; 44 | --heavy-weight: 600; 45 | } 46 | -------------------------------------------------------------------------------- /frontend/src/index.ejs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | <%= htmlWebpackPlugin.options.title %> 10 | 11 | 12 | 13 | 15 | 16 | 17 | 18 | 19 | 20 | 21 |
22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /frontend/src/index.prod.ejs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | <%= htmlWebpackPlugin.options.title %> 8 | 9 | 10 | 11 | 12 | 13 | 14 | 16 | 17 | 18 | 19 | 20 | 21 | 22 |
23 | 24 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /frontend/src/index.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | import { render } from 'react-dom' 3 | import * as ReactGA from 'react-ga' 4 | import { App } from './components/App/App' 5 | import { getGoogleAnalyticsAppId } from './utils/env' 6 | 7 | // Set up Google Analytics. 8 | const googleAnalyticsAppId = getGoogleAnalyticsAppId() 9 | console.log('Initializing Google Analytics with app ID: ' + googleAnalyticsAppId) 10 | ReactGA.initialize(googleAnalyticsAppId) 11 | ReactGA.pageview(window.location.pathname + window.location.search) 12 | 13 | render(, document.querySelector('#App')) 14 | -------------------------------------------------------------------------------- /frontend/src/types.d.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * TODO: Add typings and publish to DefinitelyTyped. 3 | */ 4 | 5 | declare module 'chart.piecelabel.js' 6 | 7 | declare module 'geojson-filter' 8 | 9 | declare module 'mui-icons/cmdi/*' 10 | 11 | declare module 'react-autocomplete' 12 | 13 | declare module 'rmfr' { 14 | function rmfr(path: string): Promise 15 | export = rmfr 16 | } 17 | 18 | declare module 'react-device-detect' { 19 | let detect: { 20 | isMobile: boolean 21 | } 22 | export = detect 23 | } 24 | 25 | declare module '*.png' 26 | 27 | /** 28 | * Globals 29 | */ 30 | 31 | /** 32 | * `_.chain(..)` gives a value of type `_.LoDashExplicitWrapper`. `chain` is 33 | * convenient because it lets us defer computations over collections and reap 34 | * the performance benefits of Lodash's loop fusion. 35 | * 36 | * We alias this type to `Lazy` and make it available globally for ease of 37 | * use and superior readability. 
38 | */ 39 | type Lazy = _.LoDashExplicitWrapper 40 | -------------------------------------------------------------------------------- /frontend/src/utils/adequacy.ts: -------------------------------------------------------------------------------- 1 | import { PostAdequaciesResponse } from '../constants/api/adequacies-response' 2 | import { AdequacyMode, Method } from '../constants/datatypes' 3 | 4 | let ONE_MILE_IN_METERS = 1609.344 5 | let ONE_METER_IN_MILES = 1.0 / ONE_MILE_IN_METERS 6 | 7 | export function getAdequacyMode( 8 | adequacy: PostAdequaciesResponse[0], 9 | method: Method, 10 | serviceAreaId: string, 11 | selectedServiceAreas: string[] | null 12 | ): AdequacyMode { 13 | 14 | if (selectedServiceAreas && !selectedServiceAreas.includes(serviceAreaId)) { 15 | return AdequacyMode.OUT_OF_SCOPE 16 | } 17 | 18 | if (method === 'straight_line') { 19 | if (adequacy.to_closest_provider * ONE_METER_IN_MILES <= 10) { 20 | return AdequacyMode.ADEQUATE_0 21 | } 22 | if (adequacy.to_closest_provider * ONE_METER_IN_MILES <= 20) { 23 | return AdequacyMode.ADEQUATE_1 24 | } 25 | if (adequacy.to_closest_provider * ONE_METER_IN_MILES <= 30) { 26 | return AdequacyMode.ADEQUATE_2 27 | } 28 | if (adequacy.to_closest_provider * ONE_METER_IN_MILES > 30) { 29 | return AdequacyMode.INADEQUATE 30 | } 31 | return AdequacyMode.OUT_OF_SCOPE 32 | } 33 | 34 | if (method === 'driving_time' || method === 'walking_time') { 35 | if (adequacy.to_closest_provider <= 30) { 36 | return AdequacyMode.ADEQUATE_0 37 | } 38 | if (adequacy.to_closest_provider <= 45) { 39 | return AdequacyMode.ADEQUATE_1 40 | } 41 | if (adequacy.to_closest_provider <= 60) { 42 | return AdequacyMode.ADEQUATE_2 43 | } 44 | if (adequacy.to_closest_provider > 60) { 45 | return AdequacyMode.INADEQUATE 46 | } 47 | } 48 | return AdequacyMode.OUT_OF_SCOPE 49 | } 50 | 51 | export function getLegend(method: Method, standard: AdequacyMode) { 52 | switch (method) { 53 | case 'straight_line': 54 | switch (standard) { 55 | case 
AdequacyMode.ADEQUATE_0: return '0-10 mi' 56 | case AdequacyMode.ADEQUATE_1: return '10-20 mi' 57 | case AdequacyMode.ADEQUATE_2: return '20-30 mi' 58 | case AdequacyMode.INADEQUATE: return '30+ mi' 59 | } 60 | break 61 | case 'driving_time': 62 | case 'walking_time': 63 | switch (standard) { 64 | case AdequacyMode.ADEQUATE_0: return '0-30 min' 65 | case AdequacyMode.ADEQUATE_1: return '30-45 min' 66 | case AdequacyMode.ADEQUATE_2: return '45-60 min' 67 | case AdequacyMode.INADEQUATE: return '60+ min' 68 | } 69 | break 70 | } 71 | return '' 72 | } 73 | -------------------------------------------------------------------------------- /frontend/src/utils/analytics.test.ts: -------------------------------------------------------------------------------- 1 | import { chain } from 'lodash' 2 | import { Adequacy, AdequacyMode, GeocodedProvider } from '../constants/datatypes' 3 | import { averageMeasure, maxMeasure, minMeasure, totalPopulation } from './analytics' 4 | 5 | let provider: GeocodedProvider = { 6 | address: '1234 Main St. 
San Francisco CA 94111', 7 | languages: ['english', 'japanese'], 8 | lat: 123.456, 9 | lng: -123.456, 10 | npi: '123456789', 11 | specialty: 'internal medicine' 12 | } 13 | 14 | let adequacies1: Lazy = chain([ 15 | { adequacyMode: AdequacyMode.INADEQUATE, id: 0, toClosestProvider: 21, closestProvider: provider }, 16 | { adequacyMode: AdequacyMode.INADEQUATE, id: 0, toClosestProvider: 31, closestProvider: provider }, 17 | { adequacyMode: AdequacyMode.INADEQUATE, id: 0, toClosestProvider: 41, closestProvider: provider }, 18 | { adequacyMode: AdequacyMode.INADEQUATE, id: 0, toClosestProvider: 51, closestProvider: provider } 19 | ]) 20 | 21 | let adequacies2: Lazy = chain([ 22 | { adequacyMode: AdequacyMode.INADEQUATE, id: 0, toClosestProvider: 0, closestProvider: provider }, 23 | { adequacyMode: AdequacyMode.INADEQUATE, id: 0, toClosestProvider: 0, closestProvider: provider } 24 | ]) 25 | 26 | let representativePoints1 = chain([ 27 | { county: 'Sonoma', id: 1, lat: 123.456, lng: 32.109, population: 100, serviceAreaId: 'Sonoma-90000', zip: '90000' }, 28 | { county: 'Sonoma', id: 2, lat: 123.457, lng: 32.109, population: 101, serviceAreaId: 'Sonoma-90000', zip: '90001' }, 29 | { county: 'Sonoma', id: 3, lat: 123.458, lng: 32.109, population: 200, serviceAreaId: 'Sonoma-90000', zip: '90002' }, 30 | { county: 'Sonoma', id: 4, lat: 123.459, lng: 32.109, population: 102912, serviceAreaId: 'Sonoma-90000', zip: '90003' }, 31 | { county: 'Sonoma', id: 5, lat: 123.460, lng: 32.109, population: 11, serviceAreaId: 'Sonoma-90000', zip: '90004' } 32 | ]) 33 | 34 | let representativePoints2 = chain([ 35 | { county: 'Napa', id: 6, lat: 123.456, lng: 32.109, population: 0, serviceAreaId: 'Napa-90010', zip: '90010' }, 36 | { county: 'Napa', id: 7, lat: 123.457, lng: 32.109, population: 0, serviceAreaId: 'Napa-90011', zip: '90011' }, 37 | { county: 'Napa', id: 8, lat: 123.458, lng: 32.109, population: 0, serviceAreaId: 'Napa-90012', zip: '90012' } 38 | ]) 39 | 40 | 
test('averageMeasure', () => { 41 | expect(averageMeasure(adequacies1)).toBe(36) 42 | expect(averageMeasure(adequacies2)).toBe(0) 43 | }) 44 | 45 | test('maxMeasure', () => { 46 | expect(maxMeasure(adequacies1)).toBe(51) 47 | expect(maxMeasure(adequacies2)).toBe(0) 48 | }) 49 | 50 | test('minMeasure', () => { 51 | expect(minMeasure(adequacies1)).toBe(21) 52 | expect(minMeasure(adequacies2)).toBe(0) 53 | }) 54 | 55 | test('totalPopulation', () => { 56 | expect(totalPopulation(representativePoints1)).toBe(103324) 57 | expect(totalPopulation(representativePoints2)).toBe(0) 58 | }) 59 | -------------------------------------------------------------------------------- /frontend/src/utils/analytics.ts: -------------------------------------------------------------------------------- 1 | import { identity } from 'lodash' 2 | import { Adequacy, CensusGroup, RepresentativePoint } from '../constants/datatypes' 3 | 4 | /** 5 | * A big generic partially appliable reducer for building math pipelines. 6 | * 7 | * It accepts an `initial` value, a reducer `f`, a plucker `g` that 8 | * takes an object of type `T` and gives a numerical property on it, 9 | * and an array of `data`. We take advantage of Lodash's loop fusion 10 | * to do a map and reduce in *O(n)* time. 11 | * 12 | * We round the result, because floating point math is not always 13 | * accurate (.1 + .2 === 0.30000000000000004). 
14 | */ 15 | let fold = (initial: T) => 16 | (f: FoldFn) => 17 | (g: (a: U) => T = identity) => 18 | (data: Lazy) => 19 | data 20 | .map(g) 21 | .reduce(f, initial) 22 | .round() 23 | .value() 24 | 25 | type FoldFn = (acc: U, current: T, index: number, array: T[]) => U 26 | 27 | let fns = { 28 | mean: (a: number, b: number, _index: number, array: number[]) => a + b / array.length, 29 | max: (a: number, b: number) => Math.max(a, b), 30 | min: (a: number, b: number) => Math.min(a, b), 31 | sum: (a: number, b: number) => a + b 32 | } 33 | 34 | let mean = fold(0)(fns.mean) 35 | let max = fold(-Infinity)(fns.max) 36 | let min = fold(Infinity)(fns.min) 37 | let sum = fold(0)(fns.sum) 38 | 39 | export let averageMeasure = mean(_ => _.toClosestProvider) 40 | export let maxMeasure = max(_ => _.toClosestProvider) 41 | export let minMeasure = min(_ => _.toClosestProvider) 42 | 43 | export let totalPopulation = sum(_ => _.population) 44 | 45 | export function populationByCensus(censusGroup: CensusGroup) { 46 | return sum( 47 | _ => _.population * 0.01 * _.demographics[censusGroup.censusCategory][censusGroup.censusGroup] || 0 48 | ) 49 | } 50 | -------------------------------------------------------------------------------- /frontend/src/utils/csv.test.ts: -------------------------------------------------------------------------------- 1 | import { isEmpty } from './csv' 2 | 3 | test('isEmpty', () => { 4 | expect(isEmpty(null)).toBe(true) 5 | expect(isEmpty(undefined)).toBe(true) 6 | expect(isEmpty('')).toBe(true) 7 | expect(isEmpty(' ')).toBe(true) 8 | expect(isEmpty('1')).toBe(false) 9 | expect(isEmpty(' a ')).toBe(false) 10 | }) 11 | -------------------------------------------------------------------------------- /frontend/src/utils/download.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Download a file that was generated on the client. 
3 | * 4 | * @see https://github.com/mholt/PapaParse/issues/175#issuecomment-201308792 5 | * 6 | * TODO: Move this to its own module. 7 | */ 8 | export function download(contents: string, mimeType: string, filename: string) { 9 | let csvData = new Blob([contents], { type: `${mimeType};charset=utf-8;` }) 10 | if (navigator.msSaveBlob) { 11 | // IE11 & Edge 12 | navigator.msSaveBlob(csvData, filename) 13 | } else { 14 | // In FF link must be added to DOM to be clicked 15 | let link = document.createElement('a') 16 | link.href = window.URL.createObjectURL(csvData) 17 | link.setAttribute('download', filename) 18 | document.body.appendChild(link) 19 | link.click() 20 | document.body.removeChild(link) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /frontend/src/utils/env.ts: -------------------------------------------------------------------------------- 1 | export function getGoogleAnalyticsAppId(){ 2 | return process.env.GA_ID || 'Unknown App ID' 3 | } 4 | -------------------------------------------------------------------------------- /frontend/src/utils/formatters.test.ts: -------------------------------------------------------------------------------- 1 | import { normalizeZip } from './formatters' 2 | 3 | test('normalizeZip', () => { 4 | expect(normalizeZip('')).toBe('') 5 | expect(normalizeZip('12345')).toBe('12345') 6 | expect(normalizeZip('12345-6789')).toBe('12345') 7 | }) 8 | -------------------------------------------------------------------------------- /frontend/src/utils/geojson.test.ts: -------------------------------------------------------------------------------- 1 | import { boundingBox } from './geojson' 2 | 3 | let representativePoints = [ 4 | { county: 'Sonoma', id: 1, lat: 123.1, lng: 32.109, population: 100, serviceAreaId: 'Sonoma-90000', zip: '90000' }, 5 | { county: 'Sonoma', id: 2, lat: 122.01, lng: 32.3123, population: 101, serviceAreaId: 'Sonoma-90001', zip: '90001' }, 6 | { county: 
'Sonoma', id: 3, lat: 122.45, lng: 31.4, population: 200, serviceAreaId: 'Sonoma-90002', zip: '90002' }, 7 | { county: 'Sonoma', id: 4, lat: 121.6, lng: 33.86, population: 102912, serviceAreaId: 'Sonoma-90003', zip: '90003' }, 8 | { county: 'Sonoma', id: 5, lat: 124.8, lng: 30.9889, population: 11, serviceAreaId: 'Sonoma-90004', zip: '90004' }, 9 | { county: 'Sonoma', id: 6, lat: 123.460, lng: 34.91, population: 11, serviceAreaId: 'Sonoma-90005', zip: '90005' }, 10 | { county: 'Sonoma', id: 7, lat: 128.8712, lng: 32.11, population: 11, serviceAreaId: 'Sonoma-90006', zip: '90006' }, 11 | { county: 'Sonoma', id: 8, lat: 122.12, lng: 32.009, population: 11, serviceAreaId: 'Sonoma-90007', zip: '90007' }, 12 | { county: 'Sonoma', id: 9, lat: 125.37, lng: 33.423, population: 11, serviceAreaId: 'Sonoma-90008', zip: '90008' } 13 | ] 14 | 15 | test('boundingBox', () => { 16 | expect(boundingBox([])).toBe(null) 17 | expect(boundingBox(representativePoints)).toEqual({ 18 | sw: { lat: 121.6, lng: 30.9889 }, 19 | ne: { lat: 128.8712, lng: 34.91 } 20 | }) 21 | }) 22 | -------------------------------------------------------------------------------- /frontend/src/utils/lazy.ts: -------------------------------------------------------------------------------- 1 | import { chain } from 'lodash' 2 | 3 | export function lazy(a: T) { 4 | return chain(a) 5 | } 6 | -------------------------------------------------------------------------------- /frontend/src/utils/link.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | 3 | export function SecureLink(href: string, children: React.ReactNode, target?: string) { 4 | target = target !== undefined ? 
target : '_blank' 5 | return 6 | {children} 7 | 8 | } 9 | -------------------------------------------------------------------------------- /frontend/src/utils/list.test.ts: -------------------------------------------------------------------------------- 1 | import { equals } from './list' 2 | 3 | test('equals', () => { 4 | expect(equals([], [])).toBe(true) 5 | expect(equals([1], [1])).toBe(true) 6 | expect(equals(['a', 'b'], ['a', 'b'])).toBe(true) 7 | expect(equals([['a'], ['b']], [['a'], ['b']])).toBe(true) 8 | expect(equals([{ a: [1], b: [2] }], [{ a: [1], b: [2] }])).toBe(true) 9 | expect(equals([{ a: () => 1 }], [{ a: () => 2 }])).toBe(true) 10 | 11 | expect(equals([], [1])).toBe(false) 12 | expect(equals([1], [])).toBe(false) 13 | expect(equals(['a', 'b'], ['b', 'a'])).toBe(false) 14 | expect(equals(['a', 'b'], ['a'])).toBe(false) 15 | }) 16 | -------------------------------------------------------------------------------- /frontend/src/utils/list.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Quick and dirty comparison to check whether 2 lists are equal. 3 | */ 4 | export function equals(a: T[], b: T[]) { 5 | return JSON.stringify(a) === JSON.stringify(b) 6 | } 7 | -------------------------------------------------------------------------------- /frontend/src/utils/numbers.ts: -------------------------------------------------------------------------------- 1 | export function maybeParseFloat(string: string | null) { 2 | if (string == null) { 3 | return undefined 4 | } 5 | let float = parseFloat(string) 6 | if (Number.isNaN(float)) { 7 | return undefined 8 | } 9 | return float 10 | } 11 | 12 | /** 13 | * Convenience function for rounding without using toFixed (which is for formatting). 
14 | */ 15 | export function precisionRound(number: number, precision: number): number { 16 | const factor = Math.pow(10, precision) 17 | return Math.round(number * factor) / factor 18 | } 19 | -------------------------------------------------------------------------------- /frontend/src/utils/serializers.ts: -------------------------------------------------------------------------------- 1 | import { findKey } from 'lodash' 2 | import { State } from '../constants/states' 3 | import { snakeCase } from './string' 4 | 5 | export function serializeServiceArea(state: State, county: string, zip: string) { 6 | return `${state}_${snakeCase(county)}_${zip}` 7 | } 8 | 9 | export function getPropCaseInsensitive(obj: T, name: string): T[keyof T] { 10 | let realName = findKey(obj, function (_value, key: string) { 11 | return snakeCase(key).toLowerCase() === name.toLowerCase() 12 | }) || 'no_key' 13 | return obj[realName as keyof T] 14 | } 15 | -------------------------------------------------------------------------------- /frontend/src/utils/string.test.ts: -------------------------------------------------------------------------------- 1 | import { capitalizeWords, fuzz } from './string' 2 | 3 | test('capitalizeWords', () => { 4 | expect(capitalizeWords('')).toBe('') 5 | expect(capitalizeWords('Foo Bar')).toBe('Foo Bar') 6 | expect(capitalizeWords('abc')).toBe('Abc') 7 | expect(capitalizeWords('abc1')).toBe('Abc1') 8 | expect(capitalizeWords('fooBar')).toBe('Foo Bar') 9 | expect(capitalizeWords('foo bar')).toBe('Foo Bar') 10 | expect(capitalizeWords('foo_bar')).toBe('Foo Bar') 11 | expect(capitalizeWords('foo1')).toBe('Foo1') 12 | }) 13 | 14 | test('fuzz', () => { 15 | expect(fuzz('abc def')).toBe('abcdef') 16 | expect(fuzz('ABC DEF')).toBe('abcdef') 17 | expect(fuzz('ABC def')).toBe('abcdef') 18 | }) 19 | -------------------------------------------------------------------------------- /frontend/src/utils/string.ts: 
-------------------------------------------------------------------------------- 1 | import { capitalize, memoize, snakeCase as snake_case } from 'lodash' 2 | 3 | /** 4 | * Like `_.capitalize`, but capitalizes all words in the given sentence 5 | * and splits camelcased words. 6 | */ 7 | export let capitalizeWords = memoize((string: string) => 8 | string 9 | .replace(/([^A-Z ])([A-Z])/g, '$1 $2') 10 | .replace(/_/g, ' ') 11 | .split(' ') 12 | .map(capitalize) 13 | .join(' ') 14 | ) 15 | 16 | export let snakeCase = memoize((string: string) => 17 | snake_case(string.toLowerCase()) 18 | ) 19 | 20 | /** 21 | * Useful for fuzzy comparisons. 22 | */ 23 | export let fuzz = memoize((s: string) => 24 | s.toLowerCase().replace(/ /g, '') 25 | ) 26 | -------------------------------------------------------------------------------- /frontend/src/utils/webgl.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Return true if we can get a webgl context, or false otherwise. 
3 | */ 4 | export function isWebGLEnabled() { 5 | const canvas = document.createElement('canvas') 6 | const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl') 7 | 8 | return !!(gl && gl instanceof WebGLRenderingContext) 9 | } 10 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as.csv: -------------------------------------------------------------------------------- 1 | CountyName,City,ZipCode,PopulationPointsPerZipCode 2 | San Diego,Vista,92084,100 3 | San Diego,Vista,92085,1 4 | Alameda,Berkeley,94505,1001 5 | alameda,Oakland,94530,1001 6 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_county_only.csv: -------------------------------------------------------------------------------- 1 | CountyName 2 | San Diego 3 | san francisco 4 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_duplicate_county_only.csv: -------------------------------------------------------------------------------- 1 | county,zip 2 | sanFrancisco, 3 | sanFrancisco, -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_invalid_input_file.csv: -------------------------------------------------------------------------------- 1 | City\8x8 2 | Vista\asdqw\qwewq\asdas, wqei, wqeoiwq 3 | san Francisco 4 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_invalid_input_file_2.csv: -------------------------------------------------------------------------------- 1 | CountyName,City,ZipCode,PopulationPointsPerZipCode 2 | San Diego,Vista,92084,100 3 | Fake County,Berkeley,94505,1001 4 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_invalid_input_file_3.csv: 
-------------------------------------------------------------------------------- 1 | CountyName,City,ZipCode,PopulationPointsPerZipCode 2 | San Diego,Vista,92084,100 3 | Alameda,Berkeley,12345,1001 4 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_invalid_input_file_4.csv: -------------------------------------------------------------------------------- 1 | CountyName,City,ZipCode,PopulationPointsPerZipCode 2 | San Diego,Vista,92084,100 3 | Alameda,Berkeley, ,1001 4 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_no_zip_no_county.csv: -------------------------------------------------------------------------------- 1 | City 2 | Vista 3 | san Francisco 4 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_zip_and_county.csv: -------------------------------------------------------------------------------- 1 | CountyName,City,ZipCode,PopulationPointsPerZipCode 2 | San Diego,Vista,92084,100 3 | san francisco,san Francisco,94117,1001 4 | -------------------------------------------------------------------------------- /frontend/test/mocks/point-as_zip_only.csv: -------------------------------------------------------------------------------- 1 | ZipCode 2 | 94117 3 | 94103 4 | 94102 5 | 94110 6 | 94114 7 | 92154 8 | 91935 9 | 92055 10 | -------------------------------------------------------------------------------- /frontend/test/setupFiles.js: -------------------------------------------------------------------------------- 1 | let fetch = require('fetch-mock') 2 | 3 | fetch.post(/\/api\/adequacies\//, require('./mockResponses/adequacies')) 4 | 5 | fetch.post(/\/api\/providers\//, []) 6 | 7 | fetch.post(/\/api\/representative_points\//, []) 8 | 9 | global.Headers = Map 10 | 11 | // Mock `requestAnimationFrame` (required for React tests) 12 | global.requestAnimationFrame = callback => 
13 | setTimeout(callback, 0) 14 | 15 | // Mock `URL.createObjectURL` (required for Mapbox tests) 16 | global.URL.createObjectURL = () => ({}) 17 | -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowJs": true, 4 | "allowSyntheticDefaultImports": false, 5 | "forceConsistentCasingInFileNames": true, 6 | "jsx": "react", 7 | "lib": [ 8 | "dom", 9 | "es2017" 10 | ], 11 | "module": "commonjs", 12 | "moduleResolution": "node", 13 | "newLine": "LF", 14 | "noUnusedLocals": true, 15 | "noUnusedParameters": true, 16 | "outDir": "dist", 17 | "preserveConstEnums": true, 18 | "sourceMap": true, 19 | "strict": true, 20 | "strictFunctionTypes": false, 21 | "target": "es5" 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /frontend/tslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "tslint-react" 4 | ], 5 | "rules": { 6 | "adjacent-overload-signatures": true, 7 | "class-name": true, 8 | "comment-format": [ 9 | true, 10 | "check-space" 11 | ], 12 | "eofline": true, 13 | "indent": [ 14 | true, 15 | "spaces" 16 | ], 17 | "jsx-alignment": false, 18 | "jsx-boolean-value": false, 19 | "jsx-no-lambda": false, 20 | "jsx-no-multiline-js": false, 21 | "jsx-no-string-ref": true, 22 | "jsx-self-close": true, 23 | "jsx-wrap-multiline": false, 24 | "linebreak-style": [ 25 | true, 26 | "LF" 27 | ], 28 | "member-access": false, 29 | "no-angle-bracket-type-assertion": true, 30 | "no-bitwise": true, 31 | "no-conditional-assignment": true, 32 | "no-consecutive-blank-lines": [ 33 | true, 34 | 1 35 | ], 36 | "no-construct": true, 37 | "no-debugger": true, 38 | "no-default-export": true, 39 | "no-duplicate-variable": true, 40 | "no-eval": true, 41 | "no-internal-module": true, 42 | "no-invalid-this": true, 43 | 
"no-reference": true, 44 | "no-trailing-whitespace": true, 45 | "no-unused-expression": true, 46 | "no-var-keyword": true, 47 | "no-var-requires": true, 48 | "object-literal-key-quotes": [ 49 | true, 50 | "as-needed" 51 | ], 52 | "object-literal-shorthand": true, 53 | "one-line": [ 54 | true, 55 | "check-catch", 56 | "check-finally", 57 | "check-else", 58 | "check-open-brace" 59 | ], 60 | "one-variable-per-declaration": [ 61 | true, 62 | "ignore-for-loop" 63 | ], 64 | "ordered-imports": [ 65 | true 66 | ], 67 | "quotemark": [ 68 | true, 69 | "single", 70 | "avoid-escape" 71 | ], 72 | "semicolon": [ 73 | true, 74 | "never" 75 | ], 76 | "trailing-comma": [ 77 | true, 78 | { 79 | "multiline": "never", 80 | "singleline": "never" 81 | } 82 | ], 83 | "triple-equals": [ 84 | true, 85 | "allow-null-check" 86 | ], 87 | "typedef-whitespace": [ 88 | true, 89 | { 90 | "call-signature": "nospace", 91 | "index-signature": "nospace", 92 | "parameter": "nospace", 93 | "property-declaration": "nospace", 94 | "variable-declaration": "nospace" 95 | } 96 | ], 97 | "use-isnan": true, 98 | "whitespace": [ 99 | true, 100 | "check-branch", 101 | "check-decl", 102 | "check-module", 103 | "check-operator", 104 | "check-separator", 105 | "check-type" 106 | ] 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /frontend/webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const webpack = require('webpack') 3 | const Dotenv = require('dotenv-webpack') 4 | const ExtractTextPlugin = require('extract-text-webpack-plugin') 5 | const ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin') 6 | const HtmlWebpackPlugin = require('html-webpack-plugin') 7 | 8 | module.exports = { 9 | mode: 'development', 10 | devServer: { 11 | contentBase: __dirname + '/dist', 12 | compress: true, 13 | // hot: true, 14 | https: false, 15 | port: 8081, 16 | 
disableHostCheck: true, 17 | publicPath: '/' 18 | }, 19 | entry: './src/index.tsx', 20 | externals: { 21 | 'chart.js': 'Chart', 22 | lodash: '_', 23 | 'mapbox-gl': 'mapboxgl', 24 | moment: 'moment', 25 | react: 'React', 26 | 'react-dom': 'ReactDOM', 27 | rx: 'Rx' 28 | }, 29 | output: { 30 | filename: 'bundle.js', 31 | path: __dirname + '/dist' 32 | }, 33 | resolve: { 34 | extensions: ['.ts', '.tsx', '.js', '.json'] 35 | }, 36 | module: { 37 | rules: [ 38 | { 39 | include: path.resolve(__dirname, 'src'), 40 | loader: 'ts-loader', 41 | options: { 42 | transpileOnly: true 43 | }, 44 | test: /\.tsx?$/, 45 | }, 46 | { 47 | include: path.resolve(__dirname, 'src'), 48 | loader: 'file-loader', 49 | test: /\.png$/, 50 | }, 51 | { 52 | include: path.resolve(__dirname, 'src'), 53 | test: /\.css$/, 54 | use: ExtractTextPlugin.extract({ 55 | use: ['css-loader', { 56 | loader: 'postcss-loader', 57 | options: { 58 | plugins: loader => [ 59 | require('postcss-import')(), 60 | require('postcss-cssnext')({ 61 | browsers: '>2%' 62 | }) 63 | ] 64 | } 65 | }] 66 | }) 67 | } 68 | ] 69 | }, 70 | plugins: [ 71 | new Dotenv({ 72 | path: '../.env' 73 | }), 74 | new ExtractTextPlugin({ 75 | filename: 'bundle.css' 76 | }), 77 | new HtmlWebpackPlugin({ 78 | filename: 'index.html', 79 | template: 'src/index.ejs', 80 | title: 'bayes-network-adequacy-explorer', 81 | favicon: path.join(__dirname, 'src/images/favicon.png') 82 | }), 83 | new webpack.DefinePlugin({ 84 | 'process.env.ENV': JSON.stringify(process.env.ENV), 85 | 'process.env.NODE_ENV': JSON.stringify('development'), 86 | 'process.env.API_ROOT': JSON.stringify(process.env.API_ROOT), 87 | 'process.env.MAPBOX_TOKEN': JSON.stringify(process.env.MAPBOX_TOKEN), 88 | 'process.env.GA_ID': JSON.stringify(process.env.GA_ID) 89 | }), 90 | new ForkTsCheckerWebpackPlugin 91 | ] 92 | } 93 | -------------------------------------------------------------------------------- /frontend/webpack.config.prod.js: 
-------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const webpack = require('webpack') 3 | const ExtractTextPlugin = require('extract-text-webpack-plugin') 4 | const HtmlWebpackPlugin = require('html-webpack-plugin') 5 | const RobotstxtPlugin = require("robotstxt-webpack-plugin").default 6 | 7 | module.exports = { 8 | mode: 'production', 9 | devServer: { 10 | contentBase: __dirname + '/dist', 11 | compress: true, 12 | // hot: true, 13 | https: false, 14 | port: 8081, 15 | disableHostCheck: true, 16 | publicPath: '/' 17 | }, 18 | entry: './src/index.tsx', 19 | externals: { 20 | 'chart.js': 'Chart', 21 | lodash: '_', 22 | 'mapbox-gl': 'mapboxgl', 23 | moment: 'moment', 24 | react: 'React', 25 | 'react-dom': 'ReactDOM', 26 | rx: 'Rx' 27 | }, 28 | output: { 29 | filename: 'bundle.js', 30 | path: __dirname + '/dist' 31 | }, 32 | resolve: { 33 | extensions: ['.ts', '.tsx', '.js', '.json'] 34 | }, 35 | module: { 36 | rules: [ 37 | { 38 | include: path.resolve(__dirname, 'src'), 39 | loader: 'ts-loader', 40 | options: { 41 | transpileOnly: false 42 | }, 43 | test: /\.tsx?$/, 44 | }, 45 | { 46 | include: path.resolve(__dirname, 'src'), 47 | loader: 'file-loader', 48 | test: /\.png$/, 49 | }, 50 | { 51 | test: /\.css$/, 52 | use: ExtractTextPlugin.extract({ 53 | use: ['css-loader', { 54 | loader: 'postcss-loader', 55 | options: { 56 | plugins: loader => [ 57 | require('postcss-import')(), 58 | require('postcss-cssnext')({ 59 | browsers: '>2%' 60 | }) 61 | ] 62 | } 63 | }] 64 | }) 65 | } 66 | ] 67 | }, 68 | plugins: [ 69 | new ExtractTextPlugin({ 70 | filename: 'bundle.css' 71 | }), 72 | new HtmlWebpackPlugin({ 73 | filename: 'index.html', 74 | template: 'src/index.prod.ejs', 75 | title: 'bayes-network-adequacy-explorer', 76 | favicon: path.join(__dirname, 'src/images/favicon.png') 77 | }), 78 | new webpack.DefinePlugin({ 79 | 'process.env.ENV': JSON.stringify(process.env.ENV), 80 | 'process.env.NODE_ENV': 
JSON.stringify('production'), 81 | 'process.env.API_ROOT': JSON.stringify(process.env.API_ROOT), 82 | 'process.env.MAPBOX_TOKEN': JSON.stringify(process.env.MAPBOX_TOKEN), 83 | 'process.env.GA_ID': JSON.stringify(process.env.GA_ID) 84 | }), 85 | new RobotstxtPlugin({ 86 | policy: [ 87 | { 88 | userAgent: "*", 89 | disallow: "/" 90 | } 91 | ] 92 | }) 93 | ] 94 | } 95 | -------------------------------------------------------------------------------- /osrm/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM osrm/osrm-backend:latest 2 | 3 | # Install Python3, pip, and setuptools. 4 | # See https://github.com/frol/docker-alpine-python3/blob/master/Dockerfile. 5 | RUN apk add --no-cache python3 && \ 6 | python3 -m ensurepip && \ 7 | rm -r /usr/lib/python*/ensurepip && \ 8 | pip3 install --upgrade pip setuptools && \ 9 | if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \ 10 | if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \ 11 | rm -r /root/.cache 12 | 13 | EXPOSE 5000 14 | -------------------------------------------------------------------------------- /shared/api-spec/adequacies-request.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "POSTAdequaciesRequest", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "description": "Request shape for POST /api/adequacies/", 5 | "type": "object", 6 | "properties": { 7 | "method": { 8 | "description": "Method used to calculate times and distances", 9 | "enum": ["driving_time", "straight_line", "walking_time"] 10 | }, 11 | "providers": { 12 | "type": "array", 13 | "items": { 14 | "properties": { 15 | "id": { 16 | "description": "An arbitrary ID that is locally unique within this request", 17 | "type": "integer" 18 | }, 19 | "latitude": { 20 | "type": "number" 21 | }, 22 | "longitude": { 23 | "type": "number" 24 | } 25 | }, 26 | "additionalProperties": false, 27 | 
"required": ["id", "latitude", "longitude"] 28 | } 29 | }, 30 | "service_area_ids": { 31 | "description": "Service area IDs in the format \"state_city_zip\".\nFor example, [\"ca_san_francisco_94014\", \"ca_san_francisco_94015\"]", 32 | "type": "array", 33 | "items": { 34 | "type": "string" 35 | } 36 | }, 37 | "dataset_hint": { 38 | "description": "Hint to help the backend use cached adequacy results.", 39 | "type": "string" 40 | } 41 | }, 42 | "additionalProperties": false, 43 | "required": ["method", "providers", "service_area_ids"] 44 | } 45 | -------------------------------------------------------------------------------- /shared/api-spec/adequacies-response.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "POSTAdequaciesResponse", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "description": "Response shape for POST /api/adequacies/", 5 | "type": "array", 6 | "items": { 7 | "properties": { 8 | "id": { 9 | "type": "integer" 10 | }, 11 | "to_closest_provider": { 12 | "description": "Measure (in meters or minutes) to the closest provider", 13 | "type": "integer" 14 | }, 15 | "closest_providers": { 16 | "type": "array", 17 | "items": { 18 | "type": "integer" 19 | } 20 | }, 21 | "closest_location":{ 22 | "properties": { 23 | "latitude": {"type": "number"}, 24 | "longitude": {"type": "number"} 25 | }, 26 | "additionalProperties": false, 27 | "required": ["latitude", "longitude"] 28 | } 29 | }, 30 | "additionalProperties": false, 31 | "required": ["id", "to_closest_provider", "closest_providers", "closest_location"] 32 | }, 33 | "additionalProperties": false 34 | } 35 | -------------------------------------------------------------------------------- /shared/api-spec/available-service-areas-response.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "GETAvailableServiceAreasResponse", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 
| "description": "Response shape for GET /api/available-service-areas/", 5 | "type": "array", 6 | "items": { 7 | "type": "array", 8 | "items": [{ 9 | "description": "Service area ID", 10 | "type": "string" 11 | }, 12 | { 13 | "description": "County name", 14 | "type": "string" 15 | }, 16 | { 17 | "description": "Zip code", 18 | "type": "string" 19 | }, 20 | { 21 | "description": "State", 22 | "type": "string" 23 | }, 24 | { 25 | "description": "NCHS urban/rural code", 26 | "type": "string" 27 | } 28 | ] 29 | }, 30 | "additionalProperties": false 31 | } 32 | -------------------------------------------------------------------------------- /shared/api-spec/census-data-response.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "PostCensusDataResponse", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "description": "Response shape for POST /api/census-data-by-service-area/", 5 | "definitions": { 6 | "census_category_info": { 7 | "items": { 8 | "type": "object", 9 | "additionalProperties": {"type" : "number"} 10 | } 11 | } 12 | }, 13 | "type": "object", 14 | "additionalProperties": { 15 | "items": { 16 | "type": "object", 17 | "additionalProperties": { 18 | "$ref": "#/definitions/census_category_info" 19 | } 20 | }} 21 | } 22 | -------------------------------------------------------------------------------- /shared/api-spec/geocode-request.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "POSTGeocodeRequest", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "description": "Request shape for POST /api/geocode/", 5 | "type": "object", 6 | "properties": { 7 | "addresses": { 8 | "type": "array", 9 | "items": { 10 | "description": "Excluding suite #.\nEg. \"1234 Main St. 
San Francisco, CA 94114\"", 11 | "type": "string" 12 | } 13 | } 14 | }, 15 | "additionalProperties": false, 16 | "required": ["addresses"] 17 | } 18 | -------------------------------------------------------------------------------- /shared/api-spec/geocode-response.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "POSTGeocodeResponse", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "description": "Response shape for POST /api/geocode/", 5 | "type": "array", 6 | "definitions": { 7 | "success": { 8 | "properties": { 9 | "status": { 10 | "enum": ["success"] 11 | }, 12 | "lat": { 13 | "type": "number" 14 | }, 15 | "lng": { 16 | "type": "number" 17 | } 18 | }, 19 | "additionalProperties": false, 20 | "required": ["status", "lat", "lng"] 21 | }, 22 | "error": { 23 | "properties": { 24 | "status": { 25 | "enum": ["error"] 26 | }, 27 | "message": { 28 | "type": "string" 29 | } 30 | }, 31 | "additionalProperties": false, 32 | "required": ["status", "message"] 33 | } 34 | }, 35 | "items": { 36 | "oneOf": [{ 37 | "$ref": "#/definitions/success" 38 | }, { 39 | "$ref": "#/definitions/error" 40 | }] 41 | }, 42 | "additionalProperties": false 43 | } 44 | -------------------------------------------------------------------------------- /shared/api-spec/representative-points-request.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "POSTRepresentativePointsRequest", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "description": "Request shape for POST /api/representative_points/", 5 | "type": "object", 6 | "properties": { 7 | "service_area_ids": { 8 | "description": "Service area IDs in the format \"state_city_zip\".\nFor example, [\"ca_san_francisco_94014\", \"ca_san_francisco_94015\"]", 9 | "type": "array", 10 | "items": { 11 | "type": "string" 12 | } 13 | }, 14 | "include_census_data": { 15 | "description": "Defines if frontend requests 
census data at the representative point level.", 16 | "type": "boolean" 17 | } 18 | }, 19 | "additionalProperties": false, 20 | "required": ["service_area_ids"] 21 | } 22 | -------------------------------------------------------------------------------- /shared/api-spec/representative-points-response.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "POSTRepresentativePointsResponse", 3 | "$schema": "http://json-schema.org/draft-04/schema#", 4 | "description": "Response shape for POST /api/representative_points/", 5 | "definitions": { 6 | "bucket": { 7 | "items": { 8 | "properties": { 9 | "name": { 10 | "type": "string" 11 | }, 12 | "values": { 13 | "items": { 14 | "type": "number" 15 | }, 16 | "type": "array" 17 | } 18 | }, 19 | "additionalProperties": false, 20 | "required": ["name", "values"] 21 | }, 22 | "type": "array" 23 | } 24 | }, 25 | "type": "array", 26 | "items": { 27 | "properties": { 28 | "county": { 29 | "type": "string" 30 | }, 31 | "demographics": { 32 | "type": "array", 33 | "items": { 34 | "properties": { 35 | "buckets": { 36 | "$ref": "#/definitions/bucket" 37 | }, 38 | "category": { 39 | "type": "string" 40 | } 41 | }, 42 | "additionalProperties": false, 43 | "required": ["buckets", "category"] 44 | } 45 | }, 46 | "id": { 47 | "type": "integer" 48 | }, 49 | "lat": { 50 | "type": "number" 51 | }, 52 | "lng": { 53 | "type": "number" 54 | }, 55 | "population": { 56 | "type": "integer" 57 | }, 58 | "service_area_id": { 59 | "type": "string" 60 | }, 61 | "zip": { 62 | "type": "string" 63 | } 64 | }, 65 | "additionalProperties": false, 66 | "required": ["census_tract", "county", "id", "lat", "lng", "population", "service_area_id", "zip"] 67 | }, 68 | "additionalProperties": false 69 | } 70 | -------------------------------------------------------------------------------- /terraform/README.md: -------------------------------------------------------------------------------- 1 | # Infrastructure 
Management 2 | ## Summary 3 | This directory contains configuration files for deploying the Encompass application using AWS resources. This project utilises [Terraform](terraform.io) for managing infrastructure resources. 4 | 5 | _TODO: extract references to Bayes Impact specific IDs so that the repo contains a configuration that can be used by others._ 6 | ## A note on VPCs 7 | When setting up a new environment, it is recommended that you import your existing main VPC: 8 | ```bash 9 | terraform import module.stack.aws_vpc.main 10 | ``` 11 | -------------------------------------------------------------------------------- /terraform/environments/demo/main.tf: -------------------------------------------------------------------------------- 1 | variable "db_password" { 2 | description = "The password to use for TDS DB access" 3 | type = "string" 4 | } 5 | 6 | module "stack" { 7 | source = "../../template" 8 | 9 | env_name = "demo" 10 | db_id = "demo" 11 | instance_name_tag = "demo" 12 | db_password = "${var.db_password}" 13 | ssl_certificate_arn = "arn:aws:acm:us-west-2:951168128976:certificate/8e85a5c7-4540-4fd5-b4e2-913fc085243a" 14 | } 15 | 16 | # Backend definition can't have interpolation, so unfortunately this does need to be 17 | # duplicated between environment definitions. It also needs hardcoded values, hence 18 | # why we can't use the aws_region variable. 19 | terraform { 20 | backend "s3" { 21 | bucket = "encompass-terraform" 22 | key = "demo/terraform.tfstate" 23 | region = "us-west-2" 24 | 25 | # ddb table to hold tfstate locks. 
26 | dynamodb_table = "tflock" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /terraform/environments/prod/main.tf: -------------------------------------------------------------------------------- 1 | variable "db_password" { 2 | description = "The password to use for TDS DB access" 3 | type = "string" 4 | } 5 | 6 | locals { 7 | env_name = "prod" 8 | } 9 | 10 | module "stack" { 11 | source = "../../template" 12 | 13 | env_name = "${local.env_name}" 14 | db_id = "time-distance-database" # This name predates the use of terraform. 15 | instance_name_tag = "encompass-${local.env_name}" 16 | db_password = "${var.db_password}" 17 | } 18 | 19 | # Backend definition can't have interpolation, so unfortunately this does need to be 20 | # duplicated between environment definitions. It also needs hardcoded values, hence 21 | # why we can't use the aws_region variable. 22 | terraform { 23 | backend "s3" { 24 | bucket = "encompass-terraform" 25 | key = "prod/terraform.tfstate" 26 | region = "us-west-2" 27 | 28 | # ddb table to hold tfstate locks. 29 | dynamodb_table = "tflock" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /terraform/environments/qa/main.tf: -------------------------------------------------------------------------------- 1 | variable "db_password" { 2 | description = "The password to use for TDS DB access" 3 | type = "string" 4 | } 5 | 6 | locals { 7 | env_name = "qa" 8 | } 9 | 10 | module "stack" { 11 | source = "../../template" 12 | 13 | env_name = "${local.env_name}" 14 | db_id = "encompass-${local.env_name}" 15 | instance_name_tag = "encompass-${local.env_name}" 16 | db_password = "${var.db_password}" 17 | } 18 | 19 | # Backend definition can't have interpolation, so unfortunately this does need to be 20 | # duplicated between environment definitions. It also needs hardcoded values, hence 21 | # why we can't use the aws_region variable. 
22 | terraform { 23 | backend "s3" { 24 | bucket = "encompass-terraform" 25 | key = "qa/terraform.tfstate" 26 | region = "us-west-2" 27 | 28 | # ddb table to hold tfstate locks. 29 | dynamodb_table = "tflock" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /terraform/template/variables.tf: -------------------------------------------------------------------------------- 1 | variable "app_ami" { 2 | description = "The AMI ID to use for the application server" 3 | type = "string" 4 | default = "ami-9549f5ed" # TDS Base Image from 2018-1-10 in us-west-2 5 | } 6 | 7 | variable "db_snapshot_id" { 8 | description = "The snapshot ID to use for initialising the DB server" 9 | type = "string" 10 | default = "encompass-prod-2018-02-07" 11 | } 12 | 13 | variable "env_name" { 14 | description = "The environment identifier e.g. prod" 15 | type = "string" 16 | } 17 | 18 | variable "db_id" { 19 | description = "The RDS instance identifier e.g. philip-test" 20 | type = "string" 21 | } 22 | 23 | variable "instance_name_tag" { 24 | description = "The EC2 instance Name tag e.g. 
na-teddy" 25 | type = "string" 26 | } 27 | 28 | variable "app_security_group_name" { 29 | description = "The security group name to use for app servers" 30 | type = "string" 31 | default = "na_app_sg" 32 | } 33 | 34 | variable "db_security_group_name" { 35 | description = "The security group name to use for DB servers" 36 | type = "string" 37 | default = "na_db_sg" 38 | } 39 | 40 | variable "load_balancer_name" { 41 | # module.stack.aws_lb.na_app_elb: only alphanumeric characters and hyphens allowed in "name" 42 | description = "The ELB name to use for app servers" 43 | type = "string" 44 | default = "na-app-alb" 45 | } 46 | 47 | variable "default_subnets" { 48 | description = "Default subnets in us-west-2 for Bayes Impact default VPC" 49 | type = "list" 50 | default = ["subnet-ac9498ea", "subnet-7a9d531f"] 51 | } 52 | 53 | variable "default_vpc_cidr_block" { 54 | description = "CIDR block for default us-west-2 Bayes Impact VPC" 55 | type = "string" 56 | default = "172.31.0.0/16" 57 | } 58 | 59 | variable "db_password" { 60 | description = "The password to use for TDS DB access" 61 | type = "string" 62 | } 63 | 64 | variable "aws_region" { 65 | description = "The region to use for the AWS provider" 66 | type = "string" 67 | default = "us-west-2" # Default to Oregon 68 | } 69 | 70 | # Default value is for the nonprod wildcard cert. 71 | variable "ssl_certificate_arn" { 72 | description = "ARN for SSL cert to use in LBs" 73 | type = "string" 74 | default = "arn:aws:acm:us-west-2:951168128976:certificate/ff93f488-e4a6-4f0c-b494-755cfdade4ee" 75 | } 76 | -------------------------------------------------------------------------------- /test/performance/README.md: -------------------------------------------------------------------------------- 1 | # Performance Testing Resources 2 | ## Summary 3 | This directory contains resources for testing Encompass using [Apache JMeter](https://jmeter.apache.org). 
4 | 5 | ## Scenario 1 6 | This test simulates typical user behaviour through the HTTP interface. Each agent performs the following steps in sequence: 7 | 1. Requests the home page. 8 | 2. Selects a dataset at random and requests its representative points. 9 | 3. Requests the adequacies for that dataset using the driving distance metric. 10 | 4. Idles for 60-90 seconds to simulate playing with the frontend analysis and features. 11 | --------------------------------------------------------------------------------