├── .bumpversion.cfg
├── .dockerignore
├── .editorconfig
├── .env.example
├── .git-blame-ignore-revs
├── .github
├── PULL_REQUEST_TEMPLATE.md
├── release-drafter.yml
└── workflows
│ ├── ci.yaml
│ ├── create-release.yml
│ ├── dependabot-assignments.yml
│ ├── missing_signals.yaml
│ ├── performance-tests.yml
│ ├── release-helper.yml
│ └── update_gdocs_data.yml
├── .gitignore
├── Jenkinsfile
├── LICENSE
├── README.md
├── build.js
├── deploy.json
├── dev
├── docker
│ ├── database
│ │ ├── epidata
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── _init.sql
│ │ └── mysql.d
│ │ │ └── my.cnf
│ ├── python
│ │ ├── Dockerfile
│ │ └── setup.sh
│ └── redis
│ │ └── Dockerfile
└── local
│ ├── .dockerignore
│ ├── Makefile
│ ├── install.sh
│ ├── pyproject.toml
│ └── setup.cfg
├── devops
├── Dockerfile
└── gunicorn_conf.py
├── docs
├── .gitignore
├── 404.html
├── Gemfile
├── Gemfile.lock
├── _config.yml
├── _includes
│ └── head_custom.html
├── about_delphi.md
├── api
│ ├── 01meta.md
│ ├── README.md
│ ├── api_keys.md
│ ├── cdc.md
│ ├── client_libraries.md
│ ├── covid_hosp.md
│ ├── covid_hosp_facility.md
│ ├── covid_hosp_facility_lookup.md
│ ├── covidcast-signals
│ │ ├── _source-template.md
│ │ ├── chng.md
│ │ ├── covid-act-now.md
│ │ ├── covid-trends-and-impact-survey.md
│ │ ├── doctor-visits.md
│ │ ├── dsew-cpr.md
│ │ ├── ght.md
│ │ ├── google-survey.md
│ │ ├── google-symptoms.md
│ │ ├── hhs.md
│ │ ├── hospital-admissions.md
│ │ ├── indicator-combination.md
│ │ ├── jhu-csse.md
│ │ ├── nchs-mortality.md
│ │ ├── nhsn.md
│ │ ├── nssp.md
│ │ ├── quidel.md
│ │ ├── safegraph.md
│ │ ├── usa-facts.md
│ │ └── youtube-survey.md
│ ├── covidcast.md
│ ├── covidcast_changelog.md
│ ├── covidcast_clients.md
│ ├── covidcast_geography.md
│ ├── covidcast_licensing.md
│ ├── covidcast_meta.md
│ ├── covidcast_signals.md
│ ├── covidcast_times.md
│ ├── delphi.md
│ ├── delphi_research_group.md
│ ├── dengue_digital_surveillance.md
│ ├── dengue_nowcast.md
│ ├── digital_surveillance_sensors.md
│ ├── ecdc_ili.md
│ ├── epidata_signals.md
│ ├── flusurv.md
│ ├── fluview.md
│ ├── fluview_clinical.md
│ ├── fluview_meta.md
│ ├── gft.md
│ ├── ght.md
│ ├── ili_nearby_nowcast.md
│ ├── kcdc_ili.md
│ ├── missing_codes.md
│ ├── nidss_dengue.md
│ ├── nidss_flu.md
│ ├── norostat.md
│ ├── norostat_meta.md
│ ├── paho_dengue.md
│ ├── privacy_statement.md
│ ├── quidel.md
│ ├── twitter.md
│ └── wiki.md
├── developer_support.md
├── epidata_development.md
├── favicon.ico
├── index.md
├── new_endpoint_tutorial.md
├── related_work.md
└── symptom-survey
│ ├── COVID Symptom Survey Variable Categories.pdf
│ ├── codebook.csv
│ ├── coding.md
│ ├── collaboration-revision.md
│ ├── contingency-codebook.csv
│ ├── contingency-tables.md
│ ├── data-access.md
│ ├── end-of-survey.md
│ ├── index.md
│ ├── limitations.md
│ ├── modules.md
│ ├── problems.md
│ ├── publications.md
│ ├── screenshots
│ ├── cyberduck-1.png
│ ├── cyberduck-2.png
│ ├── winscp-small1.png
│ └── winscp-small2.png
│ ├── server-access.md
│ ├── survey-files.md
│ ├── survey-utils.R
│ ├── symptom-survey-weights.pdf
│ ├── waves
│ ├── CMU CTIS Wave 12 Full Launch.docx
│ ├── CMU CTIS Wave 12 Full Launch.pdf
│ ├── CMU Survey Wave 10.docx
│ ├── CMU Survey Wave 10.pdf
│ ├── CMU Survey Wave 11.docx
│ ├── CMU Survey Wave 11.pdf
│ ├── CMU Survey Wave 12.docx
│ ├── CMU Survey Wave 12.pdf
│ ├── CMU Survey Wave 6.docx
│ ├── CMU Survey Wave 6.pdf
│ ├── CMU Survey Wave 8.docx
│ ├── CMU Survey Wave 8.pdf
│ ├── CTIS US Wave 13.docx
│ ├── CTIS US Wave 13.pdf
│ ├── Survey_of_COVID-Like_Illness_-_TODEPLOY-_US_Expansion_-_With_Translations.docx
│ ├── Survey_of_COVID-Like_Illness_-_TODEPLOY-_US_Expansion_-_With_Translations.pdf
│ ├── Survey_of_COVID-Like_Illness_-_TODEPLOY_2020-04-06.docx
│ ├── Survey_of_COVID-Like_Illness_-_TODEPLOY_2020-04-06.pdf
│ ├── Survey_of_COVID-Like_Illness_-_TODEPLOY__-_US_Expansion.docx
│ ├── Survey_of_COVID-Like_Illness_-_TODEPLOY__-_US_Expansion.pdf
│ ├── Survey_of_COVID-Like_Illness_-_Wave_4.docx
│ ├── Survey_of_COVID-Like_Illness_-_Wave_4.pdf
│ ├── Survey_of_COVID-Like_Illness_-_Wave_4.qsf
│ ├── Survey_of_COVID-Like_Illness_-_Wave_5.docx
│ ├── Survey_of_COVID-Like_Illness_-_Wave_5.pdf
│ ├── Survey_of_COVID-Like_Illness_-_Wave_7.docx
│ └── Survey_of_COVID-Like_Illness_-_Wave_7.pdf
│ └── weights.md
├── integrations
├── __init__.py
├── acquisition
│ ├── covid_hosp
│ │ ├── facility
│ │ │ ├── __init__.py
│ │ │ └── test_scenarios.py
│ │ ├── state_daily
│ │ │ ├── __init__.py
│ │ │ └── test_scenarios.py
│ │ └── state_timeseries
│ │ │ ├── __init__.py
│ │ │ └── test_scenarios.py
│ └── covidcast
│ │ ├── __init__.py
│ │ ├── delete_batch.csv
│ │ ├── test_coverage_crossref_update.py
│ │ ├── test_covidcast_meta_caching.py
│ │ ├── test_csv_uploading.py
│ │ ├── test_db.py
│ │ └── test_delete_batch.py
├── client
│ ├── __init__.py
│ ├── test_delphi_epidata.R
│ ├── test_delphi_epidata.html
│ ├── test_delphi_epidata.js
│ └── test_delphi_epidata.py
├── server
│ ├── __init__.py
│ ├── test_api_keys.py
│ ├── test_cdc.py
│ ├── test_covid_hosp.py
│ ├── test_covidcast.py
│ ├── test_covidcast_endpoints.py
│ ├── test_covidcast_meta.py
│ ├── test_delphi.py
│ ├── test_dengue_nowcast.py
│ ├── test_dengue_sensors.py
│ ├── test_ecdc_ili.py
│ ├── test_flusurv.py
│ ├── test_fluview.py
│ ├── test_fluview_clinical.py
│ ├── test_fluview_meta.py
│ ├── test_gft.py
│ ├── test_ght.py
│ ├── test_kcdc_ili.py
│ ├── test_meta.py
│ ├── test_nidss_dengue.py
│ ├── test_nidss_flu.py
│ ├── test_norostat.py
│ ├── test_nowcast.py
│ ├── test_paho_dengue.py
│ ├── test_quidel.py
│ ├── test_sensors.py
│ ├── test_signal_dashboard.py
│ ├── test_twitter.py
│ └── test_wiki.py
└── test_deploy_syntax.py
├── labels
├── articles.txt
├── cities.txt
├── ecdc_regions.txt
├── flusurv_locations.txt
├── nidss_locations.txt
├── nidss_regions.txt
├── regions.txt
└── states.txt
├── mypy.ini
├── package-lock.json
├── package.json
├── pyproject.toml
├── requirements.api.txt
├── requirements.dev.txt
├── scripts
├── report_missing_covidcast_meta.py
└── signal_spreadsheet_updater.R
├── src
├── acquisition
│ ├── cdcp
│ │ ├── cdc_dropbox_receiver.py
│ │ ├── cdc_extract.py
│ │ └── cdc_upload.py
│ ├── covid_hosp
│ │ ├── README.md
│ │ ├── common
│ │ │ ├── README.md
│ │ │ ├── database.py
│ │ │ ├── network.py
│ │ │ ├── test_utils.py
│ │ │ └── utils.py
│ │ ├── facility
│ │ │ ├── README.md
│ │ │ ├── database.py
│ │ │ ├── network.py
│ │ │ └── update.py
│ │ ├── state_daily
│ │ │ ├── README.md
│ │ │ ├── database.py
│ │ │ ├── network.py
│ │ │ └── update.py
│ │ └── state_timeseries
│ │ │ ├── README.md
│ │ │ ├── database.py
│ │ │ ├── network.py
│ │ │ └── update.py
│ ├── covidcast
│ │ ├── csv_importer.py
│ │ ├── csv_to_database.py
│ │ ├── data_dir_readme.md
│ │ ├── database.py
│ │ ├── file_archiver.py
│ │ └── test_utils.py
│ ├── ecdc
│ │ ├── ecdc_db_update.py
│ │ └── ecdc_ili.py
│ ├── flusurv
│ │ ├── flusurv.py
│ │ └── flusurv_update.py
│ ├── fluview
│ │ ├── README.md
│ │ ├── fluview.py
│ │ ├── fluview_locations.py
│ │ ├── fluview_notify.py
│ │ ├── fluview_update.py
│ │ └── impute_missing_values.py
│ ├── ght
│ │ ├── ght_update.py
│ │ └── google_health_trends.py
│ ├── kcdc
│ │ └── kcdc_update.py
│ ├── nidss
│ │ ├── taiwan_nidss.py
│ │ └── taiwan_update.py
│ ├── paho
│ │ ├── paho_db_update.py
│ │ └── paho_download.py
│ ├── quidel
│ │ ├── quidel.py
│ │ └── quidel_update.py
│ ├── twtr
│ │ ├── healthtweets.py
│ │ ├── pageparser.py
│ │ └── twitter_update.py
│ └── wiki
│ │ ├── __init__.py
│ │ ├── create_wiki_meta_table.sql
│ │ ├── create_wiki_raw_table.sql
│ │ ├── create_wiki_table.sql
│ │ ├── wiki.py
│ │ ├── wiki_download.py
│ │ ├── wiki_extract.py
│ │ ├── wiki_update.py
│ │ └── wiki_util.py
├── client
│ ├── delphi_epidata.R
│ ├── delphi_epidata.d.ts
│ ├── delphi_epidata.js
│ ├── delphi_epidata.py
│ └── packaging
│ │ ├── npm
│ │ ├── .gitignore
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── tests
│ │ │ ├── __snapshots__
│ │ │ └── delphi_epidata.spec.js.snap
│ │ │ └── delphi_epidata.spec.js
│ │ └── pypi
│ │ ├── .bumpversion.cfg
│ │ ├── .gitignore
│ │ ├── CHANGELOG.md
│ │ ├── LICENSE
│ │ ├── README.md
│ │ ├── delphi_epidata
│ │ └── __init__.py
│ │ └── pyproject.toml
├── common
│ ├── covidcast_row.py
│ └── integration_test_base_class.py
├── ddl
│ ├── api_user.sql
│ ├── cdc.sql
│ ├── covid_hosp.sql
│ ├── ecdc_ili.sql
│ ├── fluview.sql
│ ├── forecasts.sql
│ ├── gft.sql
│ ├── ght.sql
│ ├── kcdc_ili.sql
│ ├── migrations
│ │ ├── covid_hosp_facility_v0.2-v0.3.sql
│ │ ├── covid_hosp_facility_v0.3-v0.3.1.sql
│ │ ├── covid_hosp_meta_v0.4.4-v0.4.5.sql
│ │ ├── covid_hosp_state_v0.2-v0.3.sql
│ │ ├── fluview_clinical_v0.1.sql
│ │ └── v4_renaming.sql
│ ├── nidss.sql
│ ├── nowcasts.sql
│ ├── paho_dengue.sql
│ ├── quidel.sql
│ ├── sensors.sql
│ ├── signal_dashboard.sql
│ ├── twitter.sql
│ ├── v4_schema.sql
│ ├── v4_schema_aliases.sql
│ └── wiki.sql
├── maintenance
│ ├── coverage_crossref_updater.py
│ ├── covidcast_meta_cache_updater.py
│ ├── delete_batch.py
│ ├── remove_outdated_keys.py
│ ├── signal_dash_data_generator.py
│ └── update_last_usage.py
└── server
│ ├── .htaccess
│ ├── README.md
│ ├── _common.py
│ ├── _config.py
│ ├── _db.py
│ ├── _exceptions.py
│ ├── _limiter.py
│ ├── _pandas.py
│ ├── _params.py
│ ├── _printer.py
│ ├── _query.py
│ ├── _security.py
│ ├── _validate.py
│ ├── admin
│ ├── __init__.py
│ ├── api_key_form_script.js
│ ├── models.py
│ └── templates
│ │ └── index.html
│ ├── covidcast_issues_migration
│ ├── .gitignore
│ ├── proc_db_backups_pd.py
│ └── requirements.txt
│ ├── endpoints
│ ├── __init__.py
│ ├── admin.py
│ ├── cdc.py
│ ├── covid_hosp_facility.py
│ ├── covid_hosp_facility_lookup.py
│ ├── covid_hosp_state_timeseries.py
│ ├── covidcast.py
│ ├── covidcast_meta.py
│ ├── covidcast_utils
│ │ ├── __init__.py
│ │ ├── dashboard_signals.py
│ │ ├── db_signals.csv
│ │ ├── db_sources.csv
│ │ ├── descriptions.raw.txt
│ │ ├── meta.py
│ │ ├── model.py
│ │ ├── questions.raw.txt
│ │ └── trend.py
│ ├── delphi.py
│ ├── dengue_nowcast.py
│ ├── dengue_sensors.py
│ ├── ecdc_ili.py
│ ├── flusurv.py
│ ├── fluview.py
│ ├── fluview_clinicial.py
│ ├── fluview_meta.py
│ ├── gft.py
│ ├── ght.py
│ ├── ilinet.py
│ ├── kcdc_ili.py
│ ├── meta.py
│ ├── meta_norostat.py
│ ├── nidss_dengue.py
│ ├── nidss_flu.py
│ ├── norostat.py
│ ├── nowcast.py
│ ├── paho_dengue.py
│ ├── quidel.py
│ ├── sensors.py
│ ├── signal_dashboard_coverage.py
│ ├── signal_dashboard_status.py
│ ├── twitter.py
│ └── wiki.py
│ ├── index.html
│ ├── main.py
│ ├── simulate_api_response.py
│ └── utils
│ ├── __init__.py
│ └── dates.py
├── tasks.py
├── testdata
└── acquisition
│ └── covid_hosp
│ ├── common
│ ├── metadata.csv
│ └── metadata.json
│ ├── facility
│ ├── dataset.csv
│ ├── dataset_update_facility.csv
│ ├── metadata.csv
│ └── metadata_update_facility.csv
│ ├── state_daily
│ ├── dataset.csv
│ ├── metadata.csv
│ └── metadata2.csv
│ └── state_timeseries
│ ├── dataset.csv
│ ├── dataset2.csv
│ ├── metadata.csv
│ └── metadata2.csv
└── tests
├── acquisition
├── covid_hosp
│ ├── common
│ │ ├── __init__.py
│ │ ├── test_database.py
│ │ ├── test_network.py
│ │ └── test_utils.py
│ ├── facility
│ │ ├── __init__.py
│ │ ├── test_database.py
│ │ ├── test_network.py
│ │ └── test_update.py
│ ├── state_daily
│ │ ├── __init__.py
│ │ ├── test_database.py
│ │ ├── test_network.py
│ │ └── test_update.py
│ └── state_timeseries
│ │ ├── __init__.py
│ │ ├── test_database.py
│ │ ├── test_network.py
│ │ └── test_update.py
├── covidcast
│ ├── __init__.py
│ ├── test_csv_importer.py
│ ├── test_csv_to_database.py
│ ├── test_database.py
│ └── test_file_archiver.py
├── flusurv
│ ├── __init__.py
│ ├── test_flusurv.py
│ └── test_flusurv_update.py
└── fluview
│ ├── __init__.py
│ └── test_impute_missing_values.py
├── client
├── __init__.py
└── test_delphi_epidata.py
├── common
└── test_covidcast_row.py
├── maintenance
├── __init__.py
├── test_covidcast_meta_cache_updater.py
└── test_signal_dash_data_generator.py
└── server
├── __init__.py
├── dev_test_granular_sensor_authentication.py
├── endpoints
├── covidcast_utils
│ └── test_trend.py
├── test_covidcast.py
└── test_nidss_flu.py
├── test_exceptions.py
├── test_pandas.py
├── test_params.py
├── test_query.py
├── test_security.py
├── test_validate.py
└── utils
└── test_dates.py
/.bumpversion.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 4.1.33
3 | commit = False
4 | tag = False
5 |
6 | [bumpversion:file:src/server/_config.py]
7 |
8 | [bumpversion:file:src/client/delphi_epidata.js]
9 |
10 | [bumpversion:file:src/client/delphi_epidata.R]
11 |
12 | [bumpversion:file:src/client/packaging/npm/package.json]
13 |
14 | [bumpversion:file:dev/local/setup.cfg]
15 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | /delphi-epidata
2 | **/.mypy_cache
3 | /.github
4 | /docs
5 | **/__pycache__
6 | **/.pytest_cache
7 | **/node_modules
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig helps developers define and maintain consistent
2 | # coding styles between different editors and IDEs
3 | # editorconfig.org
4 |
5 | root = true
6 |
7 | [*]
8 | # We recommend you keep these unchanged
9 | end_of_line = lf
10 | charset = utf-8
11 | trim_trailing_whitespace = true
12 | insert_final_newline = true
13 |
14 |
15 | [*.py]
16 | # Change these settings to your own preference
17 | indent_style = space
18 | indent_size = 4
19 |
20 |
21 | [*.md]
22 | trim_trailing_whitespace = false
23 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | FLASK_DEBUG=True
2 | SQLALCHEMY_DATABASE_URI=sqlite:///test.db
3 | FLASK_SECRET=abc
4 | #API_KEY_REQUIRED_STARTING_AT=2021-07-30
5 | API_KEY_ADMIN_PASSWORD=abc
6 | API_KEY_REGISTER_WEBHOOK_TOKEN=abc
7 |
8 | # Sentry
9 | # If setting a Sentry DSN, note that the URL should NOT be quoted!
10 |
--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # style(black): format cdc acquisition
2 | 980b0b7e80c7923b79e14fee620645e680785703
3 | # style(black): format ecdc acquisition
4 | d1141d904da4e62992b97c92d5caebd8fadffd42
5 | # style(black): format flusurv acquisition
6 | 08af0f6b7bff85bbc2b193b63b5abf6a16ba03e4
7 | # style(black): format fluview acquisition
8 | 0133ef2042c4df8867e91595eb1f64873edb4632
9 | # style(black): format ght acquisition
10 | b8900a0bc846888885310911efd6e26459effa99
11 | # style(black): format kcdc acquisition
12 | a849384c884934b3b7c3c67b68aa6240277d6b6d
13 | # style(black): format nidss acquisition
14 | d04af3c02fda7708a16bec0952b1aa7475acaec7
15 | # style(black): format paho acquisition
16 | 7f60fbba572c1b6e5153a9ef216895bdc2f7f5b3
17 | # style(black): format quidel acquisition
18 | b9ceb400d9248c8271e8342275664ac5524e335d
19 | # style(black): format twitter acquisition
20 | 07ed83e5768f717ab0f9a62a9209e4e2cffa058d
21 | # style(black): format wiki acquisition
22 | 923852eafa86b8f8b182d499489249ba8f815843
23 | # lint: trailing whitespace changes
24 | 81179c5f144b8f25421e799e823e18cde43c84f9
25 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | addresses issue(s) #ISSUE
2 |
3 | ### Summary:
4 |
5 |
8 |
9 | ### Prerequisites:
10 |
11 | - [ ] Unless it is a documentation hotfix it should be merged against the `dev` branch
12 | - [ ] Branch is up-to-date with the branch to be merged with, i.e. `dev`
13 | - [ ] Build is successful
14 | - [ ] Code is cleaned up and formatted
15 |
--------------------------------------------------------------------------------
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name-template: "v$RESOLVED_VERSION"
2 | tag-template: "v$RESOLVED_VERSION"
3 | categories:
4 | - title: "🚀 API Changes"
5 | labels:
6 | - "api change"
7 | - title: "🚀 Python Client Changes"
8 | labels:
9 | - "python client"
10 | - title: "🚀 R Client Changes"
11 | labels:
12 | - "r client"
13 | - title: "🚀 JavaScript Client Changes"
14 | labels:
15 | - "js client"
16 | - "javascript"
17 | - title: "📕 Documentation"
18 | labels:
19 | - "documentation"
20 | - title: "🧰 Development"
21 | labels:
22 | - "chore"
23 | - "documentation"
24 | - "dependencies"
25 | - "acquisition"
26 | change-template: "- #$NUMBER $TITLE"
27 | change-title-escapes: '\<*_&`#@'
28 | template: |
29 | $CHANGES
30 |
31 | Thanks to $CONTRIBUTORS
32 |
--------------------------------------------------------------------------------
/.github/workflows/dependabot-assignments.yml:
--------------------------------------------------------------------------------
1 | name: Dependabot auto-assign reviewer
2 | on: pull_request
3 |
4 | permissions:
5 | pull-requests: write
6 |
7 | jobs:
8 | dependabot:
9 | runs-on: ubuntu-latest
10 | env:
11 | GH_TOKEN: ${{ secrets.CMU_DELPHI_AUTOMATION_MACHINE_DEPENDABOT_PAT }}
12 | if: ${{ github.actor == 'dependabot[bot]' }}
13 | steps:
14 | - name: Assign team to PR
15 | run: gh pr edit "$PR_URL" --add-reviewer "cmu-delphi/code-reviewers"
16 | env:
17 | PR_URL: ${{github.event.pull_request.html_url}}
18 |
--------------------------------------------------------------------------------
/.github/workflows/missing_signals.yaml:
--------------------------------------------------------------------------------
1 | name: Missing Signal Detector
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | jobs:
7 | build:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - name: Check out code
11 | uses: actions/checkout@v2
12 | with:
13 | ref: dev
14 | - name: Set up Python 3.8
15 | uses: actions/setup-python@v2
16 | with:
17 | python-version: 3.8
18 | - name: Install Dependencies
19 | run: pip install requests pandas
20 | - name: Run Missing Signals Detector
21 | run: python scripts/report_missing_covidcast_meta.py
22 | - name: Upload Missing Artifact
23 | if: failure()
24 | uses: actions/upload-artifact@v4
25 | with:
26 | name: missing_db_signals.csv
27 | path: missing_db_signals.csv
28 |
29 |
--------------------------------------------------------------------------------
/.github/workflows/update_gdocs_data.yml:
--------------------------------------------------------------------------------
1 | name: Update Google Docs Meta Data
2 | on:
3 | workflow_dispatch:
4 | jobs:
5 | update_gdocs:
6 | runs-on: ubuntu-latest
7 | steps:
8 | - name: Check out code
9 | uses: actions/checkout@v2
10 | with:
11 |             ref: dev
12 | ssh-key: ${{ secrets.CMU_DELPHI_DEPLOY_MACHINE_SSH }}
13 | - name: Set up Python 3.8
14 | uses: actions/setup-python@v2
15 | with:
16 | python-version: 3.8
17 | - uses: actions/cache@v4
18 | with:
19 | path: ~/.cache/pip
20 | key: ${{ runner.os }}-pipd-${{ hashFiles('requirements.dev.txt') }}
21 | restore-keys: |
22 | ${{ runner.os }}-pipd-
23 | - name: Install Dependencies
24 | run: |
25 | pip -V
26 | python -m pip install pip==22.0.2
27 | pip install -r requirements.dev.txt
28 | - name: Update Docs
29 | run: inv update-gdoc
30 | - name: Create pull request into dev
31 | uses: peter-evans/create-pull-request@v3
32 | with:
33 | branch: bot/update-docs
34 | commit-message: 'chore: update docs'
35 | title: Update Google Docs Meta Data
36 | labels: chore
37 | reviewers: melange396
38 | assignees: melange396
39 | body: |
40 | Updating Google Docs Meta Data
41 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | *.pyc
3 | *~
4 | \#*#
5 | .DS_Store
6 | /.vscode
7 | /delphi-epidata
8 | /.env
9 | *.db
10 | /build
11 | /node_modules
12 | .mypy_cache
13 | /missing_db_signals.csv
14 |
15 |
16 |
17 | ## Added by Tina to avoid tracking RStudio config files
18 | # History files
19 | .Rhistory
20 | .Rapp.history
21 |
22 | # Session Data files
23 | .RData
24 | .RDataTmp
25 |
26 | # User-specific files
27 | .Ruserdata
28 |
29 | # RStudio files
30 | .Rproj.user/
31 |
32 | # OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3
33 | .httr-oauth
34 |
35 | # knitr and R markdown default cache directories
36 | *_cache/
37 | /cache/
38 |
39 | # R Environment Variables
40 | .Renviron
41 |
42 | # RStudio Connect folder
43 | rsconnect/
44 |
45 | *.Rproj
46 |
--------------------------------------------------------------------------------
/Jenkinsfile:
--------------------------------------------------------------------------------
1 | #!groovy
2 |
3 | // import shared library: https://github.com/cmu-delphi/jenkins-shared-library
4 | @Library('jenkins-shared-library') _
5 |
6 | pipeline {
7 |
8 | agent any
9 |
10 | stages {
11 |
12 | stage('Deploy to AWS') {
13 | when {
14 | branch "main"
15 | }
16 | steps {
17 | sh "echo This is a temporary no-op. A Jenkins job called \
18 | deploy-epidata-api-stack-aws is independently \
19 | configured to run whenever this pipeline stage \
20 | executes."
21 | }
22 | }
23 | }
24 |
25 | post {
26 | always {
27 | script {
28 | slackNotifier(currentBuild.currentResult)
29 | }
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2018 The Delphi Group at Carnegie Mellon University
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/build.js:
--------------------------------------------------------------------------------
1 | const { process, minimizeJs, rename } = require('delphi-cmu-buildtools');
2 |
3 | Promise.all([
4 | process('*.+(coffee|js|py|R)', [], { srcDir: './src/client', dstDir: './build/lib' }),
5 | process('*.js', [minimizeJs(), rename((f) => f.replace('.js', '.min.js'))], { srcDir: './src/client', dstDir: './build/lib' }),
6 | ]).then((r) => console.log(r.flat()));
--------------------------------------------------------------------------------
/dev/docker/database/epidata/Dockerfile:
--------------------------------------------------------------------------------
1 | # start with a standard percona mysql image
2 | FROM percona:ps-8
3 |
4 | # percona exits with the mysql user but we need root for additional setup
5 | USER root
6 |
7 | # use delphi's timezone
8 | RUN ln -s -f /usr/share/zoneinfo/America/New_York /etc/localtime
9 |
10 | # specify a development-only password for the database user "root"
11 | ENV MYSQL_ROOT_PASSWORD pass
12 |
13 | # create the `epidata` database
14 | ENV MYSQL_DATABASE epidata
15 |
16 | # create the `user` account with a development-only password
17 | ENV MYSQL_USER user
18 | ENV MYSQL_PASSWORD pass
19 |
20 | # provide DDL which will configure dev environment at container startup
21 | COPY repos/delphi/delphi-epidata/dev/docker/database/epidata/_init.sql /docker-entrypoint-initdb.d/
22 |
23 | # provide DDL which will create empty tables at container startup
24 | COPY repos/delphi/delphi-epidata/src/ddl/*.sql /docker-entrypoint-initdb.d/
25 |
26 | # provide additional configuration needed for percona
27 | COPY repos/delphi/delphi-epidata/dev/docker/database/mysql.d/*.cnf /etc/my.cnf.d/
28 |
29 | # grant access to SQL scripts
30 | RUN chmod o+r /docker-entrypoint-initdb.d/*.sql
31 |
32 | # restore mysql user for percona
33 | USER mysql
34 |
--------------------------------------------------------------------------------
/dev/docker/database/epidata/README.md:
--------------------------------------------------------------------------------
1 | # `delphi_database_epidata`
2 |
3 | This image extends a Percona database by:
4 |
5 | - adding the `user` account
6 | - adding the `epidata` & other appropriate databases
7 | - creating empty tables in those databases
8 |
9 | To start a container from this image, run:
10 |
11 | ```bash
12 | docker run --rm -p 13306:3306 \
13 | --network delphi-net --name delphi_database_epidata \
14 | delphi_database_epidata
15 | ```
16 |
17 | For debugging purposes, you can interactively connect to the database inside
18 | the container using a `mysql` client (either installed locally or supplied via
19 | a docker image) like this:
20 |
21 | ```bash
22 | mysql --user=user --password=pass --port 13306 --host 127.0.0.1 epidata
23 | ```
24 |
25 | Note that using host `localhost` may fail on some platforms as mysql will
26 | attempt, and fail, to use a Unix socket. Using `127.0.0.1`, which implies
27 | TCP/IP, works instead.
28 |
--------------------------------------------------------------------------------
/dev/docker/database/epidata/_init.sql:
--------------------------------------------------------------------------------
1 | CREATE DATABASE covid;
2 | GRANT ALL ON covid.* TO 'user';
3 |
--------------------------------------------------------------------------------
/dev/docker/database/mysql.d/my.cnf:
--------------------------------------------------------------------------------
1 | [mysqld]
2 | default_authentication_plugin=mysql_native_password
--------------------------------------------------------------------------------
/dev/docker/python/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8-buster
2 |
3 | RUN apt-get update && apt-get install -y r-base && Rscript -e "install.packages(c('httr','xml2'))"
4 |
5 | WORKDIR /usr/src/app
6 |
7 | COPY repos repos
8 | COPY repos/delphi/delphi-epidata/dev/docker/python/setup.sh .
9 |
10 | RUN ln -s -f /usr/share/zoneinfo/America/New_York /etc/localtime && \
11 | chmod -R o+r repos/ && \
12 | bash setup.sh && \
13 | pip install --no-cache-dir -r repos/delphi/delphi-epidata/requirements.api.txt -r repos/delphi/delphi-epidata/requirements.dev.txt
14 |
--------------------------------------------------------------------------------
/dev/docker/python/setup.sh:
--------------------------------------------------------------------------------
1 | # This script sets up the correct directory structure within the `delphi_img`
2 | # docker image.
3 |
4 | # Some notes on package structure:
5 | # - Python package names can't contain hyphens, so hyphens in repo names are
6 | # replaced with underscores in the package hierarchy. (An exception is the
7 | # repo `delphi-epidata`, which is renamed to simply `epidata`.)
8 | # - Repos are organized such that the main code for the package is inside of
9 | # a `src/` directory. When deployed, `src/` is elided. (An exception is the
10 | # legacy `undef-analysis` repo, which has sources at the top-level.)
11 |
12 | # bail if anything fails
13 | set -e
14 |
15 | # create python package `undefx`
16 | mkdir undefx
17 | mv repos/undefx/py3tester/src undefx/py3tester
18 | mv repos/undefx/undef-analysis undefx/undef_analysis
19 |
20 | # create python package `delphi`
21 | mkdir delphi
22 | mv repos/delphi/operations/src delphi/operations
23 | mv repos/delphi/utils/src delphi/utils
24 | mv repos/delphi/github-deploy-repo/src delphi/github_deploy_repo
25 | mv repos/delphi/delphi-epidata/src delphi/epidata
26 | mv repos/delphi/flu-contest/src delphi/flu_contest
27 | mv repos/delphi/nowcast/src delphi/nowcast
28 |
--------------------------------------------------------------------------------
/dev/docker/redis/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM redis
2 |
3 | ENV REDIS_PASSWORD=$REDIS_PASSWORD
4 |
5 | CMD ["sh", "-c", "exec redis-server --requirepass \"$REDIS_PASSWORD\""]
--------------------------------------------------------------------------------
/dev/local/.dockerignore:
--------------------------------------------------------------------------------
1 | # Ignore everything by default
2 | *
3 | # Don't ignore repos dir
4 | !repos
5 | # Ignore everything to do with git
6 | **/*.git
7 |
--------------------------------------------------------------------------------
/dev/local/install.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Bootstrap delphi-epidata development
3 | #
4 | # Downloads the repos needed for local delphi-epidata development into current dir
5 | # and provides a Makefile with Docker control commands
6 | # as well as pyproject/setup.cfg files for IDE mappings.
7 | #
8 | # Creates the directory structure:
9 | #
10 | # driver/
11 | # .dockerignore
12 | # Makefile
13 | # repos/
14 | # pyproject.toml
15 | # setup.cfg
16 | # delphi/
17 | # operations/
18 | # delphi-epidata/
19 | # utils/
20 | # flu-contest/
21 | # nowcast/
22 | # github-deploy-repo/
23 | # undefx/
24 | # py3tester/
25 | # undef-analysis/
26 | #
27 | # Leaves you in driver, the main workdir.
28 | #
29 |
30 |
31 | mkdir -p driver/repos/delphi
32 | cd driver/repos/delphi
33 | git clone https://github.com/cmu-delphi/operations
34 | git clone https://github.com/cmu-delphi/delphi-epidata
35 | git clone https://github.com/cmu-delphi/utils
36 | git clone https://github.com/cmu-delphi/flu-contest
37 | git clone https://github.com/cmu-delphi/nowcast
38 | git clone https://github.com/cmu-delphi/github-deploy-repo
39 | cd ../../
40 |
41 | mkdir -p repos/undefx
42 | cd repos/undefx
43 | git clone https://github.com/undefx/py3tester
44 | git clone https://github.com/undefx/undef-analysis
45 | cd ../../
46 |
47 | ln -s repos/delphi/delphi-epidata/dev/local/Makefile
48 | ln -s repos/delphi/delphi-epidata/dev/local/.dockerignore
49 | cd repos
50 | ln -s delphi/delphi-epidata/dev/local/pyproject.toml
51 | ln -s delphi/delphi-epidata/dev/local/setup.cfg
52 | cd -
53 |
--------------------------------------------------------------------------------
/dev/local/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=65", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
--------------------------------------------------------------------------------
/devops/Dockerfile:
--------------------------------------------------------------------------------
# Stage 1: build the JavaScript client assets with Node.
FROM node:lts-buster AS builder
WORKDIR /src
COPY . /src
RUN npm ci && npm run build

# Stage 2: the API server image (gunicorn + meinheld, Python 3.8).
FROM tiangolo/meinheld-gunicorn:python3.8
LABEL org.opencontainers.image.source=https://github.com/cmu-delphi/delphi-epidata

# The base image auto-loads /app/gunicorn_conf.py as the gunicorn config.
COPY ./devops/gunicorn_conf.py /app
RUN mkdir -p /app/delphi/epidata
COPY ./src/server /app/delphi/epidata/server
COPY ./src/common /app/delphi/epidata/common
# Client assets produced by the Node builder stage above.
COPY --from=builder ./src/build/lib/ /app/delphi/epidata/lib/

COPY requirements.api.txt /app/requirements_also.txt

# Single layer: set container timezone to US Eastern, drop any stale
# bytecode copied in from the build context, make the app world-readable,
# and install Python dependencies (base-image requirements + ours together).
RUN ln -s -f /usr/share/zoneinfo/America/New_York /etc/localtime \
      && rm -rf /app/delphi/epidata/__pycache__ \
      && chmod -R o+r /app/delphi/epidata \
      && pip install --no-cache-dir -r /tmp/requirements.txt -r requirements_also.txt
# the file /tmp/requirements.txt is created in the parent docker definition. (see:
#   https://github.com/tiangolo/meinheld-gunicorn-docker/blob/master/docker-images/python3.8.dockerfile#L5 )
# this combined requirements installation ensures all version constraints are accounted for.

# disable python stdout buffering
ENV PYTHONUNBUFFERED 1

# /entrypoint.sh and /start.sh are provided by the base image.
ENTRYPOINT [ "/entrypoint.sh" ]
CMD [ "/start.sh" ]
--------------------------------------------------------------------------------
/devops/gunicorn_conf.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import json
4 | import multiprocessing
5 | import os
6 |
7 | workers_per_core_str = os.getenv("WORKERS_PER_CORE", "2")
8 | web_concurrency_str = os.getenv("WEB_CONCURRENCY", None)
9 | host = os.getenv("HOST", "0.0.0.0")
10 | port = os.getenv("PORT", "80")
11 | bind_env = os.getenv("BIND", None)
12 | use_loglevel = os.getenv("LOG_LEVEL", "info")
13 | if bind_env:
14 | use_bind = bind_env
15 | else:
16 | use_bind = "{host}:{port}".format(host=host, port=port)
17 |
18 | cores = multiprocessing.cpu_count()
19 | workers_per_core = float(workers_per_core_str)
20 | default_web_concurrency = workers_per_core * cores
21 | if web_concurrency_str:
22 | web_concurrency = int(web_concurrency_str)
23 | assert web_concurrency > 0
24 | else:
25 | web_concurrency = int(default_web_concurrency)
26 |
27 | # Gunicorn config variables
28 | loglevel = use_loglevel
29 | workers = web_concurrency
30 | bind = use_bind
31 | keepalive = 120
32 | timeout = 300
33 |
34 | errorlog = "-"
35 | accesslog = "-"
36 | capture_output = True
37 | enable_stdio_inheritance = True
38 |
39 | # disable limit request line till 414 issue is resolved
40 | limit_request_line = 0
41 | limit_request_field_size = 0
42 |
43 |
44 | # For debugging and testing
45 | log_data = {
46 | "loglevel": loglevel,
47 | "workers": workers,
48 | "bind": bind,
49 | # Additional, non-gunicorn variables
50 | "workers_per_core": workers_per_core,
51 | "host": host,
52 | "port": port,
53 | }
54 | print(json.dumps(log_data), flush=True)
55 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | _site
2 | .sass-cache
3 | .jekyll-metadata
4 |
--------------------------------------------------------------------------------
/docs/404.html:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | ---
4 |
5 |
18 |
19 |
20 |
404
21 |
22 |
Page not found :(
23 |
The requested page could not be found.
24 |
25 |
--------------------------------------------------------------------------------
/docs/Gemfile:
--------------------------------------------------------------------------------
1 | source "https://rubygems.org"
2 |
3 | # Hello! This is where you manage which Jekyll version is used to run.
4 | # When you want to use a different version, change it below, save the
5 | # file and run `bundle install`. Run Jekyll with `bundle exec`, like so:
6 | #
7 | # bundle exec jekyll serve
8 | #
9 | # This will help ensure the proper Jekyll version is running.
10 | # Happy Jekylling!
11 | #gem "jekyll", "~> 3.8.7"
12 |
13 | # This is the default theme for new Jekyll sites. You may change this to anything you like.
14 | gem "minima", "~> 2.0"
15 |
16 | # If you want to use GitHub Pages, remove the "gem "jekyll"" above and
17 | # uncomment the line below. To upgrade, run `bundle update github-pages`.
18 | gem "github-pages", group: :jekyll_plugins
19 |
20 | # If you have any plugins, put them here!
21 | group :jekyll_plugins do
22 | gem "jekyll-feed", "~> 0.6"
23 | end
24 |
25 | # Windows does not include zoneinfo files, so bundle the tzinfo-data gem
26 | # and associated library.
27 | install_if -> { RUBY_PLATFORM =~ %r!mingw|mswin|java! } do
28 | gem "tzinfo", "~> 1.2"
29 | gem "tzinfo-data"
30 | end
31 |
32 | # Performance-booster for watching directories on Windows
33 | gem "wdm", "~> 0.1.0", :install_if => Gem.win_platform?
34 |
35 |
--------------------------------------------------------------------------------
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | # Welcome to Jekyll!
2 | #
3 | # This config file is meant for settings that affect your whole blog, values
4 | # which you are expected to set up once and rarely edit after that. If you find
5 | # yourself editing this file very often, consider using Jekyll's data files
6 | # feature for the data you need to update frequently.
7 | #
8 | # For technical reasons, this file is *NOT* reloaded automatically when you use
9 | # 'bundle exec jekyll serve'. If you change this file, please restart the server process.
10 |
11 | # Site settings
12 | # These are used to personalize your new site. If you look in the HTML files,
13 | # you will see them accessed via {{ site.title }}, {{ site.email }}, and so on.
14 | # You can create any custom variable you would like, and they will be accessible
15 | # in the templates via {{ site.myvariable }}.
16 | title: Delphi Epidata API
17 | email: your-email@example.com
18 | description: >- # this means to ignore newlines until "baseurl:"
19 | Documentation for the Delphi research group\'s Epidata API, providing access
20 | to curated streams of epidemic data and forecasts.
21 | baseurl: "/delphi-epidata" # the subpath of your site, e.g. /blog
22 | url: "" # the base hostname & protocol for your site, e.g. http://example.com
23 | twitter_username: jekyllrb
24 | github_username: jekyll
25 |
26 | # Build settings
27 | markdown: kramdown
28 | plugins:
29 | - jekyll-relative-links
30 |
31 | remote_theme: pmarsceill/just-the-docs
32 |
33 | ## Just the Docs config
34 | # The theme compresses HTML (remove newlines, whitespace) by default, but this
35 | # breaks KaTeX, since kramdown inserts % in some places in the output.
36 | compress_html:
37 | ignore:
38 | envs: "all"
39 |
40 | aux_links:
41 | "CMU Delphi Research Group":
42 | - "https://delphi.cmu.edu/"
43 |
44 | footer_content: "Copyright © 2020, Delphi Research Group at Carnegie Mellon University"
45 |
46 | heading_anchors: true
47 |
48 | favicon_ico: "favicon.ico"
49 | # Exclude from processing.
50 | # The following items will not be processed, by default. Create a custom list
51 | # to override the default setting.
52 | # exclude:
53 | # - Gemfile
54 | # - Gemfile.lock
55 | # - node_modules
56 | # - vendor/bundle/
57 | # - vendor/cache/
58 | # - vendor/gems/
59 | # - vendor/ruby/
60 |
--------------------------------------------------------------------------------
/docs/_includes/head_custom.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
5 |
--------------------------------------------------------------------------------
/docs/about_delphi.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: About Delphi
3 | nav_order: 60
4 | has_children: false
5 | ---
6 |
7 | # About The Delphi Group
8 |
9 | The Delphi Group's mission is to develop the theory and practice of epidemic detection, tracking and forecasting, and their use in decision making, both public and private.
10 |
11 | Our vision is to make this technology as useful as weather forecasting is today.
12 |
13 | Find out more about the Delphi Research Group on our official webpage.
14 | Please read our blog for additional news, findings, and musings from the team.
15 |
16 |
--------------------------------------------------------------------------------
/docs/api/cdc.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: inactive CDC
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 2
6 | ---
7 |
8 | # CDC
9 |
10 | This is the API documentation for accessing the CDC (`cdc`) endpoint of
11 | [Delphi](https://delphi.cmu.edu/)'s epidemiological data.
12 |
13 | General topics not specific to any particular endpoint are discussed in the
14 | [API overview](README.md). Such topics include:
15 | [contributing](README.md#contributing), [citing](README.md#citing), and
16 | [data licensing](README.md#data-licensing).
17 |
18 | ## CDC Data
19 |
20 | ...
21 |
22 | # The API
23 |
24 | The base URL is: https://api.delphi.cmu.edu/epidata/cdc/
25 |
26 | See [this documentation](README.md) for details on specifying epiweeks, dates, and lists.
27 |
28 | ## Parameters
29 |
30 | ### Required
31 |
32 | | Parameter | Description | Type |
33 | | --- | --- | --- |
34 | | `auth` | password | string |
35 | | `epiweeks` | epiweeks | `list` of epiweeks |
36 | | `locations` | locations | `list` of [region](https://github.com/cmu-delphi/delphi-epidata/blob/main/labels/regions.txt)/[state](https://github.com/cmu-delphi/delphi-epidata/blob/main/labels/states.txt) labels |
37 |
38 | ## Response
39 |
40 | | Field | Description | Type |
41 | |-----------|-----------------------------------------------------------------|------------------|
42 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
43 | | `epidata` | list of results | array of objects |
44 | | ... | ... | ... |
45 | | `message` | `success` or error message | string |
46 |
47 | # Example URLs
48 |
49 |
50 |
51 | # Code Samples
52 |
53 |
54 |
--------------------------------------------------------------------------------
/docs/api/covidcast_clients.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: API Clients
3 | parent: Main Endpoint (COVIDcast)
4 | nav_order: 1
5 | ---
6 |
7 | # Main Endpoint API Clients
8 |
9 | To access COVIDcast data, we recommend using the following client libraries:
10 |
11 | - R: [epidatr](https://cmu-delphi.github.io/epidatr/)
12 | - Python: [epidatpy](https://cmu-delphi.github.io/epidatpy/) (recommended) and [delphi-epidata](https://pypi.org/project/delphi-epidata/).
13 |
14 | These packages provide a convenient way to obtain COVIDcast data as a data frame
15 | ready to be used in further analyses and provide convenient mapping and analysis
16 | functions. For installation instructions and examples, consult their respective
17 | webpages.
18 |
19 | To access Epidata datasets on other diseases, visit our
20 | [Epidata API Client Libraries](client_libraries.md) documentation.
21 |
--------------------------------------------------------------------------------
/docs/api/delphi_research_group.md:
--------------------------------------------------------------------------------
1 | https://delphi.cmu.edu/
--------------------------------------------------------------------------------
/docs/api/dengue_digital_surveillance.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: inactive Dengue Digital Surveillance
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 2
6 | permalink: api/dengue_sensors.html
7 | ---
8 |
9 | # Dengue Digital Surveillance Sensors
10 |
11 | This is the API documentation for accessing the Dengue Digital Surveillance
12 | Sensors (`dengue_sensors`) endpoint of [Delphi](https://delphi.cmu.edu/)'s
13 | epidemiological data.
14 |
15 | General topics not specific to any particular endpoint are discussed in the
16 | [API overview](README.md). Such topics include:
17 | [contributing](README.md#contributing), [citing](README.md#citing), and
18 | [data licensing](README.md#data-licensing).
19 |
20 | ## Delphi's Dengue Digital Surveillance Sensors Data
21 |
22 | ...
23 |
24 | # The API
25 |
26 | The base URL is: https://api.delphi.cmu.edu/epidata/dengue_sensors/
27 |
28 | See [this documentation](README.md) for details on specifying epiweeks, dates, and lists.
29 |
30 | ## Parameters
31 |
32 | ### Required
33 |
34 |
35 |
36 | ## Response
37 |
38 | | Field | Description | Type |
39 | |-----------|-----------------------------------------------------------------|------------------|
40 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
41 | | `epidata` | list of results | array of objects |
42 | | ... | ... | ... |
43 | | `message` | `success` or error message | string |
44 |
45 | # Example URLs
46 |
47 |
48 |
49 | # Code Samples
50 |
51 |
52 |
--------------------------------------------------------------------------------
/docs/api/dengue_nowcast.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: inactive Dengue Nowcast
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 2
6 | ---
7 |
8 | # Delphi's Dengue Nowcast
9 |
10 | This is the API documentation for accessing the Dengue Nowcast
11 | (`dengue_nowcast`) endpoint of [Delphi](https://delphi.cmu.edu/)'s
12 | epidemiological data.
13 |
14 | General topics not specific to any particular endpoint are discussed in the
15 | [API overview](README.md). Such topics include:
16 | [contributing](README.md#contributing), [citing](README.md#citing), and
17 | [data licensing](README.md#data-licensing).
18 |
19 | ## Delphi's Dengue Nowcast Data
20 |
21 | ...
22 |
23 | # The API
24 |
25 | The base URL is: https://api.delphi.cmu.edu/epidata/dengue_nowcast/
26 |
27 | See [this documentation](README.md) for details on specifying epiweeks, dates, and lists.
28 |
29 | ## Parameters
30 |
31 | ### Required
32 |
33 |
34 |
35 | ## Response
36 |
37 | | Field | Description | Type |
38 | |-----------|-----------------------------------------------------------------|------------------|
39 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
40 | | `epidata` | list of results | array of objects |
41 | | ... | ... | ... |
42 | | `message` | `success` or error message | string |
43 |
44 | # Example URLs
45 |
46 |
47 |
48 | # Code Samples
49 |
50 |
51 |
--------------------------------------------------------------------------------
/docs/api/epidata_signals.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Data Sources and Signals
3 | parent: Other Endpoints (COVID-19 and Other Diseases)
4 | nav_order: 2
5 | has_children: true
6 |
7 | ---
8 |
9 | # Delphi's Epidata Data Sources and Signals
10 |
11 | Delphi's Epidata endpoints include the following data sources.
12 | Most of these sources are updated weekly.
13 | Unlike [the main endpoint](covidcast.md), the format of different Epidata endpoints varies.
14 | The API for retrieving data from these sources is described in the
15 | [API clients documentation](client_libraries.md).
16 |
17 | To obtain many of these signals and update them daily, Delphi has written
18 | extensive software to obtain data from various sources, aggregate the data,
19 | calculate statistical estimates, and format the data to be shared through the Delphi Epidata API.
20 | This code is
21 | [open source and available on GitHub](https://github.com/cmu-delphi/delphi-epidata),
22 | and contributions are welcome.
23 |
24 |
--------------------------------------------------------------------------------
/docs/api/fluview_meta.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: FluView Metadata
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 1
6 | ---
7 |
8 | # FluView Metadata
9 |
10 | This is the API documentation for accessing the FluView metadata
11 | (`fluview_meta`) endpoint of [Delphi](https://delphi.cmu.edu/)'s epidemiological
12 | data.
13 |
14 | General topics not specific to any particular endpoint are discussed in the
15 | [API overview](README.md). Such topics include:
16 | [contributing](README.md#contributing), [citing](README.md#citing), and
17 | [data licensing](README.md#data-licensing).
18 |
19 | ## FluView Metadata
20 |
21 | Returns information about the [`fluview` endpoint](fluview.md).
22 |
23 | # The API
24 |
25 | The base URL is: https://api.delphi.cmu.edu/epidata/fluview_meta/
26 |
27 | See [this documentation](README.md) for details on specifying epiweeks, dates, and lists.
28 |
29 | ## Parameters
30 |
31 | There are no parameters for this endpoint.
32 |
33 | ## Response
34 |
35 | | Field | Description | Type |
36 | |---------------------------|-----------------------------------------------------------------|------------------|
37 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
38 | | `epidata` | list of results | array of objects |
39 | | `epidata[].latest_update` | date when data was last updated | string |
40 | | `epidata[].latest_issue` | most recent "issue" (epiweek) in the data | integer |
41 | | `epidata[].table_rows` | total number of rows in the table | integer |
42 | | `message` | `success` or error message | string |
43 |
44 | # Example URLs
45 |
46 | ### FluView Metadata
47 | https://api.delphi.cmu.edu/epidata/fluview_meta/
48 |
49 | ```json
50 | {
51 | "result": 1,
52 | "epidata": [
53 | {
54 | "latest_update": "2020-04-24",
55 | "latest_issue": 202016,
56 | "table_rows": 957673
57 | }
58 | ],
59 | "message": "success"
60 | }
61 | ```
62 |
--------------------------------------------------------------------------------
/docs/api/missing_codes.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: NaN Missing Codes
3 | parent: Main Endpoint (COVIDcast)
4 | nav_order: 7
5 | ---
6 |
7 | # Missing Value Coding
8 |
9 | Occasionally, data will be missing from our database and will be explicitly coded as NaN.
10 | In these cases, we strive to supply our best-known reason for the value to be missing by
11 | providing an integer code in the corresponding `missing_` column (i.e. `missing_value`
12 | corresponds to the `value` column). The integer codes are as follows
13 |
14 | | Code | Name | Description |
15 | | --- | --- | --- |
16 | | 0 | NOT MISSING | This is the default value for when the field is not missing. |
17 | | 1 | NOT APPLICABLE | This value is used when the field is not expected to have a value (e.g. stderr for a signal that is not estimated from a sample). |
18 | | 2 | REGION EXCEPTION | This value is used when the field is not reported because the particular indicator does not serve the geographical region requested. |
19 | | 3 | CENSORED | This value is used when the field has been censored for data privacy reasons. This could be due to reasons such as low sample sizes or simply a requirement from our data partners. |
20 | | 4 | DELETED | This value is used when the field was present in previous issues, but is no longer reported. Deletions can arise due to bug fixes, changing censorship requirements, or data corrections from the source. |
21 | | 5 | OTHER | This value is used when the field is missing, but does not fall into any of the categories above. |
22 |
23 | These codes are supplied as part of the `delphi_utils` Python library (see [here](https://pypi.org/project/delphi-utils/)).
24 |
--------------------------------------------------------------------------------
/docs/api/norostat.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: inactive NoroSTAT
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 2
6 | ---
7 |
8 | # NoroSTAT
9 |
10 | This is the documentation of the API for accessing the NoroSTAT (`norostat`) endpoint of
11 | the [Delphi](https://delphi.cmu.edu/)'s epidemiological data.
12 |
13 | General topics not specific to any particular endpoint are discussed in the
14 | [API overview](README.md). Such topics include:
15 | [contributing](README.md#contributing), [citing](README.md#citing), and
16 | [data licensing](README.md#data-licensing).
17 |
18 | **NOTE**: Delphi stopped acquiring data from this data source in November 2020.
19 |
20 | ## NoroSTAT Data
21 |
22 | ...
23 |
24 | # The API
25 |
26 | The base URL is: https://api.delphi.cmu.edu/epidata/norostat/
27 |
28 | See [this documentation](README.md) for details on specifying epiweeks, dates, and lists.
29 |
30 | ## Parameters
31 |
32 | ### Required
33 |
34 |
35 |
36 | ## Response
37 |
38 | | Field | Description | Type |
39 | |-----------|-----------------------------------------------------------------|------------------|
40 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
41 | | `epidata` | list of results | array of objects |
42 | | ... | ... | ... |
43 | | `message` | `success` or error message | string |
44 |
45 | # Example URLs
46 |
47 |
48 |
49 | # Code Samples
50 |
51 |
52 |
--------------------------------------------------------------------------------
/docs/api/norostat_meta.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: inactive NoroSTAT Metadata
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 2
6 | permalink: api/meta_norostat.html
7 | ---
8 |
9 | # NoroSTAT Metadata
10 |
11 | This is the documentation of the API for accessing the NoroSTAT Metadata (`meta_norostat`) endpoint of
12 | the [Delphi](https://delphi.cmu.edu/)'s epidemiological data.
13 |
14 | General topics not specific to any particular endpoint are discussed in the
15 | [API overview](README.md). Such topics include:
16 | [contributing](README.md#contributing), [citing](README.md#citing), and
17 | [data licensing](README.md#data-licensing).
18 |
19 | ## NoroSTAT Metadata
20 |
21 | ...
22 |
23 | # The API
24 |
25 | The base URL is: https://api.delphi.cmu.edu/epidata/meta_norostat/
26 |
27 | See [this documentation](README.md) for details on specifying epiweeks, dates, and lists.
28 |
29 | ## Parameters
30 |
31 | ### Required
32 |
33 |
34 |
35 | ## Response
36 |
37 | | Field | Description | Type |
38 | |-----------|-----------------------------------------------------------------|------------------|
39 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
40 | | `epidata` | list of results | array of objects |
41 | | ... | ... | ... |
42 | | `message` | `success` or error message | string |
43 |
44 | # Example URLs
45 |
46 |
47 |
48 | # Code Samples
49 |
50 |
51 |
--------------------------------------------------------------------------------
/docs/api/paho_dengue.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: inactive PAHO Dengue
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 2
6 | ---
7 |
8 | # PAHO Dengue
9 |
10 | This is the documentation of the API for accessing the PAHO Dengue (`paho_dengue`) endpoint of
11 | the [Delphi](https://delphi.cmu.edu/)'s epidemiological data.
12 |
13 | General topics not specific to any particular endpoint are discussed in the
14 | [API overview](README.md). Such topics include:
15 | [contributing](README.md#contributing), [citing](README.md#citing), and
16 | [data licensing](README.md#data-licensing).
17 |
18 | ## PAHO Dengue Data
19 |
20 | ...
21 |
22 | # The API
23 |
24 | The base URL is: https://api.delphi.cmu.edu/epidata/paho_dengue/
25 |
26 | See [this documentation](README.md) for details on specifying epiweeks, dates, and lists.
27 |
28 | ## Parameters
29 |
30 | ### Required
31 |
32 |
33 |
34 | ## Response
35 |
36 | | Field | Description | Type |
37 | |-----------|-----------------------------------------------------------------|------------------|
38 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
39 | | `epidata` | list of results | array of objects |
40 | | ... | ... | ... |
41 | | `message` | `success` or error message | string |
42 |
43 | # Example URLs
44 |
45 |
46 |
47 | # Code Samples
48 |
49 |
50 |
--------------------------------------------------------------------------------
/docs/api/quidel.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: inactive Quidel
3 | parent: Data Sources and Signals
4 | grand_parent: Other Endpoints (COVID-19 and Other Diseases)
5 | nav_order: 2
6 | ---
7 |
8 | # Quidel
9 |
10 | This is the documentation of the API for accessing the Quidel (`quidel`) endpoint of Delphi’s epidemiological data.
11 |
12 | General topics not specific to any particular endpoint are discussed in the [API overview](https://cmu-delphi.github.io/delphi-epidata/). Such topics include: [contributing](https://cmu-delphi.github.io/delphi-epidata/api/README.html#contributing), [citing](https://cmu-delphi.github.io/delphi-epidata/api/README.html#citing), and [data licensing](https://cmu-delphi.github.io/delphi-epidata/api/README.html#data-licensing).
13 |
14 | ## Quidel Data
15 |
16 | Data provided by Quidel Corp., which contains flu lab test results.
17 |
18 | ## The API
19 |
20 | The base URL is: https://api.delphi.cmu.edu/epidata/quidel/
21 |
22 | See this [documentation](https://cmu-delphi.github.io/delphi-epidata/api/README.html) for details on specifying epiweeks, dates, and lists.
23 |
24 | ## Parameters
25 |
26 | ### Required
27 |
28 | | Parameter | Description | Type |
29 | | --- | --- | --- |
30 | | `auth` | password | string |
31 | | `epiweeks` | epiweeks | `list` of epiweeks |
32 | | `locations` | locations | `list` of `hhs<#>` [region](https://github.com/cmu-delphi/delphi-epidata/blob/main/labels/regions.txt) labels |
33 |
34 | ## Response
35 |
36 | | Field | Description | Type |
37 | |-----------|-----------------------------------------------------------------|------------------|
38 | | `result` | result code: 1 = success, 2 = too many results, -2 = no results | integer |
39 | | `epidata` | list of results | array of objects |
40 | | ... | ... | ... |
41 | | `message` | `success` or error message | string |
--------------------------------------------------------------------------------
/docs/developer_support.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Developer Support
3 | nav_order: 5
4 | has_children: true
5 | ---
6 |
7 | # Developer Support
8 | Delphi's Developer Support section provides several helpful resources for developers including a quickstart guide and tutorial.
9 |
--------------------------------------------------------------------------------
/docs/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/favicon.ico
--------------------------------------------------------------------------------
/docs/related_work.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Motivating Works
3 | parent: Epidata API Intro
4 | nav_order: 4
5 | ---
6 |
7 | # Motivating Works
8 |
9 | The works listed below, along with numerous others, were an early inspiration
10 | for the Epidata API:
11 |
12 | - Cook, Samantha, et al. "Assessing Google flu trends performance in the United
13 | States during the 2009 influenza virus A (H1N1) pandemic." PloS one 6.8
14 | (2011): e23610.
15 | - Broniatowski, David A., Michael J. Paul, and Mark Dredze. "National and local
16 | influenza surveillance through Twitter: an analysis of the 2012-2013
17 | influenza epidemic." (2013): e83672.
18 | - Dredze, Mark, et al. "HealthTweets. org: A Platform for Public Health
19 | Surveillance using Twitter." AAAI Conference on Artificial
20 | Intelligence. 2014.
21 | - Generous, Nicholas, et al. "Global disease monitoring and forecasting with
22 | Wikipedia." (2014): e1003892.
23 | - Hickmann, Kyle S., et al. "Forecasting the 2013–2014 Influenza Season Using
24 | Wikipedia." (2015): e1004239.
25 | - McIver, David J., and John S. Brownstein. "Wikipedia usage estimates
26 | prevalence of influenza-like illness in the United States in near real-time."
27 | PLoS Comput Biol 10.4 (2014): e1003581.
28 |
--------------------------------------------------------------------------------
/docs/symptom-survey/COVID Symptom Survey Variable Categories.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/COVID Symptom Survey Variable Categories.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/collaboration-revision.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Collaboration and Survey Revision
3 | parent: inactive COVID-19 Trends and Impact Survey
4 | nav_order: 1
5 | nav_exclude: true
6 | ---
7 |
8 | # Collaboration and Survey Revision
9 |
10 | Update: CTIS data collection has ended. We are no longer
12 | revising the survey or hosting collaboration meetings.
13 |
14 | Delphi continues to revise the COVID-19 Trends and Impact Survey (CTIS)
15 | instruments in order to prioritize items that have the greatest utility for the
16 | response to the COVID-19 pandemic. We conduct revisions in collaboration with
17 | data users, fellow researchers, and public health officials, to ensure the
18 | survey data best serves public health and research goals.
19 |
20 | ## Proposing Revisions
21 |
22 | If there is a revision or question you would like us to consider, please fill
23 | out [this form requesting details about your
24 | proposal](https://forms.gle/q6NS8fPJJofKQ9mM8). This request can be submitted by
25 | researchers regardless of whether they have a signed Data Use Agreement for the
26 | individual responses to the COVID-19 Trends and Impact Survey.
27 |
28 | ## Collaboration Meetings
29 |
30 | Collaboration in this ongoing effort is our priority. Delphi hosts a
31 | collaboration meeting the first Friday of each month at 2–3pm ET. The meeting is
32 | a chance to announce upcoming changes to the survey, have a discussion and get
33 | input about the instrument, share preliminary findings and network with other
34 | researchers.
35 |
36 | If you're interested in joining, contact us at
37 | .
38 |
--------------------------------------------------------------------------------
/docs/symptom-survey/contingency-tables.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Contingency Tables
3 | parent: inactive COVID-19 Trends and Impact Survey
4 | nav_order: 4
5 | ---
6 |
7 | # Contingency Tables
8 | {: .no_toc}
9 |
10 | This documentation describes the fine-resolution contingency tables produced by
11 | grouping [US COVID-19 Trends and Impact Survey (CTIS)](./index.md) individual
12 | responses by various self-reported demographic features. The contingency tables
13 | are publicly available for download as a complete set from the Inter-university
14 | Consortium for Political Science Research (ICPSR):
15 |
16 | * Reinhart, Alex, Mejia, Robin, and Tibshirani, Ryan J. COVID-19 Trends and
17 | Impact Survey (CTIS), United States, 2020-2022. Inter-university Consortium
18 | for Political and Social Research [distributor], 2025-02-28.
19 |
20 |
21 | Select the dataset "DS0 Study-Level Files" to download the complete set of
22 | contingency tables and all survey documentation files, including the codebooks
23 | and an Aggregate Contingency Table User Guide that describes the data
24 | processing and file formats, and includes example R code.
25 |
26 | These contingency tables provide granular breakdowns of COVID-related topics
27 | such as vaccine uptake and acceptance. Compatible tables are also available for
28 | the [UMD Global CTIS](https://covidmap.umd.edu/) for more than 100 countries and
29 | territories worldwide, also [through
30 | ICPSR](https://www.icpsr.umich.edu/web/ICPSR/studies/39206).
31 |
32 | These tables are more detailed than the [coarse aggregates reported in the
33 | COVIDcast Epidata API](../api/covidcast-signals/fb-survey.md), which are grouped
34 | only by geographic region. [Individual response data](survey-files.md) for the
35 | survey is available, but only to researchers who request restricted data access
36 | via ICPSR, whereas these contingency tables are available to the general public.
37 |
38 | Please see our survey [credits](index.md#credits) and [citation information](index.md#citing-the-survey)
39 | for information on how to cite this data if you use it in a publication.
40 |
41 | Our [Data and Sampling Errors](problems.md) documentation lists important
42 | updates for data users, including corrections to data or updates on data
43 | processing delays.
44 |
--------------------------------------------------------------------------------
/docs/symptom-survey/modules.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Survey Modules & Randomization
3 | parent: inactive COVID-19 Trends and Impact Survey
4 | nav_order: 7
5 | ---
6 |
7 | # Survey Modules & Randomization
8 | {: .no_toc}
9 |
10 | To reduce the overall length of the instrument and minimize response burden,
11 | the COVID-19 Trends and Impact Survey (CTIS) will consist of a block of daily
12 | core questions and will use a randomized module approach for the other topics.
13 | Implementation of this approach started in [Wave 11](coding.md#wave-11), which
14 | launched on May 20, 2021.
15 |
16 | Each respondent invited to take the survey will be asked the daily core
17 | questions. The daily core questions for Wave 11 include:
18 |
19 | * Symptoms
20 | * Testing
21 | * COVID-19 vaccine
22 | * Behaviors
23 |
24 | After answering these questions, survey respondents will be randomly allocated
25 | and evenly distributed to Module A or Module B.
26 |
27 | Survey items in Module A cover the following topics:
28 |
29 | * Beliefs and Norms
30 | * Knowledge and Information
31 | * Healthcare
32 |
33 | Survey items in Module B cover the following topics:
34 |
35 | * Well-being
36 | * Parenting behaviors (including schooling)
37 | * Specific demographic questions (i.e. cigarette use and pregnancy)
38 |
39 | After the modules, the survey will conclude with asking respondents demographics
40 | and occupation questions. It is also noteworthy that after randomization to
41 | either Module A or B, the platform does not allow respondents to navigate back
42 | to change their previous responses in the Daily Core questions.
43 |
44 | Microdata files available to users with data use agreements include a `module`
45 | column that indicates the module the respondent was randomly assigned to; this
46 | column contains the values `A`, `B`, or `NA` (for respondents not assigned to a
47 | module, e.g. because they completed a prior survey wave).
48 |
--------------------------------------------------------------------------------
/docs/symptom-survey/screenshots/cyberduck-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/screenshots/cyberduck-1.png
--------------------------------------------------------------------------------
/docs/symptom-survey/screenshots/cyberduck-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/screenshots/cyberduck-2.png
--------------------------------------------------------------------------------
/docs/symptom-survey/screenshots/winscp-small1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/screenshots/winscp-small1.png
--------------------------------------------------------------------------------
/docs/symptom-survey/screenshots/winscp-small2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/screenshots/winscp-small2.png
--------------------------------------------------------------------------------
/docs/symptom-survey/symptom-survey-weights.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/symptom-survey-weights.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU CTIS Wave 12 Full Launch.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU CTIS Wave 12 Full Launch.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU CTIS Wave 12 Full Launch.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU CTIS Wave 12 Full Launch.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 10.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 10.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 10.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 10.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 11.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 11.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 11.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 11.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 12.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 12.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 12.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 12.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 6.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 6.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 6.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 6.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 8.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 8.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CMU Survey Wave 8.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CMU Survey Wave 8.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CTIS US Wave 13.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CTIS US Wave 13.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/CTIS US Wave 13.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/CTIS US Wave 13.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY-_US_Expansion_-_With_Translations.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY-_US_Expansion_-_With_Translations.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY-_US_Expansion_-_With_Translations.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY-_US_Expansion_-_With_Translations.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY_2020-04-06.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY_2020-04-06.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY_2020-04-06.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY_2020-04-06.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY__-_US_Expansion.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY__-_US_Expansion.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY__-_US_Expansion.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_TODEPLOY__-_US_Expansion.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_4.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_4.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_4.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_4.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_5.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_5.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_5.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_5.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_7.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_7.docx
--------------------------------------------------------------------------------
/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_7.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/docs/symptom-survey/waves/Survey_of_COVID-Like_Illness_-_Wave_7.pdf
--------------------------------------------------------------------------------
/docs/symptom-survey/weights.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Survey Weights
3 | parent: inactive COVID-19 Trends and Impact Survey
4 | nav_order: 5
5 | ---
6 |
7 | # Survey Weights
8 | {: .no_toc}
9 |
10 | The survey's individual response files contain respondent weights calculated
11 | by Facebook. These weights are also used to produce our
12 | [public contingency tables](./contingency-tables.md) and the geographic aggregates
13 | [in the COVIDcast Epidata API](../api/covidcast-signals/fb-survey.md).
14 |
15 | Facebook has developed a [User Guide for the CTIS
16 | Weights](https://dataforgood.facebook.com/dfg/resources/user-guide-for-ctis-weights)
17 | (updated May 2022). This manual explains the weight methodology, gives examples
18 | of how to use the weights when calculating estimates, and states the known
19 | limitations of the weights. We also have separate information about the
20 | [survey's limitations](limitations.md) that affect what conclusions can be drawn
21 | from the survey data.
22 |
--------------------------------------------------------------------------------
/integrations/__init__.py:
--------------------------------------------------------------------------------
# Make the current working directory importable so tests resolve
# project packages when run from the repository root.
import os
import sys

sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/integrations/acquisition/covid_hosp/facility/__init__.py:
--------------------------------------------------------------------------------
# Make the current working directory importable so tests resolve
# project packages when run from the repository root.
import os
import sys

sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/integrations/acquisition/covid_hosp/state_daily/__init__.py:
--------------------------------------------------------------------------------
# Make the current working directory importable so tests resolve
# project packages when run from the repository root.
import os
import sys

sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/integrations/acquisition/covid_hosp/state_timeseries/__init__.py:
--------------------------------------------------------------------------------
# Make the current working directory importable so tests resolve
# project packages when run from the repository root.
import os
import sys

sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/integrations/acquisition/covidcast/__init__.py:
--------------------------------------------------------------------------------
# Make the current working directory importable so tests resolve
# project packages when run from the repository root.
import os
import sys

sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/integrations/acquisition/covidcast/delete_batch.csv:
--------------------------------------------------------------------------------
1 | geo_id,value,stderr,sample_size,issue,time_value,geo_type,signal,source
2 | d_nonlatest,0,0,0,1,0,msa,sig,src
3 | d_latest, 0,0,0,3,0,msa,sig,src
4 | d_justone, 0,0,0,1,0,msa,sig,src
--------------------------------------------------------------------------------
/integrations/client/__init__.py:
--------------------------------------------------------------------------------
# Make the current working directory importable so tests resolve
# project packages when run from the repository root.
import os
import sys

sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/integrations/client/test_delphi_epidata.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
26 |
--------------------------------------------------------------------------------
/integrations/client/test_delphi_epidata.js:
--------------------------------------------------------------------------------
const Epidata = require('../../src/client/delphi_epidata').Epidata;

// Print whatever the API hands back: result code, message, and payload.
function logResponse(result, message, data) {
  console.log(result, message, data);
}

// Build a comma-joined list of `count` copies of the same signal name.
const signalList = (count) => Array(count).fill('raw_cli').join(',');

// Smoke-test the covidcast endpoint: once with a single signal, once with
// a very long signal list (exercises handling of large request parameters).
Epidata.covidcast(logResponse, 'fb-survey', signalList(1), 'day', 'state', '2020-12-01', 'ca');
Epidata.covidcast(logResponse, 'fb-survey', signalList(1000), 'day', 'state', '2020-12-01', 'ca');
8 |
--------------------------------------------------------------------------------
/integrations/server/__init__.py:
--------------------------------------------------------------------------------
# Make the current working directory importable so tests resolve
# project packages when run from the repository root.
import os
import sys

sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/integrations/server/test_cdc.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class CdcTest(DelphiTestBase):
    """Basic integration tests for the cdc endpoint."""

    def localSetUp(self) -> None:
        # Start from an empty extract table; authenticate with the cdc role.
        self.truncate_tables_list = ["cdc_extract"]
        self.role_name = "cdc"

    def test_cdc(self):
        """Insert one cdc_extract row and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO `cdc_extract`(`epiweek`, `state`, `num1`, `num2`, `num3`, `num4`, `num5`, `num6`, `num7`, `num8`, `total`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            ("201102", "AK", "16", "35", "51", "96", "30", "748", "243", "433", "65"),
        )
        self.cnx.commit()

        response = self.epidata_client.cdc(auth="cdc_key", epiweeks=201102, locations="cen9")

        # The AK row is returned under the queried location "cen9"; the API
        # reports `value` as None for this query.
        expected = {
            "location": "cen9",
            "epiweek": 201102,
            "num1": 16,
            "num2": 35,
            "num3": 51,
            "num4": 96,
            "num5": 30,
            "num6": 748,
            "num7": 243,
            "num8": 433,
            "total": 65,
            "value": None,
        }
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
43 |
--------------------------------------------------------------------------------
/integrations/server/test_delphi.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | # first party
4 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
5 |
6 |
class DelphiTest(DelphiTestBase):
    """Basic integration tests for the delphi endpoint."""

    def localSetUp(self):
        self.truncate_tables_list = ["forecasts"]

    def test_delphi(self):
        """Insert one forecast row and fetch it back through the API."""
        # The same dict is stored (as JSON) and used as the expected payload,
        # since the endpoint echoes the stored blob back under `forecast`.
        forecast = {
            "_version": "version",
            "name": "name",
            "season": "season",
            "epiweek": "epiweek",
            "year_weeks": 222,
            "season_weeks": 111,
            "ili_bins": "ili_bins_123",
            "ili_bin_size": "ili_bin_size231",
        }
        self.cur.execute(
            "INSERT INTO `forecasts` (`system`, `epiweek`, `json`) VALUES(%s, %s, %s)",
            ("eb", "201441", json.dumps(forecast)),
        )
        self.cnx.commit()

        response = self.epidata_client.delphi(system="eb", epiweek=201441)

        self.assertEqual(
            response,
            {
                "epidata": [{"epiweek": 201441, "forecast": forecast, "system": "eb"}],
                "message": "success",
                "result": 1,
            },
        )
59 |
--------------------------------------------------------------------------------
/integrations/server/test_dengue_nowcast.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class DengueNowcastTest(DelphiTestBase):
    """Basic integration tests for the dengue_nowcast endpoint."""

    def localSetUp(self):
        # This table is not part of the shared fixtures, so create it here.
        create_dengue_nowcasts = """
        CREATE TABLE IF NOT EXISTS `dengue_nowcasts` (
          `id` int NOT NULL AUTO_INCREMENT,
          `target` varchar(32) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci NOT NULL,
          `epiweek` int NOT NULL,
          `location` varchar(12) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci DEFAULT NULL,
          `value` float NOT NULL,
          `std` float NOT NULL,
          PRIMARY KEY (`id`),
          UNIQUE KEY `entry` (`target`,`epiweek`,`location`),
          KEY `target` (`target`),
          KEY `epiweek` (`epiweek`),
          KEY `location` (`location`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3;
        """
        self.create_tables_list = [create_dengue_nowcasts]
        self.truncate_tables_list = ["dengue_nowcasts"]

    def test_dengue_nowcasts(self):
        """Insert one nowcast row and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO dengue_nowcasts(target, epiweek, location, value, std) VALUES(%s, %s, %s, %s, %s)",
            ("num_dengue", "201409", "ar", "85263", "351456"),
        )
        self.cnx.commit()

        response = self.epidata_client.dengue_nowcast(locations="ar", epiweeks=201409)

        expected = {"location": "ar", "epiweek": 201409, "value": 85263.0, "std": 351456.0}
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
43 |
--------------------------------------------------------------------------------
/integrations/server/test_dengue_sensors.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class DengueSensorsTest(DelphiTestBase):
    """Basic integration tests for the dengue_sensors endpoint."""

    def localSetUp(self):
        # This table is not part of the shared fixtures, so create it here.
        create_dengue_sensors = """
        CREATE TABLE IF NOT EXISTS `dengue_sensors` (
          `id` int NOT NULL AUTO_INCREMENT,
          `target` varchar(32) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci NOT NULL,
          `name` varchar(8) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci NOT NULL,
          `epiweek` int NOT NULL,
          `location` varchar(12) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci DEFAULT NULL,
          `value` float NOT NULL,
          PRIMARY KEY (`id`),
          UNIQUE KEY `entry` (`target`,`name`,`epiweek`,`location`),
          KEY `sensor` (`target`,`name`),
          KEY `epiweek` (`epiweek`),
          KEY `location` (`location`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3;
        """
        self.create_tables_list = [create_dengue_sensors]
        self.truncate_tables_list = ["dengue_sensors"]
        self.role_name = "sensors"

    def test_dengue_sensors(self):
        """Insert one sensor reading and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO `dengue_sensors`(`target`, `name`, `epiweek`, `location`, `value`) VALUES(%s, %s, %s, %s, %s)",
            ("num_dengue", "ght", "201432", "ag", "1234"),
        )
        self.cnx.commit()

        response = self.epidata_client.dengue_sensors(auth="sensors_key", names="ght", locations="ag", epiweeks="201432")

        expected = {"name": "ght", "location": "ag", "epiweek": 201432, "value": 1234.0}
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
44 |
--------------------------------------------------------------------------------
/integrations/server/test_ecdc_ili.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class EcdcIliTest(DelphiTestBase):
    """Basic integration tests for the ecdc_ili endpoint."""

    def localSetUp(self):
        self.truncate_tables_list = ["ecdc_ili"]

    def test_ecdc_ili(self):
        """Insert one ecdc_ili row and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO `ecdc_ili`(`release_date`, `issue`, `epiweek`, `lag`, `region`, `incidence_rate`) VALUES(%s, %s, %s, %s, %s, %s)",
            ("2020-03-26", "202012", "201840", "76", "Armenia", "0"),
        )
        self.cnx.commit()

        response = self.epidata_client.ecdc_ili(regions="Armenia", epiweeks="201840")

        expected = {
            "release_date": "2020-03-26",
            "region": "Armenia",
            "issue": 202012,
            "epiweek": 201840,
            "lag": 76,
            "incidence_rate": 0.0,
        }
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
36 |
--------------------------------------------------------------------------------
/integrations/server/test_flusurv.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class FlusurvTest(DelphiTestBase):
    """Basic integration tests for the flusurv endpoint."""

    def localSetUp(self):
        self.truncate_tables_list = ["flusurv"]

    def test_flusurv(self):
        """Insert one flusurv row and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO `flusurv`(`release_date`, `issue`, `epiweek`, `location`, `lag`, `rate_age_0`, `rate_age_1`, `rate_age_2`, `rate_age_3`, `rate_age_4`, `rate_overall`, `rate_age_5`, `rate_age_6`, `rate_age_7`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            ("2012-11-02", "201243", "201143", "CA", "52", "0", "0", "0", "0.151", "0", "0.029", "0", "0", "0"),
        )
        self.cnx.commit()

        response = self.epidata_client.flusurv(epiweeks=201143, locations="CA")

        # rate_age_5..7 are inserted above but do not appear in the response.
        expected = {
            "release_date": "2012-11-02",
            "location": "CA",
            "issue": 201243,
            "epiweek": 201143,
            "lag": 52,
            "rate_age_0": 0.0,
            "rate_age_1": 0.0,
            "rate_age_2": 0.0,
            "rate_age_3": 0.151,
            "rate_age_4": 0.0,
            "rate_overall": 0.029,
        }
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
41 |
--------------------------------------------------------------------------------
/integrations/server/test_fluview_clinical.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class FluviewClinicalTest(DelphiTestBase):
    """Basic integration tests for the fluview_clinical endpoint."""

    def localSetUp(self):
        self.truncate_tables_list = ["fluview_clinical"]

    def test_fluview_clinical(self):
        """Insert one fluview_clinical row and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO `fluview_clinical`(`release_date`, `issue`, `epiweek`, `region`, `lag`, `total_specimens`, `total_a`, `total_b`, `percent_positive`, `percent_a`, `percent_b`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            ("2018-10-10", "201839", "201640", "al", "103", "406", "4", "1", "1.32", "0.99", "0.25"),
        )
        self.cnx.commit()

        response = self.epidata_client.fluview_clinical(epiweeks=201640, regions="al")

        expected = {
            "release_date": "2018-10-10",
            "region": "al",
            "issue": 201839,
            "epiweek": 201640,
            "lag": 103,
            "total_specimens": 406,
            "total_a": 4,
            "total_b": 1,
            "percent_positive": 1.32,
            "percent_a": 0.99,
            "percent_b": 0.25,
        }
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
41 |
--------------------------------------------------------------------------------
/integrations/server/test_gft.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class GftTest(DelphiTestBase):
    """Basic integration tests for the gft endpoint."""

    def localSetUp(self):
        self.truncate_tables_list = ["gft"]

    def test_gft(self):
        """Insert one gft row and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO `gft`(`epiweek`, `location`, `num`) VALUES(%s, %s, %s)",
            ("200340", "nat", "902"),
        )
        self.cnx.commit()

        response = self.epidata_client.gft(locations="nat", epiweeks="200340")

        expected = {"location": "nat", "epiweek": 200340, "num": 902}
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
23 |
--------------------------------------------------------------------------------
/integrations/server/test_ght.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class GhtTest(DelphiTestBase):
    """Basic integration tests for the ght endpoint."""

    def localSetUp(self):
        self.truncate_tables_list = ["ght"]
        self.role_name = "ght"

    def test_ght(self):
        """Insert one ght row and fetch it back through the API."""
        self.cur.execute(
            "INSERT INTO `ght`(`query`, `location`, `epiweek`, `value`) VALUES(%s, %s, %s, %s)",
            ("/n/query", "US", "200101", "12345"),
        )
        self.cnx.commit()

        response = self.epidata_client.ght(locations="US", epiweeks="200101", query="/n/query", auth="ght_key")

        expected = {"location": "US", "epiweek": 200101, "value": 12345.0}
        self.assertEqual(response, {"epidata": [expected], "result": 1, "message": "success"})
24 |
--------------------------------------------------------------------------------
/integrations/server/test_kcdc_ili.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class KcdcIliTest(DelphiTestBase):
    """Basic integration tests for the kcdc_ili endpoint."""

    def localSetUp(self):
        # start each test from an empty kcdc_ili table
        self.truncate_tables_list = ["kcdc_ili"]

    def test_kcdc_ili(self):
        """Insert one kcdc_ili row and read it back through the API."""
        self.cur.execute(
            "INSERT INTO `kcdc_ili`(`release_date`, `issue`, `epiweek`, `lag`, `region`, `ili`) VALUES(%s, %s, %s, %s, %s, %s)",
            ("2020-03-27", "202013", "200432", "222", "REG", "0.25"),
        )
        self.cnx.commit()
        expected_row = {
            "release_date": "2020-03-27",
            "region": "REG",
            "issue": 202013,
            "epiweek": 200432,
            "lag": 222,
            "ili": 0.25,
        }
        response = self.epidata_client.kcdc_ili(regions="REG", epiweeks="200432")
        self.assertEqual(
            response,
            {"epidata": [expected_row], "result": 1, "message": "success"},
        )
36 |
--------------------------------------------------------------------------------
/integrations/server/test_meta.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class MetaTest(DelphiTestBase):
    """Basic integration tests for the meta endpoint."""

    def localSetUp(self):
        # empty every table meta summarizes, so all counts come back zeroed
        self.truncate_tables_list = ["forecasts", "fluview", "wiki", "wiki_meta", "twitter"]

    def test_meta(self):
        """With empty source tables, meta reports empty/zeroed summaries."""
        expected = {
            "epidata": [
                {
                    "delphi": [],
                    "fluview": [{"latest_issue": None, "latest_update": None, "table_rows": 0}],
                    "twitter": [],
                    "wiki": [{"latest_update": None, "table_rows": 0}],
                }
            ],
            "message": "success",
            "result": 1,
        }
        response = self.epidata_client.meta()
        self.assertEqual(response, expected)
29 |
--------------------------------------------------------------------------------
/integrations/server/test_nidss_dengue.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class NiddsDengueTest(DelphiTestBase):
    """Basic integration tests for the nidss_dengue endpoint."""

    def localSetUp(self):
        # start each test from an empty nidss_dengue table
        self.truncate_tables_list = ["nidss_dengue"]

    def test_nidss_dengue(self):
        """Insert one nidss_dengue row and read it back through the API."""
        self.cur.execute(
            "INSERT INTO `nidss_dengue`(`epiweek`, `location`, `region`, `count`) VALUES(%s, %s, %s, %s)",
            ("200340", "SomeCity", "Central", "0"),
        )
        self.cnx.commit()
        expected = {
            "epidata": [{"location": "SomeCity", "epiweek": 200340, "count": 0}],
            "result": 1,
            "message": "success",
        }
        response = self.epidata_client.nidss_dengue(locations="SomeCity", epiweeks="200340")
        self.assertEqual(response, expected)
23 |
--------------------------------------------------------------------------------
/integrations/server/test_nidss_flu.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class NiddsFluTest(DelphiTestBase):
    """Basic integration tests for the nidss_flu endpoint."""

    def localSetUp(self):
        # start each test from an empty nidss_flu table
        self.truncate_tables_list = ["nidss_flu"]

    def test_nidss_flu(self):
        """Insert one nidss_flu row and read it back through the API."""
        self.cur.execute(
            "INSERT INTO `nidss_flu`(`release_date`, `issue`, `epiweek`, `region`, `lag`, `visits`, `ili`) VALUES(%s, %s, %s, %s, %s, %s, %s)",
            ("2015-09-05", "201530", "200111", "SomeRegion", "222", "333", "444"),
        )
        self.cnx.commit()
        expected_row = {
            "release_date": "2015-09-05",
            "region": "SomeRegion",
            "issue": 201530,
            "epiweek": 200111,
            "lag": 222,
            "visits": 333,
            "ili": 444.0,
        }
        response = self.epidata_client.nidss_flu(regions="SomeRegion", epiweeks="200111")
        self.assertEqual(
            response,
            {"epidata": [expected_row], "result": 1, "message": "success"},
        )
37 |
--------------------------------------------------------------------------------
/integrations/server/test_nowcast.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class NowcastTest(DelphiTestBase):
    """Basic integration tests for the nowcast endpoint."""

    def localSetUp(self):
        # start each test from an empty nowcasts table
        self.truncate_tables_list = ["nowcasts"]

    def test_nowcast(self):
        """Insert one nowcast row and read it back through the API."""
        self.cur.execute(
            "INSERT INTO `nowcasts`(`epiweek`, `location`, `value`, `std`) VALUES(%s, %s, %s, %s)",
            ("201145", "nat", "12345", "0.01234"),
        )
        self.cnx.commit()
        expected = {
            "epidata": [{"location": "nat", "epiweek": 201145, "value": 12345.0, "std": 0.01234}],
            "result": 1,
            "message": "success",
        }
        response = self.epidata_client.nowcast(locations="nat", epiweeks="201145")
        self.assertEqual(response, expected)
27 |
--------------------------------------------------------------------------------
/integrations/server/test_paho_dengue.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class PahoDengueTest(DelphiTestBase):
    """Basic integration tests for the paho_dengue endpoint."""

    def localSetUp(self):
        # start each test from an empty paho_dengue table
        self.truncate_tables_list = ["paho_dengue"]

    def test_paho_dengue(self):
        """Insert one paho_dengue row and read it back through the API."""
        self.cur.execute(
            "INSERT INTO `paho_dengue`(`release_date`, `issue`, `epiweek`, `lag`, `region`, `total_pop`, `serotype`, `num_dengue`, `incidence_rate`, `num_severe`, `num_deaths`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            ("2018-12-01", "201848", "201454", "204", "AG", "91", "DEN 1,4", "37", "40.66", "0", "0"),
        )
        self.cnx.commit()
        expected_row = {
            "release_date": "2018-12-01",
            "region": "AG",
            "serotype": "DEN 1,4",
            "issue": 201848,
            "epiweek": 201454,
            "lag": 204,
            "total_pop": 91,
            "num_dengue": 37,
            "num_severe": 0,
            "num_deaths": 0,
            "incidence_rate": 40.66,
        }
        response = self.epidata_client.paho_dengue(regions="AG", epiweeks="201454")
        self.assertEqual(
            response,
            {"epidata": [expected_row], "result": 1, "message": "success"},
        )
41 |
--------------------------------------------------------------------------------
/integrations/server/test_quidel.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class QuidelTest(DelphiTestBase):
    """Basic integration tests for the quidel endpoint."""

    def localSetUp(self):
        # empty the quidel table and register the api-key role this endpoint requires
        self.truncate_tables_list = ["quidel"]
        self.role_name = "quidel"

    def test_quidel(self):
        """Insert one quidel row and read it back through the authenticated API."""
        self.cur.execute(
            "INSERT INTO `quidel`(`location`, `epiweek`, `value`, `num_rows`, `num_devices`) VALUES(%s, %s, %s, %s, %s)",
            ("loc1", "201111", "1", "0", "0"),
        )
        self.cnx.commit()
        expected = {
            "epidata": [{"location": "loc1", "epiweek": 201111, "value": 1.0}],
            "result": 1,
            "message": "success",
        }
        response = self.epidata_client.quidel(locations="loc1", epiweeks="201111", auth="quidel_key")
        self.assertEqual(response, expected)
24 |
--------------------------------------------------------------------------------
/integrations/server/test_sensors.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class SensorsTest(DelphiTestBase):
    """Basic integration tests for the sensors endpoint."""

    def localSetUp(self):
        # empty the sensors table and register the api-key role this endpoint requires
        self.truncate_tables_list = ["sensors"]
        self.role_name = "sensors"

    def test_sensors(self):
        """Insert one sensors row and read it back through the authenticated API."""
        self.cur.execute(
            "INSERT INTO `sensors`(`name`, `epiweek`, `location`, `value`) VALUES(%s, %s, %s, %s)",
            ("sens1", "201111", "loc1", "222"),
        )
        self.cnx.commit()
        expected = {
            "epidata": [{"name": "sens1", "location": "loc1", "epiweek": 201111, "value": 222.0}],
            "result": 1,
            "message": "success",
        }
        response = self.epidata_client.sensors(names="sens1", locations="loc1", epiweeks="201111", auth="sensors_key")
        self.assertEqual(response, expected)
28 |
--------------------------------------------------------------------------------
/integrations/server/test_twitter.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class TwitterTest(DelphiTestBase):
    """Basic integration tests for the twitter endpoint."""

    def localSetUp(self):
        # empty the twitter table and register the api-key role this endpoint requires
        self.truncate_tables_list = ["twitter"]
        self.role_name = "twitter"

    def test_twitter(self):
        """Insert two twitter rows; only the one inside the date range is returned."""
        self.cur.execute(
            'INSERT INTO `twitter`(`date`, `state`, `num`, `total`) VALUES ("2015-07-29", "AK", "1", "223"), ("2020-07-29", "CT", "12", "778")',
        )
        self.cnx.commit()
        # AK rolls up into census region cen9; the 2020 row falls outside the range
        expected = {
            "epidata": [{"location": "cen9", "date": "2015-07-29", "num": 1, "total": 223, "percent": 0.4484}],
            "result": 1,
            "message": "success",
        }
        response = self.epidata_client.twitter(auth="twitter_key", locations="cen9", dates="20150701-20160101")
        self.assertEqual(response, expected)
28 |
--------------------------------------------------------------------------------
/integrations/server/test_wiki.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.common.integration_test_base_class import DelphiTestBase
3 |
4 |
class WikiTest(DelphiTestBase):
    """Basic integration tests for the wiki endpoint."""

    def localSetUp(self):
        # start each test from empty wiki and wiki_meta tables
        self.truncate_tables_list = ["wiki", "wiki_meta"]

    def test_wiki(self):
        """Insert wiki counts plus matching metadata and query one article."""
        self.cur.execute(
            'INSERT INTO `wiki`(`datetime`, `article`, `count`, `language`) VALUES ("2007-12-09 18:00:00", "amantadine", "3", "en"), ("2008-12-09 18:00:00", "test", "5", "en")',
        )
        self.cur.execute(
            'INSERT INTO `wiki_meta`(`datetime`, `date`, `epiweek`, `total`, `language`) VALUES ("2007-12-09 18:00:00", "2007-12-09", "200750", "969214", "en"), ("2008-12-09 18:00:00", "2008-12-09", "200750", "123321", "en")'
        )
        self.cnx.commit()
        # value is the count normalized by the epiweek total
        expected = {
            "epidata": [
                {"article": "test", "count": 5, "total": 123321, "hour": -1, "epiweek": 200750, "value": 40.544595}
            ],
            "result": 1,
            "message": "success",
        }
        response = self.epidata_client.wiki(articles="test", epiweeks="200701-200801")
        self.assertEqual(response, expected)
32 |
--------------------------------------------------------------------------------
/integrations/test_deploy_syntax.py:
--------------------------------------------------------------------------------
1 | """Simple integration test to validate the syntax of `deploy.json`."""
2 |
3 | # standard library
4 | import json
5 | import unittest
6 |
7 |
8 | class DeploySyntaxTests(unittest.TestCase):
9 | """Tests for `deploy.json`."""
10 |
11 | def test_syntax(self):
12 | """Ensure that `deploy.json` is valid JSON."""
13 |
14 | with open('repos/delphi/delphi-epidata/deploy.json', 'r') as f:
15 | self.assertIsInstance(json.loads(f.read()), dict)
16 |
--------------------------------------------------------------------------------
/labels/articles.txt:
--------------------------------------------------------------------------------
1 | amantadine
2 | antiviral_drugs
3 | avian_influenza
4 | canine_influenza
5 | cat_flu
6 | chills
7 | common_cold
8 | cough
9 | equine_influenza
10 | fatigue_(medical)
11 | fever
12 | flu_season
13 | gastroenteritis
14 | headache
15 | hemagglutinin_(influenza)
16 | human_flu
17 | influenza
18 | influenzalike_illness
19 | influenzavirus_a
20 | influenzavirus_c
21 | influenza_a_virus
22 | influenza_a_virus_subtype_h10n7
23 | influenza_a_virus_subtype_h1n1
24 | influenza_a_virus_subtype_h1n2
25 | influenza_a_virus_subtype_h2n2
26 | influenza_a_virus_subtype_h3n2
27 | influenza_a_virus_subtype_h3n8
28 | influenza_a_virus_subtype_h5n1
29 | influenza_a_virus_subtype_h7n2
30 | influenza_a_virus_subtype_h7n3
31 | influenza_a_virus_subtype_h7n7
32 | influenza_a_virus_subtype_h7n9
33 | influenza_a_virus_subtype_h9n2
34 | influenza_b_virus
35 | influenza_pandemic
36 | influenza_prevention
37 | influenza_vaccine
38 | malaise
39 | myalgia
40 | nasal_congestion
41 | nausea
42 | neuraminidase_inhibitor
43 | orthomyxoviridae
44 | oseltamivir
45 | paracetamol
46 | rhinorrhea
47 | rimantadine
48 | shivering
49 | sore_throat
50 | swine_influenza
51 | viral_neuraminidase
52 | viral_pneumonia
53 | vomiting
54 | zanamivir
55 |
--------------------------------------------------------------------------------
/labels/cities.txt:
--------------------------------------------------------------------------------
1 | Albany_NY
2 | Albuquerque_NM
3 | Anchorage_AK
4 | Arlington_VA
5 | Atlanta_GA
6 | Austin_TX
7 | Baltimore_MD
8 | Baton_Rouge_LA
9 | Beaverton_OR
10 | Bellevue_WA
11 | Berkeley_CA
12 | Birmingham_AL
13 | Boise_ID
14 | Boston_MA
15 | Buffalo_NY
16 | Cary_NC
17 | Charlotte_NC
18 | Chicago_IL
19 | Cleveland_OH
20 | Colorado_Springs_CO
21 | Columbia_SC
22 | Columbus_OH
23 | Dallas_TX
24 | Dayton_OH
25 | Denver_CO
26 | Des_Moines_IA
27 | Durham_NC
28 | Eugene_OR
29 | Fresno_CA
30 | Ft_Worth_TX
31 | Gainesville_FL
32 | Grand_Rapids_MI
33 | Greensboro_NC
34 | Greenville_SC
35 | Honolulu_HI
36 | Houston_TX
37 | Indianapolis_IN
38 | Irvine_CA
39 | Irving_TX
40 | Jacksonville_FL
41 | Jackson_MS
42 | Kansas_City_MO
43 | Knoxville_TN
44 | Las_Vegas_NV
45 | Lexington_KY
46 | Lincoln_NE
47 | Little_Rock_AR
48 | Los_Angeles_CA
49 | Lubbock_TX
50 | Madison_WI
51 | Memphis_TN
52 | Mesa_AZ
53 | Miami_FL
54 | Milwaukee_WI
55 | Nashville_TN
56 | Newark_NJ
57 | New_Orleans_LA
58 | New_York_NY
59 | Norfolk_VA
60 | Oakland_CA
61 | Oklahoma_City_OK
62 | Omaha_NE
63 | Orlando_FL
64 | Philadelphia_PA
65 | Phoenix_AZ
66 | Pittsburgh_PA
67 | Plano_TX
68 | Portland_OR
69 | Providence_RI
70 | Raleigh_NC
71 | Reno_NV
72 | Reston_VA
73 | Richmond_VA
74 | Rochester_NY
75 | Roswell_GA
76 | Sacramento_CA
77 | Salt_Lake_City_UT
78 | Santa_Clara_CA
79 | San_Antonio_TX
80 | San_Diego_CA
81 | San_Francisco_CA
82 | San_Jose_CA
83 | Scottsdale_AZ
84 | Seattle_WA
85 | Somerville_MA
86 | Spokane_WA
87 | Springfield_MO
88 | State_College_PA
89 | St_Louis_MO
90 | St_Paul_MN
91 | Sunnyvale_CA
92 | Tampa_FL
93 | Tempe_AZ
94 | Tucson_AZ
95 | Tulsa_OK
96 | Washington_DC
97 | Wichita_KS
98 |
--------------------------------------------------------------------------------
/labels/ecdc_regions.txt:
--------------------------------------------------------------------------------
1 | Armenia
2 | Austria
3 | Azerbaijan
4 | Belarus
5 | Belgium
6 | Czech Republic
7 | Denmark
8 | Estonia
9 | Finland
10 | France
11 | Georgia
12 | Iceland
13 | Ireland
14 | Israel
15 | Italy
16 | Kazakhstan
17 | Kosovo*
18 | Kyrgyzstan
19 | Latvia
20 | Lithuania
21 | Luxembourg
22 | Malta
23 | Moldova
24 | Montenegro
25 | Netherlands
26 | North Macedonia
27 | Norway
28 | Poland
29 | Portugal
30 | Romania
31 | Russia
32 | Serbia
33 | Slovakia
34 | Slovenia
35 | Spain
36 | Switzerland
37 | Tajikistan
38 | Turkey
39 | Turkmenistan
40 | Ukraine
41 | United Kingdom - England
42 | United Kingdom - Northern Irel
43 | United Kingdom - Scotland
44 | United Kingdom - Wales
45 | Uzbekistan
--------------------------------------------------------------------------------
/labels/flusurv_locations.txt:
--------------------------------------------------------------------------------
1 | CA
2 | CO
3 | CT
4 | GA
5 | IA
6 | ID
7 | MD
8 | MI
9 | MN
10 | NM
11 | NY_albany
12 | NY_rochester
13 | OH
14 | OK
15 | OR
16 | RI
17 | SD
18 | TN
19 | UT
20 | network_all
21 | network_eip
22 | network_ihsp
23 |
--------------------------------------------------------------------------------
/labels/nidss_locations.txt:
--------------------------------------------------------------------------------
1 | changhua_county
2 | chiayi_city
3 | chiayi_county
4 | hsinchu_city
5 | hsinchu_county
6 | hualien_county
7 | kaohsiung_city
8 | keelung_city
9 | kinmen_county
10 | lienchiang_county
11 | miaoli_county
12 | nantou_county
13 | new_taipei_city
14 | penghu_county
15 | pingtung_county
16 | taichung_city
17 | tainan_city
18 | taipei_city
19 | taitung_county
20 | taoyuan_city
21 | yilan_county
22 | yunlin_county
23 |
--------------------------------------------------------------------------------
/labels/nidss_regions.txt:
--------------------------------------------------------------------------------
1 | nationwide
2 | central
3 | eastern
4 | kaoping
5 | northern
6 | southern
7 | taipei
8 |
--------------------------------------------------------------------------------
/labels/regions.txt:
--------------------------------------------------------------------------------
1 | nat
2 | hhs1
3 | hhs2
4 | hhs3
5 | hhs4
6 | hhs5
7 | hhs6
8 | hhs7
9 | hhs8
10 | hhs9
11 | hhs10
12 | cen1
13 | cen2
14 | cen3
15 | cen4
16 | cen5
17 | cen6
18 | cen7
19 | cen8
20 | cen9
21 |
--------------------------------------------------------------------------------
/labels/states.txt:
--------------------------------------------------------------------------------
1 | AK
2 | AL
3 | AR
4 | AZ
5 | CA
6 | CO
7 | CT
8 | DC
9 | DE
10 | FL
11 | GA
12 | HI
13 | IA
14 | ID
15 | IL
16 | IN
17 | KS
18 | KY
19 | LA
20 | MA
21 | MD
22 | ME
23 | MI
24 | MN
25 | MO
26 | MS
27 | MT
28 | NC
29 | ND
30 | NE
31 | NH
32 | NJ
33 | NM
34 | NV
35 | NY
36 | OH
37 | OK
38 | OR
39 | PA
40 | RI
41 | SC
42 | SD
43 | TN
44 | TX
45 | UT
46 | VA
47 | VT
48 | WA
49 | WI
50 | WV
51 | WY
52 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | plugins = sqlmypy
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "delphi-epidata",
3 | "private": true,
4 | "dependencies": {},
5 | "devDependencies": {
6 | "delphi-cmu-buildtools": "git+https://github.com/cmu-delphi/delphi-cmu-buildtools.git"
7 | },
8 | "scripts": {
9 | "build": "node build.js",
10 | "image": "docker build -t delphi-epidata:latest --file ./devops/Dockerfile ."
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 100
3 | target-version = ['py38']
4 | include = 'server,tests/server'
5 |
6 | [tool.pylint]
7 | [tool.pylint.'MESSAGES CONTROL']
8 | max-line-length = 100
9 | disable = [
10 | 'logging-format-interpolation',
11 | # Allow pytest functions to be part of a class
12 | 'no-self-use',
13 | 'too-many-locals',
14 | 'too-many-arguments',
15 | # Allow pytest classes to have one test
16 | 'too-few-public-methods',
17 | ]
18 |
19 | [tool.pylint.'BASIC']
20 | # Allow arbitrarily short-named variables.
21 | variable-rgx = ['[a-z_][a-z0-9_]*']
22 | argument-rgx = [ '[a-z_][a-z0-9_]*' ]
23 | attr-rgx = ['[a-z_][a-z0-9_]*']
24 |
25 | [tool.pylint.'DESIGN']
26 | ignored-argument-names = ['(_.*|run_as_module)']
27 |
--------------------------------------------------------------------------------
/requirements.api.txt:
--------------------------------------------------------------------------------
1 | delphi_utils
2 | epiweeks==2.1.2
3 | Flask==2.2.5
4 | Flask-Limiter==3.3.0
5 | more_itertools==8.4.0
6 | mysqlclient==2.1.1
7 | orjson==3.9.15
8 | pandas==1.2.3
9 | python-dotenv==0.15.0
10 | pyyaml
11 | redis==3.5.3
12 | requests==2.32.0
13 | scipy==1.10.0
14 | sentry-sdk[flask]
15 | SQLAlchemy==1.4.40
16 | structlog==22.1.0
17 | tenacity==7.0.0
18 | typing-extensions
19 | werkzeug==3.0.6
20 |
--------------------------------------------------------------------------------
/requirements.dev.txt:
--------------------------------------------------------------------------------
1 | aiohttp==3.9.4
2 | black>=20.8b1
3 | covidcast==0.1.5
4 | delphi_utils
5 | docker==6.0.1
6 | dropbox==11.36.0
7 | freezegun==1.2.2
8 | invoke>=1.4.1
9 | lxml==4.9.1
10 | matplotlib==3.6.2
11 | mypy>=0.790
12 | mysql-connector==2.2.9
13 | numpy==1.22.4
14 | pycountry==22.3.5
15 | pytest==7.2.0
16 | pytest-check==1.3.0
17 | sas7bdat==2.2.3
18 | selenium==4.7.2
19 | sqlalchemy-stubs>=0.3
20 | tenacity==7.0.0
21 | xlrd==2.0.1
22 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/common/README.md:
--------------------------------------------------------------------------------
1 | # common
2 |
3 | This directory contains somewhat generic code that's used by multiple
4 | `covid_hosp` scrapers. This includes code that's used by multiple unit and
5 | integration tests (see `test_utils.py`), even when that code isn't directly
6 | used in production.
7 |
8 | Since the operational difference between `covid_hosp` datasets is very small,
9 | most of the actual logic is contained here.
10 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/common/network.py:
--------------------------------------------------------------------------------
1 | # third party
2 | import pandas
3 |
4 |
class Network:
  """Shared download helpers for the `covid_hosp` scrapers."""

  # healthdata.gov CSV export endpoint; interpolated with a dataset/metadata id
  METADATA_URL_TEMPLATE = \
      'https://healthdata.gov/api/views/%s/rows.csv'

  # NOTE: declared @staticmethod for consistency with the state_daily
  # subclass and so the helpers can be called on instances as well.
  @staticmethod
  def fetch_metadata_for_dataset(dataset_id, logger=False):
    """Download and return metadata.

    Parameters
    ----------
    dataset_id : str
      healthdata.gov dataset identifier of the dataset.
    logger : structlog.Logger [optional; default False]
      Logger to receive messages.

    Returns
    -------
    object
      The metadata object, indexed and sorted by its "Update Date" column.
    """
    url = Network.METADATA_URL_TEMPLATE % dataset_id
    if logger:
      logger.info('fetching metadata', url=url)
    df = Network.fetch_dataset(url)
    # normalize, sort, and index by update timestamp so callers can
    # select revisions by comparing against the index
    df["Update Date"] = pandas.to_datetime(df["Update Date"])
    df.sort_values("Update Date", inplace=True)
    df.set_index("Update Date", inplace=True)
    return df

  @staticmethod
  def fetch_dataset(url, pandas_impl=pandas, logger=False):
    """Download and return a dataset.

    Type inference is disabled in favor of explicit type casting at the
    database abstraction layer. Pandas behavior is to represent non-missing
    values as strings and missing values as `math.nan`.

    Parameters
    ----------
    url : str
      URL to the dataset in CSV format.
    pandas_impl : module [optional; default pandas]
      Injection point for a pandas-compatible implementation (used in tests).
    logger : structlog.Logger [optional; default False]
      Logger to receive messages.

    Returns
    -------
    pandas.DataFrame
      The dataset.
    """
    if logger:
      logger.info('fetching dataset', url=url)
    return pandas_impl.read_csv(url, dtype=str)
55 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/common/test_utils.py:
--------------------------------------------------------------------------------
1 | """Utility functions only used in tests.
2 |
3 | This code is not used in production.
4 |
5 | The functions in this file are used by both unit and integration tests.
6 | However, unit tests can't import code that lives in integration tests, and vice
7 | versa. As a result, common test code has to live under the top-level `/src`
8 | dir, hence the existence of this file.
9 | """
10 |
11 | # standard library
12 | from pathlib import Path
13 |
14 | # third party
15 | import pandas
16 |
17 |
class UnitTestUtils:
  """Locates and loads `covid_hosp` sample data for unit/integration tests."""

  # path to `covid_hosp` test data, relative to the top of the repo
  PATH_TO_TESTDATA = 'testdata/acquisition/covid_hosp'

  def __init__(self, abs_path_to_caller):
    # Walk upward from the caller's file toward the repo root (the first
    # ancestor containing a `testdata` directory), noting the dataset name
    # (the directory whose parent is `covid_hosp`) along the way.
    probe = Path(abs_path_to_caller)
    dataset = None
    while not (probe / 'testdata').exists():

      # reaching the filesystem root means we never found the repo
      if not probe.name:
        raise Exception('unable to determine path to delphi-epidata repo')

      # a path like .../acquisition/covid_hosp/<dataset> names the dataset
      if probe.parent.name == 'covid_hosp':
        dataset = probe.name

      probe = probe.parent

    if not dataset:
      raise Exception('unable to determine name of dataset under test')

    # absolute path to this dataset's sample files
    self.data_dir = (probe / UnitTestUtils.PATH_TO_TESTDATA / dataset).resolve()

  def load_sample_metadata(self, metadata_name='metadata.csv'):
    """Read a sample metadata CSV, sorted and indexed by "Update Date"."""
    frame = pandas.read_csv(self.data_dir / metadata_name, dtype=str)
    frame["Update Date"] = pandas.to_datetime(frame["Update Date"])
    frame.sort_values("Update Date", inplace=True)
    frame.set_index("Update Date", inplace=True)
    return frame

  def load_sample_dataset(self, dataset_name='dataset.csv'):
    """Read a sample dataset CSV with all values as strings."""
    return pandas.read_csv(self.data_dir / dataset_name, dtype=str)
60 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/facility/README.md:
--------------------------------------------------------------------------------
1 | # COVID-19 Reported Patient Impact and Hospital Capacity by Facility
2 |
3 | - Data source:
4 | https://healthdata.gov/dataset/covid-19-reported-patient-impact-and-hospital-capacity-facility
5 | - Data dictionary:
6 | https://healthdata.gov/covid-19-reported-patient-impact-and-hospital-capacity-facility-data-dictionary
7 | - Geographic resolution: healthcare facility (address, city, zip, fips)
8 | - Temporal resolution: weekly (Friday -- Thursday)
9 | - First week: 2020-07-31
10 | - First issue: 2020-12-08
11 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/facility/network.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.acquisition.covid_hosp.common.network import Network as BaseNetwork
3 |
4 |
class Network(BaseNetwork):
  """Network operations for the by-facility `covid_hosp` dataset."""

  # healthdata.gov identifiers for the dataset and its metadata listing
  DATASET_ID = 'anag-cw7u'
  METADATA_ID = 'j4ip-wfsv'

  # @staticmethod added for consistency with the state_daily variant
  @staticmethod
  def fetch_metadata(*args, **kwargs):
    """Download and return metadata.

    See `fetch_metadata_for_dataset`.
    """

    return Network.fetch_metadata_for_dataset(
        *args, **kwargs, dataset_id=Network.METADATA_ID)
18 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/facility/update.py:
--------------------------------------------------------------------------------
1 | """
2 | Acquires the "COVID-19 Reported Patient Impact and Hospital Capacity by
3 | Facility" dataset provided by the US Department of Health & Human Services via
4 | healthdata.gov.
5 | """
6 |
7 | # first party
8 | from delphi.epidata.acquisition.covid_hosp.common.utils import Utils
9 | from delphi.epidata.acquisition.covid_hosp.facility.database import Database
10 | from delphi.epidata.acquisition.covid_hosp.facility.network import Network
11 |
12 |
class Update:
  """Entry point for acquiring the by-facility `covid_hosp` dataset."""

  # @staticmethod added for consistency with the state_daily variant
  @staticmethod
  def run(network=Network):
    """Acquire the most recent dataset, unless it was previously acquired.

    Parameters
    ----------
    network : class [optional; default Network]
      Injection point for network operations (used in tests).

    Returns
    -------
    bool
      Whether a new dataset was acquired.
    """

    return Utils.update_dataset(Database, network)


# main entry point
Utils.launch_if_main(Update.run, __name__)
29 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/state_daily/README.md:
--------------------------------------------------------------------------------
1 | # COVID-19 Reported Patient Impact and Hospital Capacity by State (Daily Snapshots)
2 |
3 | - Data source:
4 | https://healthdata.gov/dataset/covid-19-reported-patient-impact-and-hospital-capacity-state
5 | - Data dictionary:
6 | https://healthdata.gov/covid-19-reported-patient-impact-and-hospital-capacity-state-data-dictionary
7 | - Geographic resolution: US States plus DC, VI, and PR
8 | - Temporal resolution: daily
9 | - First date: 2020-07-20
10 | - First issue: 2020-07-20
11 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/state_daily/network.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.acquisition.covid_hosp.common.network import Network as BaseNetwork
3 |
class Network(BaseNetwork):
  """Network operations for the state-daily `covid_hosp` dataset."""

  # healthdata.gov identifiers for the dataset and its metadata listing
  DATASET_ID = '6xf2-c3ie'
  METADATA_ID = '4cnb-m4rz'

  @staticmethod
  def fetch_metadata(*args, **kwargs):
    """Download and return metadata.

    See `fetch_metadata_for_dataset`.
    """

    return Network.fetch_metadata_for_dataset(
        *args, **kwargs, dataset_id=Network.METADATA_ID)

  @staticmethod
  def fetch_revisions(metadata, newer_than):
    """Extract all dataset URLs from metadata for issues after newer_than.

    Parameters
    ----------
    metadata DataFrame
      Metadata DF containing all rows of metadata from data source page.

    newer_than Timestamp or datetime
      Date and time of issue to use as lower bound for new URLs.

    Returns
    -------
    List of URLs of issues after newer_than
    """
    # the metadata frame is indexed by its "Update Date" timestamp
    is_new = metadata.index > newer_than
    return list(metadata.loc[is_new, "Archive Link"])
37 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/state_daily/update.py:
--------------------------------------------------------------------------------
1 | """
2 | Acquires the "COVID-19 Reported Patient Impact and Hospital Capacity by State"
3 | dataset provided by the US Department of Health & Human Services
4 | via healthdata.gov.
5 | """
6 | # first party
7 | from delphi.epidata.acquisition.covid_hosp.common.utils import Utils
8 | from delphi.epidata.acquisition.covid_hosp.state_daily.database import Database
9 | from delphi.epidata.acquisition.covid_hosp.state_daily.network import Network
10 |
11 |
class Update:
  """Entry point for acquiring the state-daily `covid_hosp` dataset."""

  @staticmethod
  def run(network=Network):
    """Acquire the most recent dataset, unless it was previously acquired.

    Returns
    -------
    bool
      Whether a new dataset was acquired.
    """

    acquired = Utils.update_dataset(Database, network)
    return acquired


# main entry point
Utils.launch_if_main(Update.run, __name__)
29 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/state_timeseries/README.md:
--------------------------------------------------------------------------------
1 | # COVID-19 Reported Patient Impact and Hospital Capacity by State Timeseries
2 |
3 | - Data source:
4 | https://healthdata.gov/dataset/covid-19-reported-patient-impact-and-hospital-capacity-state-timeseries
5 | - Data dictionary:
6 | https://healthdata.gov/covid-19-reported-patient-impact-and-hospital-capacity-state-data-dictionary
7 | - Geographic resolution: US States plus DC, VI, and PR
8 | - Temporal resolution: daily
9 | - First date: 2020-01-01
10 | - First issue: 2020-11-16
11 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/state_timeseries/network.py:
--------------------------------------------------------------------------------
1 | # first party
2 | from delphi.epidata.acquisition.covid_hosp.common.network import Network as BaseNetwork
3 |
4 |
class Network(BaseNetwork):
  """Network operations for the state-timeseries COVID hospitalization dataset."""

  # healthdata.gov identifiers: the dataset itself and its metadata listing.
  DATASET_ID = 'g62h-syeh'
  METADATA_ID = 'qqte-vkut'

  # NOTE: decorator was missing (sibling state_daily version has it); without
  # it, calling `fetch_metadata` on an instance would bind the instance as the
  # first positional argument.
  @staticmethod
  def fetch_metadata(*args, **kwargs):
    """Download and return metadata.

    See `fetch_metadata_for_dataset`.
    """

    return Network.fetch_metadata_for_dataset(
        *args, **kwargs, dataset_id=Network.METADATA_ID)
18 |
--------------------------------------------------------------------------------
/src/acquisition/covid_hosp/state_timeseries/update.py:
--------------------------------------------------------------------------------
1 | """
2 | Acquires the "COVID-19 Reported Patient Impact and Hospital Capacity by State
3 | Timeseries" dataset provided by the US Department of Health & Human Services
4 | via healthdata.gov.
5 | """
6 |
7 | # first party
8 | from delphi.epidata.acquisition.covid_hosp.common.utils import Utils
9 | from delphi.epidata.acquisition.covid_hosp.state_timeseries.database import Database
10 | from delphi.epidata.acquisition.covid_hosp.state_timeseries.network import Network
11 |
12 |
class Update:
  """Entry point for acquiring the state-timeseries dataset."""

  # NOTE: decorator was missing (sibling state_daily version has it); without
  # it, `Update().run()` would pass the instance as `network`. Module-level
  # usage via `Update.run` was unaffected.
  @staticmethod
  def run(network=Network):
    """Acquire the most recent dataset, unless it was previously acquired.

    Parameters
    ----------
    network
      Network class used for fetching; defaults to this dataset's Network.

    Returns
    -------
    bool
      Whether a new dataset was acquired.
    """

    return Utils.update_dataset(Database, network)


# main entry point
Utils.launch_if_main(Update.run, __name__)
29 |
--------------------------------------------------------------------------------
/src/acquisition/wiki/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/src/acquisition/wiki/__init__.py
--------------------------------------------------------------------------------
/src/acquisition/wiki/create_wiki_meta_table.sql:
--------------------------------------------------------------------------------
-- `wiki_meta` stores one aggregate row per access-log timestamp: the `total`
-- count for that hour/date/epiweek -- presumably used to normalize the
-- per-article counts in `wiki`; TODO confirm against the acquisition code.
CREATE TABLE `wiki_meta`(
  `id` INT(11) NOT NULL PRIMARY KEY AUTO_INCREMENT,
  `datetime` DATETIME NOT NULL ,
  `date` date NOT NULL ,
  `epiweek` INT(11) NOT NULL ,
  `total` INT(11) NOT NULL ,
  UNIQUE KEY `datetime` (`datetime`)
);

# Migration: add a `language` column (two-letter code, default 'en')
ALTER TABLE `wiki_meta`
ADD `language` CHAR(2) NOT NULL DEFAULT 'en';

# Migration: widen the unique constraint to one row per (datetime, language)
ALTER TABLE `wiki_meta`
DROP INDEX `datetime`,
ADD UNIQUE KEY `datetime` (`datetime`, `language`);
18 |
19 |
--------------------------------------------------------------------------------
/src/acquisition/wiki/create_wiki_raw_table.sql:
--------------------------------------------------------------------------------
-- `wiki_raw` appears to track per-file ingestion jobs: each row is one log
-- file (`name`, content `hash`) with an indexed integer `status` (default 0),
-- the `worker` that processed it, processing time (`elapsed`), and a
-- serialized `data` payload -- presumably extracted article counts;
-- TODO confirm semantics against the wiki acquisition code.
CREATE TABLE `wiki_raw` (
  `id` int(11) NOT NULL PRIMARY KEY AUTO_INCREMENT,
  `name` varchar(64) NOT NULL,
  `hash` char(32) NOT NULL,
  `status` int(11) NOT NULL DEFAULT '0',
  `size` int(11) DEFAULT NULL,
  `datetime` datetime DEFAULT NULL,
  `worker` varchar(256) DEFAULT NULL,
  `elapsed` float DEFAULT NULL,
  `data` varchar(2048) DEFAULT NULL,
  UNIQUE KEY `name` (`name`),
  KEY `status` (`status`)
);

# Migration: widen `data` because more articles are extracted per file
ALTER TABLE `wiki_raw` MODIFY COLUMN `data` varchar(4096);
17 |
18 |
--------------------------------------------------------------------------------
/src/acquisition/wiki/create_wiki_table.sql:
--------------------------------------------------------------------------------
-- `wiki` stores one row per (datetime, article): the page-view `count` for
-- that article at that access-log timestamp.
CREATE TABLE `wiki`(
  `id` INT(11) NOT NULL PRIMARY KEY AUTO_INCREMENT,
  `datetime` DATETIME NOT NULL ,
  `article` VARCHAR(64) NOT NULL ,
  `count` INT(11) NOT NULL ,
  UNIQUE KEY `datetime` (`datetime`, `article`),
  KEY `datetime_2` (`datetime`),
  KEY `article` (`article`)
);

# Migration: add a `language` column (two-letter code, default 'en')
ALTER TABLE `wiki`
ADD `language` CHAR(2) NOT NULL DEFAULT 'en';

# Migration: widen the unique constraint to one row per (datetime, article, language)
ALTER TABLE `wiki`
DROP INDEX `datetime`,
ADD UNIQUE KEY `datetime` (`datetime`, `article`, `language`);
19 |
--------------------------------------------------------------------------------
/src/client/packaging/npm/.gitignore:
--------------------------------------------------------------------------------
1 | /delphi_epidata.*
2 | /node_modules
3 | /*.tgz
--------------------------------------------------------------------------------
/src/client/packaging/npm/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2018 The Delphi Group at Carnegie Mellon University
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/src/client/packaging/npm/README.md:
--------------------------------------------------------------------------------
1 | # Delphi Epidata API Client
2 |
3 | This package provides a programmatic interface to
4 | [Delphi](https://delphi.cmu.edu/)'s epidemiological data ("epidata")
5 | API. Source code and usage information can be found at
6 | [https://github.com/cmu-delphi/delphi-epidata](https://github.com/cmu-delphi/delphi-epidata).
7 |
--------------------------------------------------------------------------------
/src/client/packaging/npm/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "delphi_epidata",
3 | "description": "Delphi Epidata API Client",
4 | "authors": "Delphi Group",
5 | "version": "4.1.33",
6 | "license": "MIT",
7 | "homepage": "https://github.com/cmu-delphi/delphi-epidata",
8 | "bugs": {
9 | "url": "https://github.com/cmu-delphi/delphi-epidata/issues"
10 | },
11 | "repository": {
12 | "type": "git",
13 | "url": "https://github.com/cmu-delphi/delphi-epidata.git"
14 | },
15 | "main": "delphi_epidata.js",
16 | "types": "delphi_epidata.d.ts",
17 | "browser": "delphi_epidata.js",
18 | "files": [
19 | "delphi_epidata.js",
20 | "delphi_epidata.d.ts"
21 | ],
22 | "scripts": {
23 | "prepack": "npx shx cp -f ../../delphi_epidata.js ../../delphi_epidata.d.ts ./",
24 | "test": "npm run prepack && jest"
25 | },
26 | "dependencies": {
27 | "cross-fetch": "^3.1.4"
28 | },
29 | "devDependencies": {
30 | "jest": "^27",
31 | "shx": "^0.3.3"
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/src/client/packaging/npm/tests/__snapshots__/delphi_epidata.spec.js.snap:
--------------------------------------------------------------------------------
1 | // Jest Snapshot v1, https://goo.gl/fbAQLP
2 |
3 | exports[`alternative API endpoints new 1`] = `
4 | [MockFunction] {
5 | "calls": Array [
6 | Array [
7 | "http://test.com/fluview/?regions=a&epiweeks=4",
8 | ],
9 | ],
10 | "results": Array [
11 | Object {
12 | "type": "return",
13 | "value": Promise {},
14 | },
15 | ],
16 | }
17 | `;
18 |
19 | exports[`alternative API endpoints php 1`] = `
20 | [MockFunction] {
21 | "calls": Array [
22 | Array [
      "http://test.com/api.php?endpoint=fluview&regions=a&epiweeks=4",
24 | ],
25 | ],
26 | "results": Array [
27 | Object {
28 | "type": "return",
29 | "value": Promise {},
30 | },
31 | ],
32 | }
33 | `;
34 |
35 | exports[`fluview basic async 1`] = `
36 | [MockFunction] {
37 | "calls": Array [
38 | Array [
39 | "https://api.delphi.cmu.edu/epidata/fluview/?regions=a&epiweeks=4",
40 | ],
41 | ],
42 | "results": Array [
43 | Object {
44 | "type": "return",
45 | "value": Promise {},
46 | },
47 | ],
48 | }
49 | `;
50 |
51 | exports[`fluview basic sync 1`] = `
52 | [MockFunction] {
53 | "calls": Array [
54 | Array [
55 | "https://api.delphi.cmu.edu/epidata/fluview/?regions=a&epiweeks=4",
56 | ],
57 | ],
58 | "results": Array [
59 | Object {
60 | "type": "return",
61 | "value": Promise {},
62 | },
63 | ],
64 | }
65 | `;
66 |
--------------------------------------------------------------------------------
/src/client/packaging/pypi/.bumpversion.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 4.1.25
3 | commit = False
4 | tag = False
5 |
6 | [bumpversion:file:../../delphi_epidata.py]
7 |
8 | [bumpversion:file:pyproject.toml]
9 |
--------------------------------------------------------------------------------
/src/client/packaging/pypi/.gitignore:
--------------------------------------------------------------------------------
1 | delphi_epidata/*
2 | !delphi_epidata/__init__.py
3 | .eggs
4 | /build
5 | /dist
6 | /*.egg-info
--------------------------------------------------------------------------------
/src/client/packaging/pypi/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2018 The Delphi Group at Carnegie Mellon University
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/src/client/packaging/pypi/README.md:
--------------------------------------------------------------------------------
1 | # Delphi Epidata API Client
2 |
3 | This package provides a programmatic interface to
4 | [Delphi](https://delphi.cmu.edu/)'s epidemiological data ("epidata")
5 | API. Source code and usage information can be found at
6 | [https://github.com/cmu-delphi/delphi-epidata](https://github.com/cmu-delphi/delphi-epidata).
7 |
--------------------------------------------------------------------------------
/src/client/packaging/pypi/delphi_epidata/__init__.py:
--------------------------------------------------------------------------------
# Re-export the public client interface from the implementation module.
from .delphi_epidata import Epidata, __version__

# Package name as a module attribute -- presumably kept for consumers that
# read it; TODO confirm before removing.
name = "delphi_epidata"
4 |
--------------------------------------------------------------------------------
/src/client/packaging/pypi/pyproject.toml:
--------------------------------------------------------------------------------
1 | # This file was derived from the PyPA Sample Project
2 | # https://github.com/pypa/sampleproject
3 |
4 | # Guide (user-friendly):
5 | # https://packaging.python.org/en/latest/guides/writing-pyproject-toml/
6 |
7 | # Specification (technical, formal):
8 | # https://packaging.python.org/en/latest/specifications/pyproject-toml/
9 |
10 |
11 | # Choosing a build backend:
12 | # https://packaging.python.org/en/latest/tutorials/packaging-projects/#choosing-a-build-backend
13 | [build-system]
14 | # A list of packages that are needed to build your package:
15 | requires = ["setuptools"] # REQUIRED if [build-system] table is used
16 | # The name of the Python object that frontends will use to perform the build:
17 | build-backend = "setuptools.build_meta" # If not defined, then legacy behavior can happen.
18 |
19 | [project]
20 | name = "delphi_epidata" # REQUIRED, is the only field that cannot be marked as dynamic.
21 | version = "4.1.25"
22 | description = "A programmatic interface to Delphi's Epidata API."
23 | readme = "README.md"
24 | license = { file = "LICENSE" }
25 | authors = [{ name = "David Farrow", email = "dfarrow0@gmail.com" }]
26 | maintainers = [
27 | { name = "Delphi Support", email = "delphi-support+pypi@andrew.cmu.edu" },
28 | ]
29 | # For a list of valid classifiers, see https://pypi.org/classifiers/
30 | classifiers = [
31 | "License :: OSI Approved :: MIT License",
32 | "Programming Language :: Python",
33 | "Programming Language :: Python :: 2",
34 | "Programming Language :: Python :: 3",
35 | "Operating System :: OS Independent",
36 | "Intended Audience :: Science/Research",
37 | "Natural Language :: English",
38 | "Topic :: Scientific/Engineering :: Bio-Informatics",
39 | ]
40 | dependencies = ["aiohttp", "requests>=2.7.0", "tenacity"]
41 |
42 | [project.urls]
43 | "Homepage" = "https://github.com/cmu-delphi/delphi-epidata"
44 | "Changelog" = "https://github.com/cmu-delphi/delphi-epidata/blob/main/src/client/packaging/pypi/CHANGELOG.md"
45 |
--------------------------------------------------------------------------------
/src/ddl/api_user.sql:
--------------------------------------------------------------------------------
USE epidata;


-- `api_user` API key and user management: one row per registered key.
-- NOTE(review): `api_key` and `email` are each individually UNIQUE, so the
-- composite UNIQUE KEY below is redundant as a constraint -- possibly kept
-- as a covering index; confirm before removing.

CREATE TABLE IF NOT EXISTS `api_user` (
  `id` int(11) UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
  `api_key` varchar(50) UNIQUE NOT NULL,
  `email` varchar(320) UNIQUE NOT NULL,
  `created` date,
  `last_time_used` date,
  UNIQUE KEY `api_user` (`api_key`, `email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;


-- `user_role` User roles: unique role names referenced by the link table.

CREATE TABLE IF NOT EXISTS `user_role` (
  `id` int(11) UNSIGNED NOT NULL PRIMARY KEY AUTO_INCREMENT,
  `name` varchar(50) NOT NULL,
  UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;


-- `user_role_link` User roles link table: many-to-many between users and roles.

CREATE TABLE IF NOT EXISTS `user_role_link` (
  `user_id` int(11) UNSIGNED NOT NULL,
  `role_id` int(11) UNSIGNED NOT NULL,
  PRIMARY KEY (`user_id`, `role_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
32 |
--------------------------------------------------------------------------------
/src/ddl/ecdc_ili.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`ecdc_ili` stores influenza-like-illness incidence rates, one row per
(issue, epiweek, region). `issue` versions each publication of an epiweek's
data; `lag` is presumably the issue-to-epiweek delay -- TODO confirm against
the acquisition code.
*/

/*
+----------------+-------------+------+-----+---------+----------------+
| Field          | Type        | Null | Key | Default | Extra          |
+----------------+-------------+------+-----+---------+----------------+
| id             | int(11)     | NO   | PRI | NULL    | auto_increment |
| release_date   | date        | NO   |     | NULL    |                |
| issue          | int(11)     | NO   | MUL | NULL    |                |
| epiweek        | int(11)     | NO   |     | NULL    |                |
| lag            | int(11)     | NO   |     | NULL    |                |
| region         | varchar(30) | NO   |     | NULL    |                |
| incidence_rate | double      | NO   |     | NULL    |                |
+----------------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `ecdc_ili` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `release_date` date NOT NULL,
  `issue` int(11) NOT NULL,
  `epiweek` int(11) NOT NULL,
  `lag` int(11) NOT NULL,
  `region` varchar(30) NOT NULL,
  `incidence_rate` double NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `issue` (`issue`,`epiweek`,`region`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
31 |
--------------------------------------------------------------------------------
/src/ddl/forecasts.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`forecasts` stores one submission per (system, epiweek); the payload is kept
as text in the `json` mediumtext column (presumably serialized JSON).
*/

/*
+---------+-------------+------+-----+---------+----------------+
| Field   | Type        | Null | Key | Default | Extra          |
+---------+-------------+------+-----+---------+----------------+
| id      | int(11)     | NO   | PRI | NULL    | auto_increment |
| system  | varchar(64) | NO   | MUL | NULL    |                |
| epiweek | int(11)     | NO   | MUL | NULL    |                |
| json    | mediumtext  | NO   |     | NULL    |                |
+---------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `forecasts` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `system` varchar(64) NOT NULL,
  `epiweek` int(11) NOT NULL,
  `json` mediumtext NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `system` (`system`,`epiweek`),
  KEY `system_2` (`system`),
  KEY `epiweek` (`epiweek`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
27 |
--------------------------------------------------------------------------------
/src/ddl/gft.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`gft` stores one integer value (`num`) per (epiweek, location) -- presumably
Google Flu Trends estimates, judging by the table name; TODO confirm.
*/

/*
+----------+-------------+------+-----+---------+----------------+
| Field    | Type        | Null | Key | Default | Extra          |
+----------+-------------+------+-----+---------+----------------+
| id       | int(11)     | NO   | PRI | NULL    | auto_increment |
| epiweek  | int(11)     | NO   | MUL | NULL    |                |
| location | varchar(64) | NO   | MUL | NULL    |                |
| num      | int(11)     | NO   |     | NULL    |                |
+----------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `gft` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `epiweek` int(11) NOT NULL,
  `location` varchar(64) NOT NULL,
  `num` int(11) NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `epiweek` (`epiweek`,`location`),
  KEY `epiweek_2` (`epiweek`),
  KEY `location` (`location`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
27 |
--------------------------------------------------------------------------------
/src/ddl/ght.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`ght` stores one float `value` per (query, location, epiweek) -- presumably
Google Health Trends search-volume data, judging by the table name;
TODO confirm.
*/

/*
+----------+-------------+------+-----+---------+----------------+
| Field    | Type        | Null | Key | Default | Extra          |
+----------+-------------+------+-----+---------+----------------+
| id       | int(11)     | NO   | PRI | NULL    | auto_increment |
| query    | varchar(64) | NO   | MUL | NULL    |                |
| location | varchar(8)  | NO   | MUL | NULL    |                |
| epiweek  | int(11)     | NO   | MUL | NULL    |                |
| value    | float       | NO   |     | NULL    |                |
+----------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `ght` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `query` varchar(64) NOT NULL,
  `location` varchar(8) NOT NULL,
  `epiweek` int(11) NOT NULL,
  `value` float NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `query` (`query`,`location`,`epiweek`),
  KEY `query_2` (`query`),
  KEY `location` (`location`),
  KEY `epiweek` (`epiweek`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
30 |
--------------------------------------------------------------------------------
/src/ddl/kcdc_ili.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`kcdc_ili` stores influenza-like-illness values (`ili`), one row per
(issue, epiweek, region). `issue` versions each publication of an epiweek's
data; `lag` is presumably the issue-to-epiweek delay -- TODO confirm.
*/

/*
+--------------+-------------+------+-----+---------+----------------+
| Field        | Type        | Null | Key | Default | Extra          |
+--------------+-------------+------+-----+---------+----------------+
| id           | int(11)     | NO   | PRI | NULL    | auto_increment |
| release_date | date        | NO   |     | NULL    |                |
| issue        | int(11)     | NO   | MUL | NULL    |                |
| epiweek      | int(11)     | NO   |     | NULL    |                |
| lag          | int(11)     | NO   |     | NULL    |                |
| region       | varchar(12) | NO   |     | NULL    |                |
| ili          | double      | NO   |     | NULL    |                |
+--------------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `kcdc_ili` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `release_date` date NOT NULL,
  `issue` int(11) NOT NULL,
  `epiweek` int(11) NOT NULL,
  `lag` int(11) NOT NULL,
  `region` varchar(12) NOT NULL,
  `ili` double NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `issue` (`issue`,`epiweek`,`region`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
31 |
--------------------------------------------------------------------------------
/src/ddl/migrations/covid_hosp_facility_v0.2-v0.3.sql:
--------------------------------------------------------------------------------
-- Migration (covid_hosp_facility schema v0.2 -> v0.3): add columns introduced
-- upstream -- geocoded address, HHS ids, a correction flag, and the
-- admission-coverage / vaccination 7-day fields below. All are nullable, so
-- existing rows remain valid with NULLs.
ALTER TABLE covid_hosp_facility ADD (
    `geocoded_hospital_address` VARCHAR(32),
    `hhs_ids` VARCHAR(127),
    `is_corrected` BOOLEAN,
    `previous_day_admission_adult_covid_confirmed_7_day_coverage` INT,
    `previous_day_admission_adult_covid_suspected_7_day_coverage` INT,
    `previous_day_admission_pediatric_covid_confirmed_7_day_coverage` INT,
    `previous_day_admission_pediatric_covid_suspected_7_day_coverage` INT,
    `previous_week_patients_covid_vaccinated_doses_all_7_day` INT,
    `previous_week_patients_covid_vaccinated_doses_all_7_day_sum` INT,
    `previous_week_patients_covid_vaccinated_doses_one_7_day` INT,
    `previous_week_patients_covid_vaccinated_doses_one_7_day_sum` INT,
    `previous_week_personnel_covid_vaccd_doses_administered_7_day` INT,
    `previous_week_personnel_covid_vaccd_doses_administered_7_day_sum` INT,
    `total_personnel_covid_vaccinated_doses_all_7_day` INT,
    `total_personnel_covid_vaccinated_doses_all_7_day_sum` INT,
    `total_personnel_covid_vaccinated_doses_none_7_day` INT,
    `total_personnel_covid_vaccinated_doses_none_7_day_sum` INT,
    `total_personnel_covid_vaccinated_doses_one_7_day` INT,
    `total_personnel_covid_vaccinated_doses_one_7_day_sum` INT);
21 |
--------------------------------------------------------------------------------
/src/ddl/migrations/covid_hosp_meta_v0.4.4-v0.4.5.sql:
--------------------------------------------------------------------------------
-- Migration (covid_hosp_meta v0.4.4 -> v0.4.5): add `hhs_dataset_id`, then
-- backfill it by matching the dataset id embedded in `revision_timestamp`.
-- Rows matching none of the three known ids keep the "????-????" placeholder.
ALTER TABLE covid_hosp_meta ADD COLUMN hhs_dataset_id CHAR(9) NOT NULL DEFAULT "????-????";
UPDATE covid_hosp_meta SET hhs_dataset_id="g62h-syeh" WHERE revision_timestamp LIKE "%g62h-syeh%";
UPDATE covid_hosp_meta SET hhs_dataset_id="6xf2-c3ie" WHERE revision_timestamp LIKE "%6xf2-c3ie%";
UPDATE covid_hosp_meta SET hhs_dataset_id="anag-cw7u" WHERE revision_timestamp LIKE "%anag-cw7u%";
5 |
--------------------------------------------------------------------------------
/src/ddl/nowcasts.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`nowcasts` stores one estimate per (epiweek, location): a point `value` and
its standard deviation `std`. `location` is nullable.
*/

/*
+----------+-------------+------+-----+---------+----------------+
| Field    | Type        | Null | Key | Default | Extra          |
+----------+-------------+------+-----+---------+----------------+
| id       | int(11)     | NO   | PRI | NULL    | auto_increment |
| epiweek  | int(11)     | NO   | MUL | NULL    |                |
| location | varchar(12) | YES  | MUL | NULL    |                |
| value    | float       | NO   |     | NULL    |                |
| std      | float       | NO   |     | NULL    |                |
+----------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `nowcasts` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `epiweek` int(11) NOT NULL,
  `location` varchar(12) DEFAULT NULL,
  `value` float NOT NULL,
  `std` float NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `epiweek` (`epiweek`,`location`),
  KEY `epiweek_2` (`epiweek`),
  KEY `location` (`location`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- fix: added the missing statement terminator (`;`); every sibling DDL file
-- ends its CREATE TABLE with one, and an unterminated final statement may be
-- skipped by the mysql client in batch mode.
29 |
--------------------------------------------------------------------------------
/src/ddl/paho_dengue.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`paho_dengue` stores PAHO dengue surveillance data, one row per
(issue, epiweek, region): case counts (`num_dengue`, `num_severe`,
`num_deaths`), `incidence_rate`, `serotype`, and the region's `total_pop`.
`issue` versions each publication; `lag` is presumably the issue-to-epiweek
delay -- TODO confirm.
*/

/*
+----------------+-------------+------+-----+---------+----------------+
| Field          | Type        | Null | Key | Default | Extra          |
+----------------+-------------+------+-----+---------+----------------+
| id             | int(11)     | NO   | PRI | NULL    | auto_increment |
| release_date   | date        | NO   |     | NULL    |                |
| issue          | int(11)     | NO   | MUL | NULL    |                |
| epiweek        | int(11)     | NO   |     | NULL    |                |
| lag            | int(11)     | NO   |     | NULL    |                |
| region         | varchar(12) | NO   |     | NULL    |                |
| total_pop      | int(11)     | NO   |     | NULL    |                |
| serotype       | varchar(12) | NO   |     | NULL    |                |
| num_dengue     | int(11)     | NO   |     | NULL    |                |
| incidence_rate | double      | NO   |     | NULL    |                |
| num_severe     | int(11)     | NO   |     | NULL    |                |
| num_deaths     | int(11)     | NO   |     | NULL    |                |
+----------------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `paho_dengue` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `release_date` date NOT NULL,
  `issue` int(11) NOT NULL,
  `epiweek` int(11) NOT NULL,
  `lag` int(11) NOT NULL,
  `region` varchar(12) NOT NULL,
  `total_pop` int(11) NOT NULL,
  `serotype` varchar(12) NOT NULL,
  `num_dengue` int(11) NOT NULL,
  `incidence_rate` double NOT NULL,
  `num_severe` int(11) NOT NULL,
  `num_deaths` int(11) NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `issue` (`issue`,`epiweek`,`region`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
41 |
--------------------------------------------------------------------------------
/src/ddl/quidel.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`quidel` stores one float `value` per (epiweek, location), along with the
number of source rows (`num_rows`) and devices (`num_devices`) behind each
value -- presumably aggregated Quidel test-device data; TODO confirm.
*/

/*
+-------------+------------+------+-----+---------+----------------+
| Field       | Type       | Null | Key | Default | Extra          |
+-------------+------------+------+-----+---------+----------------+
| id          | int(11)    | NO   | PRI | NULL    | auto_increment |
| location    | varchar(8) | NO   | MUL | NULL    |                |
| epiweek     | int(11)    | NO   | MUL | NULL    |                |
| value       | float      | NO   |     | NULL    |                |
| num_rows    | int(11)    | NO   |     | NULL    |                |
| num_devices | int(11)    | NO   |     | NULL    |                |
+-------------+------------+------+-----+---------+----------------+
*/

CREATE TABLE `quidel` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `location` varchar(8) NOT NULL,
  `epiweek` int(11) NOT NULL,
  `value` float NOT NULL,
  `num_rows` int(11) NOT NULL,
  `num_devices` int(11) NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `ew_loc` (`epiweek`,`location`),
  KEY `ew` (`epiweek`),
  KEY `loc` (`location`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
31 |
--------------------------------------------------------------------------------
/src/ddl/sensors.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`sensors` stores one float `value` per (name, epiweek, location), where `name`
identifies a sensor (short, <= 8 chars). `location` is nullable.
*/

/*
+----------+-------------+------+-----+---------+----------------+
| Field    | Type        | Null | Key | Default | Extra          |
+----------+-------------+------+-----+---------+----------------+
| id       | int(11)     | NO   | PRI | NULL    | auto_increment |
| name     | varchar(8)  | NO   | MUL | NULL    |                |
| epiweek  | int(11)     | NO   | MUL | NULL    |                |
| location | varchar(12) | YES  | MUL | NULL    |                |
| value    | float       | NO   |     | NULL    |                |
+----------+-------------+------+-----+---------+----------------+
*/

CREATE TABLE `sensors` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(8) NOT NULL,
  `epiweek` int(11) NOT NULL,
  `location` varchar(12) DEFAULT NULL,
  `value` float NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `name` (`name`,`epiweek`,`location`),
  KEY `name_2` (`name`),
  KEY `epiweek` (`epiweek`),
  KEY `location` (`location`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
30 |
--------------------------------------------------------------------------------
/src/ddl/twitter.sql:
--------------------------------------------------------------------------------
USE epidata;
/*
`twitter` stores one row per (date, state): `num` matching tweets out of
`total` tweets observed -- presumably influenza-related keyword matches;
TODO confirm against the acquisition code.
*/

/*
+-------+---------+------+-----+---------+----------------+
| Field | Type    | Null | Key | Default | Extra          |
+-------+---------+------+-----+---------+----------------+
| id    | int(11) | NO   | PRI | NULL    | auto_increment |
| date  | date    | NO   | MUL | NULL    |                |
| state | char(2) | NO   | MUL | NULL    |                |
| num   | int(11) | NO   |     | NULL    |                |
| total | int(11) | NO   |     | NULL    |                |
+-------+---------+------+-----+---------+----------------+
*/

CREATE TABLE `twitter` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `date` date NOT NULL,
  `state` char(2) NOT NULL,
  `num` int(11) NOT NULL,
  `total` int(11) NOT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `date` (`date`,`state`),
  KEY `date_2` (`date`),
  KEY `state` (`state`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
29 |
--------------------------------------------------------------------------------
/src/ddl/v4_schema_aliases.sql:
--------------------------------------------------------------------------------
-- ----------------------------------
-- NOTE: this file ("v4_schema_aliases.sql") is deliberately named to be ordering-sensitive:
-- it must be executed *AFTER* "v4_schema.sql" to ensure referenced tables exist.
-- NOTE: v4-related db schema name change from `epidata` to `covid` is only implemented in acquisition code.
-- frontend api code still uses `epidata` but has these relevant tables/views "aliased" to use covid.blah when referred to as epidata.blah in context.
-- ----------------------------------

-- NOTE(review): MySQL expands `SELECT *` at view-creation time, so these
-- aliases must be re-created whenever columns change in the underlying
-- `covid` tables/views.
CREATE VIEW `epidata`.`epimetric_full_v` AS SELECT * FROM `covid`.`epimetric_full_v`;
CREATE VIEW `epidata`.`epimetric_latest_v` AS SELECT * FROM `covid`.`epimetric_latest_v`;
CREATE VIEW `epidata`.`covidcast_meta_cache` AS SELECT * FROM `covid`.`covidcast_meta_cache`;
CREATE VIEW `epidata`.`coverage_crossref_v` AS SELECT * FROM `covid`.`coverage_crossref_v`;
12 |
--------------------------------------------------------------------------------
/src/maintenance/coverage_crossref_updater.py:
--------------------------------------------------------------------------------
1 | """Updates the table for the `coverage_crossref` endpoint."""
2 |
3 | import time
4 |
5 | from delphi.epidata.acquisition.covidcast.database import Database
6 | from delphi_utils import get_structured_logger
7 |
8 |
9 | def main():
10 | """Updates the table for the `coverage_crossref`."""
11 |
12 | logger = get_structured_logger("coverage_crossref_updater")
13 | start_time = time.time()
14 | database = Database()
15 | database.connect()
16 |
17 | # compute and update coverage_crossref
18 | try:
19 | coverage = database.compute_coverage_crossref()
20 | finally:
21 | # clean up in success and in failure
22 | database.disconnect(True)
23 |
24 | logger.info(f"coverage_crossref returned: {coverage}")
25 |
26 | logger.info(
27 | "Generated and updated covidcast geo/signal coverage",
28 | total_runtime_in_seconds=round(time.time() - start_time, 2))
29 | return True
30 |
31 |
32 | if __name__ == '__main__':
33 | main()
34 |
--------------------------------------------------------------------------------
/src/maintenance/delete_batch.py:
--------------------------------------------------------------------------------
1 | """Deletes large numbers of rows from covidcast based on a CSV"""
2 |
3 | # standard library
4 | import argparse
5 | import glob
6 | import os
7 | import time
8 |
9 | # first party
10 | from delphi.epidata.acquisition.covidcast.database import Database
11 | from delphi_utils import get_structured_logger
12 |
13 |
def get_argument_parser():
    """Build and return the command-line parser for this script."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--deletion_dir', help='directory where deletion CSVs are stored')
    parser.add_argument('--log_file', help="filename for log output (defaults to stdout)")
    return parser
25 |
def handle_file(deletion_file, database, logger):
    """Delete the rows listed in one CSV file.

    Each CSV line must have at least 9 comma-separated fields; shorter lines
    are ignored. The first surviving row (the header) is dropped, and every
    remaining row is extended with a trailing "day" time-type marker.
    Returns the deleted-row count, or 0 after rolling back on error.
    """
    logger.info("Deleting from csv file", filename=deletion_file)
    with open(deletion_file) as f:
        parsed = [line.strip().split(",") for line in f]
    rows = [fields + ["day"] for fields in parsed if len(fields) >= 9]
    rows = rows[1:]  # drop the header row
    try:
        deleted = database.delete_batch(rows)
        logger.info("Deleted database rows", row_count=deleted)
        return deleted
    except Exception as e:
        logger.exception('Exception while deleting rows', exception=e)
        database.rollback()
        return 0
43 |
def main(args):
    """Delete rows from covidcast.

    Iterates over every CSV in args.deletion_dir (sorted by name), deletes
    the rows each file lists, and logs the total count and runtime.
    """
    logger = get_structured_logger("csv_deletion", filename=args.log_file)
    started = time.time()

    db = Database()
    db.connect()
    total = 0
    try:
        for csv_path in sorted(glob.glob(os.path.join(args.deletion_dir, '*.csv'))):
            deleted = handle_file(csv_path, db, logger)
            if deleted is None:
                # the database driver could not report a row count
                total = "rowcount unsupported"
            else:
                total += deleted
    finally:
        db.disconnect(True)

    logger.info(
        "Deleted CSVs from database",
        total_runtime_in_seconds=round(time.time() - started, 2), row_count=total)

if __name__ == '__main__':
    main(get_argument_parser().parse_args())
69 |
--------------------------------------------------------------------------------
/src/maintenance/update_last_usage.py:
--------------------------------------------------------------------------------
1 | import os
2 | from datetime import datetime as dtime
3 |
4 | import delphi.operations.secrets as secrets
5 | import mysql.connector
6 | import redis
7 |
8 | REDIS_HOST = os.environ.get("REDIS_HOST", "delphi_redis")
9 | REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD", "1234")
10 | LAST_USED_KEY_PATTERN = "*LAST_USED*"
11 |
12 |
def main():
    """Sync per-API-key last-usage dates from redis into the `api_user` table.

    For every redis key matching LAST_USED_KEY_PATTERN, parses the api key and
    the stored YYYY-MM-DD date, updates the row's `last_time_used` when the new
    date is later (or previously NULL), then deletes the redis key. Commits
    once after all updates.
    """
    redis_cli = redis.Redis(host=REDIS_HOST, password=REDIS_PASSWORD, decode_responses=True)
    u, p = secrets.db.epi
    cnx = mysql.connector.connect(database="epidata", user=u, password=p, host=secrets.db.host)
    cur = cnx.cursor()

    redis_keys = redis_cli.keys(pattern=LAST_USED_KEY_PATTERN)
    for key in redis_keys:
        # key looks like "<prefix>/<api_key>/..."; value is a YYYY-MM-DD date
        api_key = str(key).split("/")[1]
        last_time_used = dtime.strptime(str(redis_cli.get(key)), "%Y-%m-%d").date()
        # use a parameterized query: the previous f-string interpolation was
        # vulnerable to SQL injection via redis-sourced values
        cur.execute(
            """
            UPDATE
                api_user
            SET last_time_used = %s
            WHERE api_key = %s AND (last_time_used < %s OR last_time_used IS NULL)
            """,
            (last_time_used, api_key, last_time_used),
        )
        redis_cli.delete(key)
    cur.close()
    cnx.commit()
    cnx.close()


if __name__ == "__main__":
    main()
39 |
--------------------------------------------------------------------------------
/src/server/.htaccess:
--------------------------------------------------------------------------------
1 | # Open up the API to all origins.
2 | Header set Access-Control-Allow-Origin "*"
3 |
4 | # Compress API responses
5 |
6 | AddOutputFilterByType DEFLATE application/json text/csv
7 |
8 |
9 | AddOutputFilterByType BROTLI_COMPRESS application/json text/csv
10 |
11 |
# Allow brief caching of API responses (currently disabled; uncomment the directives below to enable)
13 |
14 | # ExpiresActive on
15 | # ExpiresByType application/json "access plus 2 hours"
16 |
17 |
18 |
--------------------------------------------------------------------------------
/src/server/_exceptions.py:
--------------------------------------------------------------------------------
1 | from typing import Iterable, Optional
2 | from flask import make_response, request
3 | from flask.json import dumps
4 | from werkzeug.exceptions import HTTPException
5 |
6 |
def _is_using_status_codes() -> bool:
    """Return True unless the requested output format is a legacy one.

    The "classic" and "tree" formats predate HTTP status-code signaling,
    so errors for them are reported in-band with a 200 response.
    """
    requested_format = request.values.get("format", "classic")
    return requested_format != "classic" and requested_format != "tree"
10 |
11 |
class EpiDataException(HTTPException):
    """Base API exception rendered as a classic epidata JSON error payload."""

    def __init__(self, message: str, status_code: int = 500):
        super(EpiDataException, self).__init__(message)
        # legacy formats always get HTTP 200 with the error encoded in the body
        self.code = status_code if _is_using_status_codes() else 200
        payload = dumps({"result": -1, "message": message, "epidata": []})
        self.response = make_response(payload, self.code)
        self.response.mimetype = "application/json"
21 |
22 |
class MissingOrWrongSourceException(EpiDataException):
    """400 error raised when no (or an unknown) data source was requested."""

    def __init__(self, endpoints: Iterable[str]):
        message = f"no data source specified, possible values: {','.join(endpoints)}"
        super(MissingOrWrongSourceException, self).__init__(message, 400)
26 |
27 |
class ValidationFailedException(EpiDataException):
    """400 error raised when request parameters fail validation."""

    def __init__(self, message: str):
        super(ValidationFailedException, self).__init__(message, 400)
31 |
32 |
class DatabaseErrorException(EpiDataException):
    """500 error raised when an underlying database operation fails."""

    def __init__(self, details: Optional[str] = None):
        message = f"database error: {details}" if details else "database error"
        super(DatabaseErrorException, self).__init__(message, 500)
39 |
--------------------------------------------------------------------------------
/src/server/_pandas.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Any, Optional
2 | import pandas as pd
3 |
4 | from flask import request
5 | from sqlalchemy import text
6 | from sqlalchemy.engine.base import Engine
7 |
8 | from ._common import engine
9 | from ._config import MAX_RESULTS
10 | from ._printer import create_printer
11 | from ._query import filter_fields, limit_query
12 | from ._exceptions import DatabaseErrorException
13 |
14 |
def as_pandas(query: str, params: Dict[str, Any], db_engine: Engine = engine, parse_dates: Optional[Dict[str, str]] = None, limit_rows = MAX_RESULTS+1) -> pd.DataFrame:
    """Execute `query` and return the result as a pandas DataFrame.

    :param query: SQL query text; it is capped to at most `limit_rows` rows
    :param params: bound query parameters
    :param db_engine: SQLAlchemy engine to run against (defaults to the app engine)
    :param parse_dates: optional mapping forwarded to `pandas.read_sql_query`
    :param limit_rows: hard row cap; defaults to MAX_RESULTS+1 so callers can
        detect when a result set was truncated
    :raises DatabaseErrorException: wrapping any underlying database error
    """
    try:
        query = limit_query(query, limit_rows)
        return pd.read_sql_query(text(str(query)), db_engine, params=params, parse_dates=parse_dates)
    except Exception as e:
        # chain the original exception so the root cause survives in tracebacks
        raise DatabaseErrorException(str(e)) from e
21 |
22 |
def print_pandas(df: pd.DataFrame):
    """Stream a DataFrame's rows through the printer for the requested format."""
    printer = create_printer(request.values.get("format"))
    records = (record for record in df.to_dict(orient="records"))
    return printer(filter_fields(records))
31 |
--------------------------------------------------------------------------------
/src/server/_validate.py:
--------------------------------------------------------------------------------
1 | from flask import Request
2 |
3 | from ._exceptions import ValidationFailedException
4 |
5 |
def require_all(request: Request, *values: str) -> bool:
    """Return True when every named parameter is present (and truthy) in the
    request; otherwise raise ValidationFailedException.
    :returns bool
    """
    for name in values:
        if not request.values.get(name):
            raise ValidationFailedException(f"missing parameter: need [{', '.join(values)}]")
    return True
15 |
16 |
def require_any(request: Request, *values: str, empty=False) -> bool:
    """Return True when at least one named parameter is present in the request;
    otherwise raise ValidationFailedException. With empty=True, a present but
    empty parameter also counts.
    :returns bool
    """
    if any(request.values.get(name) or (empty and name in request.values) for name in values):
        return True
    raise ValidationFailedException(f"missing parameter: need one of [{', '.join(values)}]")
26 |
--------------------------------------------------------------------------------
/src/server/admin/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cmu-delphi/delphi-epidata/0fd0c7dff9f3e8bd54e5dee3f557c4e06f9e919e/src/server/admin/__init__.py
--------------------------------------------------------------------------------
/src/server/covidcast_issues_migration/.gitignore:
--------------------------------------------------------------------------------
1 | *.sql.gz
2 | *.sql
3 | *.csv
/tmp
/out
6 |
--------------------------------------------------------------------------------
/src/server/covidcast_issues_migration/requirements.txt:
--------------------------------------------------------------------------------
1 | pandas==1.0.3
2 | multiprocessing-logging==0.3.1
3 |
--------------------------------------------------------------------------------
/src/server/endpoints/__init__.py:
--------------------------------------------------------------------------------
1 | from . import (
2 | cdc,
3 | covid_hosp_facility_lookup,
4 | covid_hosp_facility,
5 | covid_hosp_state_timeseries,
6 | covidcast_meta,
7 | covidcast,
8 | delphi,
9 | dengue_nowcast,
10 | dengue_sensors,
11 | ecdc_ili,
12 | flusurv,
13 | fluview_clinicial,
14 | fluview_meta,
15 | fluview,
16 | gft,
17 | ght,
18 | ilinet,
19 | kcdc_ili,
20 | meta_norostat,
21 | meta,
22 | nidss_dengue,
23 | nidss_flu,
24 | norostat,
25 | nowcast,
26 | paho_dengue,
27 | quidel,
28 | sensors,
29 | twitter,
30 | wiki,
31 | signal_dashboard_status,
32 | signal_dashboard_coverage,
33 | )
34 |
# Registry of all endpoint modules served by the API.
# NOTE: this list is expected to mirror the import list above; each listed
# module exposes a Flask blueprint `bp` (and optionally an `alias`).
endpoints = [
    cdc,
    covid_hosp_facility_lookup,
    covid_hosp_facility,
    covid_hosp_state_timeseries,
    covidcast_meta,
    covidcast,
    delphi,
    dengue_nowcast,
    dengue_sensors,
    ecdc_ili,
    flusurv,
    fluview_clinicial,
    fluview_meta,
    fluview,
    gft,
    ght,
    ilinet,
    kcdc_ili,
    meta_norostat,
    meta,
    nidss_dengue,
    nidss_flu,
    norostat,
    nowcast,
    paho_dengue,
    quidel,
    sensors,
    twitter,
    wiki,
    signal_dashboard_status,
    signal_dashboard_coverage,
]

# the endpoint registry is this package's only public name
__all__ = ["endpoints"]
70 |
--------------------------------------------------------------------------------
/src/server/endpoints/covid_hosp_facility_lookup.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_any
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("covid_hosp_facility_lookup", __name__)
9 |
10 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Look up covid hospital facilities by exactly one filter field.

    Exactly one of state/ccn/city/zip/fips_code is applied; when several are
    supplied, the first in that priority order wins.
    """
    require_any(request, "state", "ccn", "city", "zip", "fips_code")

    # candidate filters, in priority order
    filters = [
        ("state", extract_strings("state")),
        ("ccn", extract_strings("ccn")),
        ("city", extract_strings("city")),
        ("zip", extract_strings("zip")),
        ("fips_code", extract_strings("fips_code")),
    ]

    # response columns, in output order
    # NOTE: fields `geocoded_hospital_address` and `hhs_ids` are available but not being provided by this endpoint.
    fields_string = [
        "hospital_pk",
        "state",
        "ccn",
        "hospital_name",
        "address",
        "city",
        "zip",
        "hospital_subtype",
        "fips_code",
    ]
    fields_int = ["is_metro_micro"]
    fields_float = []

    # build query
    q = QueryBuilder("covid_hosp_facility_key", "c")
    q.fields = ", ".join(f"{q.alias}.{col}" for col in fields_string + fields_int)
    q.set_sort_order("hospital_pk")

    # apply the first non-empty filter; these are all fast because the table
    # has indexes on each of these fields
    for column, values in filters:
        if values:
            q.where_strings(column, values)
            break
    else:
        q.conditions.append('FALSE')

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
69 |
--------------------------------------------------------------------------------
/src/server/endpoints/covidcast_utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .trend import compute_trend, compute_trend_value, compute_trends
2 | from .meta import CovidcastMetaEntry
--------------------------------------------------------------------------------
/src/server/endpoints/covidcast_utils/meta.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass, asdict, field
2 | from typing import Dict, Any
3 | from .model import DataSignal
4 |
5 |
@dataclass
class CovidcastMetaStats:
    """Summary statistics for one signal/geo_type combination."""
    min: float    # smallest observed value
    mean: float   # mean of observed values
    stdev: float  # standard deviation of observed values
    max: float    # largest observed value
12 |
13 |
@dataclass
class CovidcastMetaEntry:
    """Accumulated metadata for one signal across geo types."""
    signal: DataSignal   # descriptor for the signal this entry summarizes
    min_time: int        # earliest observed time value
    max_time: int        # latest observed time value
    max_issue: int       # latest observed issue
    geo_types: Dict[str, CovidcastMetaStats] = field(default_factory=dict)

    def integrate(self, row: Dict[str, Any]):
        """Fold one meta row into this entry: widen the time/issue ranges and
        record the row's per-geo_type summary statistics."""
        if row["min_time"] < self.min_time:
            self.min_time = row["min_time"]
        if row["max_time"] > self.max_time:
            self.max_time = row["max_time"]
        if row["max_issue"] > self.max_issue:
            self.max_issue = row["max_issue"]
        self.geo_types[row["geo_type"]] = CovidcastMetaStats(row["min_value"], row["mean_value"], row["stdev_value"], row["max_value"])

    # deprecated misspelling kept as an alias for backward compatibility
    intergrate = integrate

    def asdict(self):
        """Serialize this entry, flattening the signal's fields into the result."""
        r = asdict(self)
        if self.signal:
            r.update(self.signal.asdict())
        r["geo_types"] = {k: asdict(v) for k, v in self.geo_types.items()}
        return r
37 |
--------------------------------------------------------------------------------
/src/server/endpoints/delphi.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 | from flask.json import loads
3 |
4 | from .._printer import print_non_standard
5 | from .._query import parse_result
6 | from .._validate import require_all
7 |
8 | # first argument is the endpoint name
9 | bp = Blueprint("delphi", __name__)
10 | alias = None
11 |
12 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Return the stored forecast for one (system, epiweek), with the stored
    JSON blob decoded into a `forecast` field."""
    require_all(request, "system", "epiweek")

    query = "SELECT `system`, `epiweek`, `json` FROM `forecasts` WHERE `system` = :system AND `epiweek` = :epiweek LIMIT 1"
    params = {
        "system": request.values["system"],
        "epiweek": int(request.values["epiweek"]),
    }

    rows = parse_result(query, params, ["system", "json"], ["epiweek"], [])
    for row in rows:
        # decode the stored JSON payload and drop the raw column
        row["forecast"] = loads(row.pop("json"))

    return print_non_standard(request.values.get("format"), rows)
33 |
--------------------------------------------------------------------------------
/src/server/endpoints/dengue_nowcast.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("dengue_nowcast", __name__)
9 | alias = None
10 |
11 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Fetch dengue nowcast values for the requested locations and epiweeks."""
    require_all(request, "locations", "epiweeks")

    locations = extract_strings("locations")
    epiweeks = extract_integers("epiweeks")

    fields_string = ["location"]
    fields_int = ["epiweek"]
    fields_float = ["value", "std"]

    # build query
    q = QueryBuilder("dengue_nowcasts", "n")
    q.set_fields(fields_string, fields_int, fields_float)
    q.set_sort_order("epiweek", "location")
    q.where_strings("location", locations)
    q.where_integers("epiweek", epiweeks)

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
33 |
--------------------------------------------------------------------------------
/src/server/endpoints/dengue_sensors.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 | from .._security import require_role
7 |
8 | # first argument is the endpoint name
9 | bp = Blueprint("dengue_sensors", __name__)
10 | alias = None
11 |
12 |
@bp.route("/", methods=("GET", "POST"))
@require_role("sensors")
def handle():
    """Fetch dengue sensor values (restricted to the "sensors" role)."""
    require_all(request, "names", "locations", "epiweeks")

    names = extract_strings("names")
    locations = extract_strings("locations")
    epiweeks = extract_integers("epiweeks")

    fields_string = ["name", "location"]
    fields_int = ["epiweek"]
    fields_float = ["value"]

    # build query
    q = QueryBuilder("dengue_sensors", "s")
    q.set_fields(fields_string, fields_int, fields_float)
    q.set_sort_order("epiweek", "name", "location")
    q.where_strings("name", names)
    q.where_strings("location", locations)
    q.where_integers("epiweek", epiweeks)

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
38 |
--------------------------------------------------------------------------------
/src/server/endpoints/ecdc_ili.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integer, extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("ecdc_ili", __name__)
9 | alias = None
10 |
11 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Fetch ECDC ILI incidence rows, optionally pinned to issues or a lag."""
    require_all(request, "regions", "epiweeks")

    regions = extract_strings("regions")
    epiweeks = extract_integers("epiweeks")
    issues = extract_integers("issues")
    lag = extract_integer("lag")

    fields_string = ["release_date", "region"]
    fields_int = ["issue", "epiweek", "lag"]
    fields_float = ["incidence_rate"]

    # build query
    q = QueryBuilder("ecdc_ili", "ec")
    q.set_fields(fields_string, fields_int, fields_float)
    q.set_sort_order("epiweek", "region", "issue")
    q.where_integers("epiweek", epiweeks)
    q.where_strings("region", regions)

    if issues is not None:
        # specific issue(s) requested
        q.where_integers("issue", issues)
    elif lag is not None:
        # fixed lag relative to each epiweek
        q.where(lag=lag)
    else:
        # default: most recent issue per epiweek/region
        q.with_max_issue("epiweek", "region")

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
42 |
--------------------------------------------------------------------------------
/src/server/endpoints/flusurv.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integer, extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | bp = Blueprint("flusurv", __name__)
8 |
9 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Fetch FluSurv hospitalization rates, optionally pinned to issues or a lag."""
    require_all(request, "epiweeks", "locations")

    epiweeks = extract_integers("epiweeks")
    locations = extract_strings("locations")
    issues = extract_integers("issues")
    lag = extract_integer("lag")

    fields_string = ["release_date", "location"]
    fields_int = ["issue", "epiweek", "lag"]
    # age-stratified rates 0..4 plus the overall rate
    fields_float = [f"rate_age_{age_group}" for age_group in range(5)] + ["rate_overall"]

    # basic query info
    q = QueryBuilder("flusurv", "fs")
    q.set_fields(fields_string, fields_int, fields_float)
    q.set_sort_order("epiweek", "location", "issue")
    q.where_integers("epiweek", epiweeks)
    q.where_strings("location", locations)

    if issues is not None:
        # specific issue(s) requested
        q.where_integers("issue", issues)
    elif lag is not None:
        # fixed lag relative to each epiweek
        q.where(lag=lag)
    else:
        # default: most recent issue per epiweek/location
        q.with_max_issue("epiweek", "location")

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
47 |
--------------------------------------------------------------------------------
/src/server/endpoints/fluview_clinicial.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integer, extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | bp = Blueprint("fluview_clinical", __name__)
8 |
9 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Fetch fluview clinical lab rows, optionally pinned to issues or a lag."""
    require_all(request, "epiweeks", "regions")

    epiweeks = extract_integers("epiweeks")
    regions = extract_strings("regions")
    issues = extract_integers("issues")
    lag = extract_integer("lag")

    fields_string = ["release_date", "region"]
    fields_int = ["issue", "epiweek", "lag", "total_specimens", "total_a", "total_b"]
    fields_float = ["percent_positive", "percent_a", "percent_b"]

    # basic query info
    q = QueryBuilder("fluview_clinical", "fvc")
    q.set_fields(fields_string, fields_int, fields_float)
    q.set_sort_order("epiweek", "region", "issue")
    q.where_integers("epiweek", epiweeks)
    q.where_strings("region", regions)

    if issues is not None:
        # specific issue(s) requested
        q.where_integers("issue", issues)
    elif lag is not None:
        # fixed lag relative to each epiweek
        q.where(lag=lag)
    else:
        # final query using most recent issues
        q.with_max_issue("epiweek", "region")

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
41 |
--------------------------------------------------------------------------------
/src/server/endpoints/fluview_meta.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._printer import create_printer
4 | from .._query import filter_fields, parse_result
5 |
6 | # first argument is the endpoint name
7 | bp = Blueprint("fluview_meta", __name__)
8 | alias = None
9 |
10 |
def meta_fluview():
    """Return summary metadata for `fluview`: latest update date, latest issue,
    and total row count."""
    query = "SELECT max(`release_date`) `latest_update`, max(`issue`) `latest_issue`, count(1) `table_rows` FROM `fluview`"
    return parse_result(query, {}, ["latest_update"], ["latest_issue", "table_rows"], None)
16 |
17 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Emit fluview metadata rows in the requested output format."""
    printer = create_printer(request.values.get("format"))
    return printer(filter_fields(iter(meta_fluview())))
27 |
--------------------------------------------------------------------------------
/src/server/endpoints/gft.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("gft", __name__)
9 | alias = None
10 |
11 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Fetch Google Flu Trends counts for the requested locations and epiweeks."""
    require_all(request, "locations", "epiweeks")

    locations = extract_strings("locations")
    epiweeks = extract_integers("epiweeks")

    fields_string = ["location"]
    fields_int = ["epiweek", "num"]
    fields_float = []

    # build query
    q = QueryBuilder("gft", "g")
    q.set_fields(fields_string, fields_int, fields_float)
    q.set_sort_order("epiweek", "location")
    # build the filter
    q.where_integers("epiweek", epiweeks)
    q.where_strings("location", locations)

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
34 |
--------------------------------------------------------------------------------
/src/server/endpoints/ght.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 | from .._security import require_role
7 |
8 | # first argument is the endpoint name
9 | bp = Blueprint("ght", __name__)
10 | alias = None
11 |
12 |
@bp.route("/", methods=("GET", "POST"))
@require_role("ght")
def handle():
    """Fetch Google Health Trends values (restricted to the "ght" role)."""
    require_all(request, "locations", "epiweeks", "query")

    locations = extract_strings("locations")
    epiweeks = extract_integers("epiweeks")

    fields_string = ["location"]
    fields_int = ["epiweek"]
    fields_float = ["value"]

    # build query
    q = QueryBuilder("ght", "g")
    q.set_fields(fields_string, fields_int, fields_float)
    q.set_sort_order("epiweek", "location")
    # build the filter
    q.where_strings("location", locations)
    q.where_integers("epiweek", epiweeks)
    q.where(query=request.values["query"])

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
39 |
--------------------------------------------------------------------------------
/src/server/endpoints/ilinet.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint
2 |
3 | from .._exceptions import EpiDataException
4 |
5 | # first argument is the endpoint name
6 | bp = Blueprint("ilinet", __name__)
7 | alias = "stateili"
8 |
9 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Always fail: the ilinet/stateili endpoints are retired in favor of fluview."""
    raise EpiDataException("use fluview instead")
13 |
--------------------------------------------------------------------------------
/src/server/endpoints/kcdc_ili.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integer, extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("kcdc_ili", __name__)
9 | alias = None
10 |
11 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Fetch KCDC ILI rows, optionally pinned to issues or a lag.

    Consistency fix: test `issues is not None` (matching the ecdc_ili,
    flusurv, and fluview_clinical endpoints) instead of truthiness, so an
    explicitly supplied issues filter is always honored.
    """
    require_all(request, "regions", "epiweeks")
    regions = extract_strings("regions")
    epiweeks = extract_integers("epiweeks")
    issues = extract_integers("issues")
    lag = extract_integer("lag")

    # build query
    q = QueryBuilder("kcdc_ili", "kc")

    fields_string = ["release_date", "region"]
    fields_int = ["issue", "epiweek", "lag"]
    fields_float = ["ili"]
    q.set_fields(fields_string, fields_int, fields_float)

    q.set_sort_order("epiweek", "region", "issue")
    # build the filter
    q.where_integers("epiweek", epiweeks)
    q.where_strings("region", regions)

    if issues is not None:
        # specific issue(s) requested
        q.where_integers("issue", issues)
    elif lag is not None:
        # fixed lag relative to each epiweek
        q.where(lag=lag)
    else:
        # default: most recent issue per epiweek/region
        q.with_max_issue("epiweek", "region")

    # send query
    return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
42 |
--------------------------------------------------------------------------------
/src/server/endpoints/meta.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._printer import print_non_standard
4 | from .._query import parse_result
5 | from .fluview_meta import meta_fluview
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("meta", __name__)
9 | alias = None
10 |
11 |
def meta_twitter():
    """Return summary metadata for `twitter`: latest update date, row count,
    and the number of distinct states on that latest date."""
    query = "SELECT x.`date` `latest_update`, x.`table_rows`, count(distinct t.`state`) `num_states` FROM (SELECT max(`date`) `date`, count(1) `table_rows` FROM `twitter`) x JOIN `twitter` t ON t.`date` = x.`date` GROUP BY x.`date`, x.`table_rows`"
    return parse_result(query, {}, ["latest_update"], ["num_states", "table_rows"], None)
17 |
18 |
def meta_wiki():
    """Return summary metadata for `wiki_meta`: latest update timestamp and row count."""
    # the old PHP implementation shifted GMT to EST via
    # date_sub(max(`datetime`), interval 5 hour); the raw timestamp is returned here
    query = "SELECT max(`datetime`) `latest_update`, count(1) `table_rows` FROM `wiki_meta`"
    return parse_result(query, {}, ["latest_update"], ["table_rows"], None)
25 |
26 |
def meta_delphi():
    """Return per-system coverage of the `forecasts` table: first/last epiweek
    and number of weeks, ordered by system name."""
    query = "SELECT `system`, min(`epiweek`) `first_week`, max(`epiweek`) `last_week`, count(1) `num_weeks` FROM `forecasts` GROUP BY `system` ORDER BY `system` ASC"
    return parse_result(query, {}, ["system"], ["first_week", "last_week", "num_weeks"], None)
32 |
33 |
@bp.route("/", methods=("GET", "POST"))
def handle():
    """Combine the individual meta collectors into a single response row."""
    row = {
        "fluview": meta_fluview(),
        "twitter": meta_twitter(),
        "wiki": meta_wiki(),
        "delphi": meta_delphi(),
    }
    return print_non_standard(request.values.get("format"), [row])
50 |
--------------------------------------------------------------------------------
/src/server/endpoints/meta_norostat.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._printer import print_non_standard
4 | from .._query import parse_result
5 | from .._security import require_role
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("meta_norostat", __name__)
9 | alias = None
10 |
11 |
@bp.route("/", methods=("GET", "POST"))
@require_role("norostat")
def handle():
    """Return available norostat release dates and locations (role-gated)."""
    releases = parse_result(
        "SELECT DISTINCT `release_date` FROM `norostat_raw_datatable_version_list`",
        {}, ["release_date"])
    locations = parse_result(
        "SELECT DISTINCT `location` FROM `norostat_raw_datatable_location_pool`",
        {}, ["location"])

    data = {"releases": releases, "locations": locations}
    return print_non_standard(request.values.get("format"), data)
24 |
--------------------------------------------------------------------------------
/src/server/endpoints/nidss_dengue.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | from flask import Blueprint, request
4 |
5 | from .._params import extract_integers, extract_strings
6 | from .._query import execute_queries, filter_integers
7 | from .._validate import require_all
8 |
9 | # first argument is the endpoint name
10 | bp = Blueprint("nidss_dengue", __name__)
11 | alias = None
12 |
13 |
14 | @bp.route("/", methods=("GET", "POST"))
15 | def handle():
16 | require_all(request, "locations", "epiweeks")
17 | locations = extract_strings("locations")
18 | epiweeks = extract_integers("epiweeks")
19 |
20 | # build query
21 | # build the filter
22 | params = dict()
23 | # build the epiweek filter
24 | condition_epiweek = filter_integers("nd.`epiweek`", epiweeks, "epiweek", params)
25 |
26 | queries = []
27 | for location in locations:
28 | # some kind of enforcing escaping
29 | location = re.search(r"([\w-]+)", location)[0]
30 | location_params = params.copy()
31 | query = f"""
32 | SELECT
33 | nd2.`epiweek`, nd2.`location`, count(1) `num_locations`, sum(nd2.`count`) `count`
34 | FROM (
35 | SELECT
36 | nd1.`epiweek`, CASE WHEN q.`query` = nd1.`location` THEN nd1.`location` WHEN q.`query` = nd1.`region` THEN nd1.`region` ELSE nd1.`nat` END `location`, nd1.`count`
37 | FROM (
38 | SELECT
39 | `epiweek`, `location`, `region`, 'nationwide' `nat`, `count`
40 | FROM
41 | `nidss_dengue` nd
42 | WHERE {condition_epiweek}
43 | ) nd1
44 | JOIN (
45 | SELECT
46 | '{location}' `query`
47 | ) q
48 | ON
49 | q.`query` IN (nd1.`location`, nd1.`region`, nd1.`nat`)
50 | ) nd2
51 | GROUP BY
52 | nd2.`epiweek`, nd2.`location`
53 | """
54 | queries.append((query, location_params))
55 |
56 | fields_string = ["location"]
57 | fields_int = ["epiweek", "count"]
58 | fields_float = []
59 |
60 | # send query
61 | return execute_queries(queries, fields_string, fields_int, fields_float)
62 |
--------------------------------------------------------------------------------
/src/server/endpoints/nidss_flu.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integer, extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("nidss_flu", __name__)
9 | alias = None
10 |
11 |
12 | @bp.route("/", methods=("GET", "POST"))
13 | def handle():
14 | require_all(request, "regions", "epiweeks")
15 | regions = extract_strings("regions")
16 | epiweeks = extract_integers("epiweeks")
17 | issues = extract_integers("issues")
18 | lag = extract_integer("lag")
19 |
20 | # build query
21 | q = QueryBuilder("nidss_flu", "nf")
22 |
23 | fields_string = ["release_date", "region"]
24 | fields_int = ["issue", "epiweek", "lag", "visits"]
25 | fields_float = ["ili"]
26 | q.set_fields(fields_string, fields_int, fields_float)
27 | q.set_sort_order("epiweek", "region", "issue")
28 |
29 | # build the filter
30 | q.where_integers("epiweek", epiweeks)
31 | q.where_strings("region", regions)
32 |
33 | if issues:
34 | q.where_integers("issue", issues)
35 | elif lag is not None:
36 | q.where(lag=lag)
37 | else:
38 | q.with_max_issue("epiweek", "region")
39 |
40 | # send query
41 | return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
42 |
--------------------------------------------------------------------------------
/src/server/endpoints/norostat.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integers
4 | from .._query import execute_query, filter_integers, filter_strings
5 | from .._validate import require_all
6 | from .._security import require_role
7 |
8 | # first argument is the endpoint name
9 | bp = Blueprint("norostat", __name__)
10 | alias = None
11 |
12 |
13 | @bp.route("/", methods=("GET", "POST"))
14 | @require_role("norostat")
15 | def handle():
16 | require_all(request, "location", "epiweeks")
17 |
18 | location = request.values["location"]
19 | epiweeks = extract_integers("epiweeks")
20 |
21 | # build query
22 | # build the filter
23 | params = dict()
24 | # build the location filter
25 | condition_location = filter_strings(
26 | "`norostat_raw_datatable_location_pool`.`location`", [location], "loc", params
27 | )
28 | condition_epiweek = filter_integers(
29 | "`latest`.`epiweek`", epiweeks, "epiweek", params
30 | )
31 | # the query
32 | query = f"""
33 | SELECT `latest`.`release_date`, `latest`.`epiweek`, `latest`.`new_value` AS `value`
34 | FROM `norostat_point_diffs` AS `latest`
35 | LEFT JOIN `norostat_raw_datatable_location_pool` USING (`location_id`)
36 | LEFT JOIN (
37 | SELECT * FROM `norostat_point_diffs`
38 | ) `later`
39 | ON `latest`.`location_id` = `later`.`location_id` AND
40 | `latest`.`epiweek` = `later`.`epiweek` AND
41 | (`latest`.`release_date`, `latest`.`parse_time`) <
42 | (`later`.`release_date`, `later`.`parse_time`) AND
43 | `later`.`new_value` IS NOT NULL
44 | WHERE ({condition_location}) AND
45 | ({condition_epiweek}) AND
46 | `later`.`parse_time` IS NULL AND
47 | `latest`.`new_value` IS NOT NULL
48 | """
49 |
50 | fields_string = ["release_date"]
51 | fields_int = ["epiweek", "value"]
52 | fields_float = []
53 |
54 | # send query
55 | return execute_query(query, params, fields_string, fields_int, fields_float)
56 |
--------------------------------------------------------------------------------
/src/server/endpoints/nowcast.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("nowcast", __name__)
9 | alias = None
10 |
11 |
12 | @bp.route("/", methods=("GET", "POST"))
13 | def handle():
14 | require_all(request, "locations", "epiweeks")
15 | locations = extract_strings("locations")
16 | epiweeks = extract_integers("epiweeks")
17 |
18 | # build query
19 | q = QueryBuilder("nowcasts", "n")
20 |
21 | fields_string = ["location"]
22 | fields_int = ["epiweek"]
23 | fields_float = ["value", "std"]
24 | q.set_fields(fields_string, fields_int, fields_float)
25 |
26 | q.set_sort_order("epiweek", "location")
27 |
28 | # build the filter
29 | q.where_strings("location", locations)
30 | q.where_integers("epiweek", epiweeks)
31 |
32 | # send query
33 | return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
34 |
--------------------------------------------------------------------------------
/src/server/endpoints/paho_dengue.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integer, extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("paho_dengue", __name__)
9 | alias = None
10 |
11 |
12 | @bp.route("/", methods=("GET", "POST"))
13 | def handle():
14 | require_all(request, "regions", "epiweeks")
15 | regions = extract_strings("regions")
16 | epiweeks = extract_integers("epiweeks")
17 | issues = extract_integers("issues")
18 | lag = extract_integer("lag")
19 |
20 | # build query
21 | q = QueryBuilder("paho_dengue", "pd")
22 |
23 | fields_string = ["release_date", "region", "serotype"]
24 | fields_int = [
25 | "issue",
26 | "epiweek",
27 | "lag",
28 | "total_pop",
29 | "num_dengue",
30 | "num_severe",
31 | "num_deaths",
32 | ]
33 | fields_float = ["incidence_rate"]
34 | q.set_fields(fields_string, fields_int, fields_float)
35 |
36 | q.set_sort_order("epiweek", "region", "issue")
37 |
38 | # build the filter
39 | q.where_integers("epiweek", epiweeks)
40 | q.where_strings("region", regions)
41 |
42 | if issues:
43 | q.where_integers("issue", issues)
44 | elif lag is not None:
45 | q.where(lag=lag)
46 | else:
47 | # final query using most recent issues
48 | q.with_max_issue('epiweek', 'region')
49 |
50 | # send query
51 | return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
52 |
--------------------------------------------------------------------------------
/src/server/endpoints/quidel.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .._params import extract_integers, extract_strings
4 | from .._query import execute_query, QueryBuilder
5 | from .._validate import require_all
6 | from .._security import require_role
7 |
8 | # first argument is the endpoint name
9 | bp = Blueprint("quidel", __name__)
10 | alias = None
11 |
12 |
13 | @bp.route("/", methods=("GET", "POST"))
14 | @require_role("quidel")
15 | def handle():
16 | require_all(request, "locations", "epiweeks")
17 |
18 | locations = extract_strings("locations")
19 | epiweeks = extract_integers("epiweeks")
20 |
21 | # build query
22 | q = QueryBuilder("quidel", "q")
23 |
24 | fields_string = ["location"]
25 | fields_int = ["epiweek"]
26 | fields_float = ["value"]
27 | q.set_fields(fields_string, fields_int, fields_float)
28 |
29 | q.set_sort_order("epiweek", "location")
30 |
31 | # build the filter
32 | q.where_strings("location", locations)
33 | q.where_integers("epiweek", epiweeks)
34 |
35 | # send query
36 | return execute_query(str(q), q.params, fields_string, fields_int, fields_float)
37 |
--------------------------------------------------------------------------------
/src/server/endpoints/signal_dashboard_coverage.py:
--------------------------------------------------------------------------------
1 | from typing import List, Dict, Any
2 | from flask import Blueprint, request
3 |
4 | from .._query import parse_result
5 | from .._printer import print_non_standard
6 |
7 | # first argument is the endpoint name
8 | bp = Blueprint("signal_dashboard_coverage", __name__)
9 | alias = None
10 |
def fetch_coverage_data() -> Dict[str, Dict[str, List[Dict[str, Any]]]]:
    """Fetch coverage rows for enabled dashboard signals.

    Returns a nested mapping: {signal name: {geo_type: [{"date": ..., "count": ...}, ...]}}.
    """
    query = """
        SELECT enabled_signal.`name`,
               coverage.`date`,
               coverage.`geo_type`,
               coverage.`count`
        FROM (SELECT `id`, `name`, `latest_coverage_update`
              FROM `dashboard_signal`
              WHERE `enabled`) AS enabled_signal
        LEFT JOIN `dashboard_signal_coverage` AS coverage
               ON enabled_signal.`id` = coverage.`signal_id`
        ORDER BY `id` ASC, `date` DESC
    """

    rows = parse_result(query, {}, ["name", "date", "geo_type"], ["count"], [])

    # group flat rows by signal name, then by geo type
    grouped: Dict[str, Dict[str, List[Dict[str, Any]]]] = {}
    for row in rows:
        per_geo = grouped.setdefault(row["name"], {}).setdefault(row["geo_type"], [])
        per_geo.append({"date": row["date"], "count": row["count"]})

    return grouped
41 |
42 |
43 | @bp.route("/", methods=("GET", "POST"))
44 | def handle():
45 | return print_non_standard(request.values.get("format"), fetch_coverage_data())
46 |
--------------------------------------------------------------------------------
/src/server/endpoints/signal_dashboard_status.py:
--------------------------------------------------------------------------------
1 | from flask import Blueprint, request
2 |
3 | from .signal_dashboard_coverage import fetch_coverage_data
4 | from .._query import parse_row, run_query
5 | from .._printer import create_printer
6 | from .._exceptions import DatabaseErrorException
7 |
8 | # first argument is the endpoint name
9 | bp = Blueprint("signal_dashboard_status", __name__)
10 | alias = None
11 |
12 |
13 | @bp.route("/", methods=("GET", "POST"))
14 | def handle():
15 | fields_string = ["name", "source", "covidcast_signal", "latest_issue", "latest_time_value"]
16 | fields_int = []
17 | fields_float = []
18 |
19 | query = """
20 | SELECT enabled_signal.`name`,
21 | enabled_signal.`source`,
22 | enabled_signal.`covidcast_signal`,
23 | status.`latest_issue`,
24 | status.`latest_time_value`
25 | FROM (SELECT `id`, `name`, `source`, `covidcast_signal`, `latest_status_update`
26 | FROM `dashboard_signal`
27 | WHERE `enabled`) AS enabled_signal
28 | LEFT JOIN `dashboard_signal_status` AS status
29 | ON enabled_signal.`latest_status_update` = status.`date`
30 | AND enabled_signal.`id` = status.`signal_id`
31 | """
32 |
33 | p = create_printer(request.values.get("format"))
34 |
35 | def gen(rows, coverage_data):
36 | for row in rows:
37 | parsed = parse_row(row, fields_string, fields_int, fields_float)
38 | # inject coverage data
39 | parsed["coverage"] = coverage_data.get(parsed["name"], {})
40 | yield parsed
41 |
42 | try:
43 | coverage_data = fetch_coverage_data()
44 | r = run_query(p, (query, {}))
45 | except Exception as e:
46 | raise DatabaseErrorException(str(e))
47 |
48 | # now use a generator for sending the rows and execute all the other queries
49 | return p(gen(r, coverage_data))
50 |
--------------------------------------------------------------------------------
/src/server/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | DELPHI Epidata API
6 |
7 |
8 |
9 | The API is documented
here.
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/src/server/simulate_api_response.py:
--------------------------------------------------------------------------------
1 | # standard library
2 | import os.path
3 | import subprocess
4 | import json
5 |
def dangerously_simulate_api_response(request_dict):
    """*SHOULD NOT RECEIVE USER INPUT*. Simulate the API output for a request
    using the server files in this repository's directory structure.

    `request_dict` maps html query parameters / php $_REQUEST entries for the
    request; it is serialized to JSON and fed to php on stdin. *Security
    note*: never pass outside-user input here — only carefully specified
    development values, with usage thoroughly vetted.

    No web requests are issued; the files under ../../src/server/ are used
    directly, not the currently deployed API.

    Returns a tuple (returncode, stderr_bytes, stdout_bytes).
    """
    payload = json.dumps(request_dict).encode('UTF-8')
    server_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), '..', '..', 'src', 'server')
    proc = subprocess.Popen(
        cwd=server_dir,
        args=['php', '-r', '$_REQUEST = json_decode(file_get_contents("php://stdin"), true); require("api.php");'],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    stdout_bytes, stderr_bytes = proc.communicate(input=payload)
    return (proc.returncode, stderr_bytes, stdout_bytes)
25 |
def extract_response_json(simulated_api_response):
    """Decode the stdout of a simulated API response as JSON.

    Raises Exception if the simulated run exited nonzero or wrote to stderr.
    """
    returncode, stderr_bytes, stdout_bytes = simulated_api_response
    if returncode != 0 or stderr_bytes:
        raise Exception([
            'Simulated API request appears to have generated an internal error, returning a nonzero error code and/or producing output to stderr:',
            returncode,
            stderr_bytes,
        ])
    return json.loads(stdout_bytes.decode("UTF-8"))
33 |
--------------------------------------------------------------------------------
/src/server/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .dates import shift_day_value, day_to_time_value, time_value_to_iso, time_value_to_day, days_in_range, weeks_in_range, shift_week_value, week_to_time_value, time_value_to_week, guess_time_value_is_day, guess_time_value_is_week, time_values_to_ranges, days_to_ranges, weeks_to_ranges, IntRange, TimeValues
2 |
--------------------------------------------------------------------------------
/tasks.py:
--------------------------------------------------------------------------------
1 | from invoke import task
2 |
3 |
@task
def start(c):
    """Run the development API server as a module."""
    c.run("python -m src.server.main")
7 |
8 |
@task
def update_gdoc(
    c,
    sources_url="https://docs.google.com/spreadsheets/d/e/2PACX-1vRfXo-qePhrYGAoZqewVnS1kt9tfnUTLgtkV7a-1q7yg4FoZk0NNGuB1H6k10ah1Xz5B8l1S1RB17N6/pub?gid=0&single=true&output=csv",
    signal_url="https://docs.google.com/spreadsheets/d/e/2PACX-1vRfXo-qePhrYGAoZqewVnS1kt9tfnUTLgtkV7a-1q7yg4FoZk0NNGuB1H6k10ah1Xz5B8l1S1RB17N6/pub?gid=329338228&single=true&output=csv",
    ):
    """Refresh db_sources.csv / db_signals.csv from the published Google Sheets.

    Downloads each sheet as CSV, normalizes line endings, drops padding rows
    whose first column is empty, and writes the result into
    src/server/endpoints/covidcast_utils/.
    """
    import requests
    import pathlib

    base_dir = pathlib.Path("./src/server/endpoints/covidcast_utils/")

    def _migrate_file(url: str, filename: str):
        # Fail loudly on HTTP errors: the original code wrote whatever body
        # came back (e.g. an HTML error page) straight into the CSV file.
        resp = requests.get(url, timeout=60)
        resp.raise_for_status()
        text = resp.content.decode("utf8").replace("\r\n", "\n")
        # drop rows whose first column is empty (spreadsheet padding rows)
        rows = [row for row in text.split("\n") if not row.startswith(",")]
        file_ = base_dir / filename
        file_.write_text("\n".join(rows), encoding="utf8")

    _migrate_file(sources_url, "db_sources.csv")
    _migrate_file(signal_url, "db_signals.csv")
29 |
--------------------------------------------------------------------------------
/testdata/acquisition/covid_hosp/state_daily/metadata.csv:
--------------------------------------------------------------------------------
1 | Update Date,Days Since Update,User,Rows,Row Change,Columns,Column Change,Metadata Published,Metadata Updates,Column Level Metadata,Column Level Metadata Updates,Archive Link
2 | 03/13/2021 00:00:00 AM,0,0,0,0,0,0,0,0,0,0,https://test0.csv
3 | 03/15/2021 00:00:00 AM,0,0,0,0,0,0,0,0,0,0,https://test1.csv
4 |
--------------------------------------------------------------------------------
/testdata/acquisition/covid_hosp/state_daily/metadata2.csv:
--------------------------------------------------------------------------------
1 | Update Date,Days Since Update,User,Rows,Row Change,Columns,Column Change,Metadata Published,Metadata Updates,Column Level Metadata,Column Level Metadata Updates,Archive Link
2 | 03/13/2021 00:00:00 AM,0,0,0,0,0,0,0,0,0,0,https://test0.csv
3 | 03/15/2021 00:00:00 AM,0,0,0,0,0,0,0,0,0,0,https://test1.csv
4 | 03/15/2021 00:00:01 AM,0,0,0,0,0,0,0,0,0,0,https://test2.csv
5 | 03/15/2021 00:00:02 AM,0,0,0,0,0,0,0,0,0,0,https://test3.csv
6 | 03/16/2021 00:00:00 AM,0,0,0,0,0,0,0,0,0,0,https://test4.csv
7 | 03/16/2021 00:00:01 AM,0,0,0,0,0,0,0,0,0,0,https://test5.csv
8 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/common/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/common/test_network.py:
--------------------------------------------------------------------------------
1 | """Unit tests for network.py."""
2 |
3 | # standard library
4 | import unittest
5 | from unittest.mock import MagicMock, sentinel, patch
6 |
7 | from delphi.epidata.acquisition.covid_hosp.common.network import Network
8 |
9 | import pandas as pd
10 |
11 | # py3tester coverage target
12 | __test_target__ = 'delphi.epidata.acquisition.covid_hosp.common.network'
13 |
14 |
class NetworkTests(unittest.TestCase):

    def test_fetch_metadata_for_dataset(self):
        """Fetch metadata as JSON."""

        unsorted_frame = pd.DataFrame({
            "Archive Link": ["test2", "test1", "test3"],
            "Update Date": ["2020/1/2", "2020/1/1", "2020/1/3"],
        })
        with patch.object(pd, "read_csv") as func:
            func.return_value = unsorted_frame
            result = Network.fetch_metadata_for_dataset("test")
        # rows should come back sorted by parsed update date, indexed by it
        expected = pd.DataFrame({
            "Archive Link": ["test1", "test2", "test3"],
            "Update Date": pd.date_range("2020/1/1", "2020/1/3"),
        }).set_index("Update Date")
        pd.testing.assert_frame_equal(result, expected)
        func.assert_called_once_with(
            "https://healthdata.gov/api/views/test/rows.csv",
            dtype=str
        )

    def test_fetch_dataset(self):
        """Fetch dataset as CSV."""

        pandas_mock = MagicMock()
        pandas_mock.read_csv.return_value = sentinel.dataset
        self.assertEqual(
            Network.fetch_dataset(sentinel.url, pandas_impl=pandas_mock),
            sentinel.dataset,
        )
        pandas_mock.read_csv.assert_called_once_with(sentinel.url, dtype=str)
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/facility/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/facility/test_network.py:
--------------------------------------------------------------------------------
1 | """Unit tests for network.py."""
2 |
3 | # standard library
4 | import unittest
5 | from unittest.mock import patch
6 | from unittest.mock import sentinel
7 |
8 | from delphi.epidata.acquisition.covid_hosp.facility.network import Network
9 |
10 | # py3tester coverage target
11 | __test_target__ = 'delphi.epidata.acquisition.covid_hosp.facility.network'
12 |
13 |
class NetworkTests(unittest.TestCase):

    def test_fetch_metadata(self):
        """Fetch metadata as JSON."""

        with patch.object(Network, 'fetch_metadata_for_dataset') as func:
            func.return_value = sentinel.json
            self.assertEqual(Network.fetch_metadata(), sentinel.json)
            func.assert_called_once_with(dataset_id=Network.METADATA_ID)
26 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/facility/test_update.py:
--------------------------------------------------------------------------------
1 | """Unit tests for update.py."""
2 |
3 | # standard library
4 | import unittest
5 | from unittest.mock import patch
6 | from unittest.mock import sentinel
7 |
8 | # first party
9 | from delphi.epidata.acquisition.covid_hosp.common.utils import Utils
10 | from delphi.epidata.acquisition.covid_hosp.facility.update import Update
11 |
12 | # py3tester coverage target
13 | __test_target__ = 'delphi.epidata.acquisition.covid_hosp.facility.update'
14 |
15 |
class UpdateTests(unittest.TestCase):

    def test_run(self):
        """Acquire a new dataset."""

        with patch.object(Utils, 'update_dataset') as mock_update_dataset:
            mock_update_dataset.return_value = sentinel.result
            self.assertEqual(Update.run(), sentinel.result)
            mock_update_dataset.assert_called_once()
28 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/state_daily/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/state_daily/test_network.py:
--------------------------------------------------------------------------------
1 | """Unit tests for network.py."""
2 |
3 | # standard library
4 | import requests
5 | import unittest
6 | from unittest.mock import patch
7 | from unittest.mock import sentinel
8 |
9 | # first party
10 | from delphi.epidata.acquisition.covid_hosp.common.test_utils import UnitTestUtils
11 | from delphi.epidata.acquisition.covid_hosp.state_daily.network import Network
12 |
13 | # third party
14 | import pandas as pd
15 |
16 | # py3tester coverage target
17 | __test_target__ = \
18 | 'delphi.epidata.acquisition.covid_hosp.state_daily.network'
19 |
20 |
class NetworkTests(unittest.TestCase):
    def setUp(self):
        """Perform per-test setup."""

        # configure test data
        self.test_utils = UnitTestUtils(__file__)

    def test_fetch_metadata(self):
        """Fetch metadata as JSON."""

        with patch.object(Network, 'fetch_metadata_for_dataset') as func:
            func.return_value = sentinel.json

            result = Network.fetch_metadata()

            self.assertEqual(result, sentinel.json)
            func.assert_called_once_with(dataset_id=Network.METADATA_ID)

    def test_fetch_revisions(self):
        """Scrape CSV files from revision pages."""

        test_metadata = pd.DataFrame(
            {"Archive Link": ["test1", "test2", "test3"]},
            index=pd.date_range("2020/1/1", "2020/1/3")
        )
        # use a unittest assertion: a bare `assert` is stripped under `python -O`
        # (making the test vacuous) and produces no diff on failure
        self.assertEqual(
            Network.fetch_revisions(test_metadata, pd.Timestamp("2020/1/1")),
            ["test2", "test3"],
        )
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/state_timeseries/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/state_timeseries/test_network.py:
--------------------------------------------------------------------------------
1 | """Unit tests for network.py."""
2 |
3 | # standard library
4 | import unittest
5 | from unittest.mock import patch
6 | from unittest.mock import sentinel
7 |
8 | from delphi.epidata.acquisition.covid_hosp.state_timeseries.network import Network
9 |
10 |
11 | # py3tester coverage target
12 | __test_target__ = \
13 | 'delphi.epidata.acquisition.covid_hosp.state_timeseries.network'
14 |
15 |
class NetworkTests(unittest.TestCase):

    def test_fetch_metadata(self):
        """Fetch metadata as JSON."""

        with patch.object(Network, 'fetch_metadata_for_dataset') as func:
            func.return_value = sentinel.json
            self.assertEqual(Network.fetch_metadata(), sentinel.json)
            func.assert_called_once_with(dataset_id=Network.METADATA_ID)
28 |
--------------------------------------------------------------------------------
/tests/acquisition/covid_hosp/state_timeseries/test_update.py:
--------------------------------------------------------------------------------
1 | """Unit tests for update.py."""
2 |
3 | # standard library
4 | import unittest
5 | from unittest.mock import patch
6 | from unittest.mock import sentinel
7 |
8 | # first party
9 | from delphi.epidata.acquisition.covid_hosp.common.utils import Utils
10 | from delphi.epidata.acquisition.covid_hosp.state_timeseries.update import Update
11 |
12 | # py3tester coverage target
13 | __test_target__ = \
14 | 'delphi.epidata.acquisition.covid_hosp.state_timeseries.update'
15 |
16 |
class UpdateTests(unittest.TestCase):

    def test_run(self):
        """Acquire a new dataset."""

        with patch.object(Utils, 'update_dataset') as mock_update_dataset:
            mock_update_dataset.return_value = sentinel.result
            self.assertEqual(Update.run(), sentinel.result)
            mock_update_dataset.assert_called_once()
29 |
--------------------------------------------------------------------------------
/tests/acquisition/covidcast/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/acquisition/flusurv/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/acquisition/flusurv/test_flusurv.py:
--------------------------------------------------------------------------------
1 | """Unit tests for flusurv.py."""
2 |
3 | # standard library
4 | import unittest
5 | from unittest.mock import MagicMock
6 | from unittest.mock import sentinel
7 |
8 | from delphi.epidata.acquisition.flusurv.flusurv import fetch_json
9 |
10 | # py3tester coverage target
11 | __test_target__ = "delphi.epidata.acquisition.flusurv.flusurv"
12 |
13 |
class FunctionTests(unittest.TestCase):
    """Tests each function individually."""

    def test_fetch_json(self):
        """Run through a successful flow."""

        # a fake HTTP 200 JSON response
        response = MagicMock()
        response.status_code = 200
        response.headers = {"Content-Type": "application/json"}
        response.json.return_value = sentinel.expected

        http = MagicMock()
        http.get.return_value = response

        self.assertEqual(
            fetch_json("path", None, requests_impl=http),
            sentinel.expected,
        )
34 |
--------------------------------------------------------------------------------
/tests/acquisition/flusurv/test_flusurv_update.py:
--------------------------------------------------------------------------------
1 | """Unit tests for flusurv_update.py."""
2 |
3 | # standard library
4 | import unittest
5 |
6 | # py3tester coverage target
7 | __test_target__ = "delphi.epidata.acquisition.flusurv.flusurv_update"
8 |
9 |
class FunctionTests(unittest.TestCase):
    """Tests each function individually."""

    def test_syntax(self):
        """This no-op test ensures that syntax is valid."""
        # intentionally empty: importing the target module is the whole test
16 |
--------------------------------------------------------------------------------
/tests/acquisition/fluview/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/client/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/client/test_delphi_epidata.py:
--------------------------------------------------------------------------------
1 | """Unit tests for delphi_epidata.py."""
2 |
3 | # standard library
4 | import unittest
5 |
6 | # py3tester coverage target
7 | __test_target__ = "delphi.epidata.client.delphi_epidata"
8 |
9 |
class UnitTests(unittest.TestCase):
    """Basic unit tests."""

    # TODO: real unit tests still need to be written. This placeholder passes
    # unless the target file fails to load — in effect, a syntax checker.
    def test_syntax(self):
        """No-op syntax check."""
17 |
--------------------------------------------------------------------------------
/tests/maintenance/__init__.py:
--------------------------------------------------------------------------------
import sys
import os

# Put the current working directory on the import path so the `delphi`
# package resolves (assumes tests are launched from the repo root — TODO confirm).
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/maintenance/test_covidcast_meta_cache_updater.py:
--------------------------------------------------------------------------------
1 | """Unit tests for covidcast_meta_cache_updater.py."""
2 |
3 | # standard library
4 | import argparse
5 | import unittest
6 | from unittest.mock import MagicMock
7 |
8 | from delphi.epidata.maintenance.covidcast_meta_cache_updater import get_argument_parser, \
9 | main
10 | # py3tester coverage target
11 | __test_target__ = (
12 | 'delphi.epidata.maintenance.'
13 | 'covidcast_meta_cache_updater'
14 | )
15 |
16 |
class UnitTests(unittest.TestCase):
  """Basic unit tests for the covidcast meta cache updater."""

  def test_get_argument_parser(self):
    """Return a parser for command-line arguments."""

    self.assertIsInstance(get_argument_parser(), argparse.ArgumentParser)

  def test_main_successful(self):
    """Run the main program successfully."""

    api_response = {
      'result': 1,
      'message': 'yes',
      'epidata': [{'foo': 'bar'}],
    }

    args = MagicMock(log_file="log")
    mock_epidata_impl = MagicMock()
    mock_epidata_impl.covidcast_meta.return_value = api_response
    mock_database = MagicMock()
    mock_database.compute_covidcast_meta.return_value = api_response['epidata']
    # `main` calls `database_impl()` to obtain a database handle; a MagicMock
    # factory stands in for the class (avoids a lambda assignment, PEP 8 E731).
    fake_database_impl = MagicMock(return_value=mock_database)

    main(
      args,
      epidata_impl=mock_epidata_impl,
      database_impl=fake_database_impl)

    # the updater should connect, write the computed metadata to the cache,
    # and disconnect with a truthy first argument (presumably commit — confirm)
    self.assertTrue(mock_database.connect.called)

    self.assertTrue(mock_database.update_covidcast_meta_cache.called)
    actual_args = mock_database.update_covidcast_meta_cache.call_args[0]
    expected_args = (api_response['epidata'],)
    self.assertEqual(actual_args, expected_args)

    self.assertTrue(mock_database.disconnect.called)
    self.assertTrue(mock_database.disconnect.call_args[0][0])

  def test_main_failure(self):
    """Run the main program with a query failure."""

    args = MagicMock(log_file="log")
    mock_database = MagicMock()
    # an empty metadata result signals failure to `main`
    mock_database.compute_covidcast_meta.return_value = []
    fake_database_impl = MagicMock(return_value=mock_database)

    main(args, epidata_impl=None, database_impl=fake_database_impl)

    self.assertTrue(mock_database.compute_covidcast_meta.called)
72 |
--------------------------------------------------------------------------------
/tests/server/__init__.py:
--------------------------------------------------------------------------------
import os
import sys

# The test runner starts in the repository root; make it importable.
sys.path.append(os.getcwd())
5 |
--------------------------------------------------------------------------------
/tests/server/endpoints/test_covidcast.py:
--------------------------------------------------------------------------------
1 | # standard library
2 | import unittest
3 |
4 | from flask.testing import FlaskClient
5 | from flask import Response
6 | from delphi.epidata.server.main import app
7 |
8 | # py3tester coverage target
9 | __test_target__ = "delphi.epidata.server.endpoints.covidcast"
10 |
11 |
class UnitTests(unittest.TestCase):
    """Basic unit tests."""

    client: FlaskClient

    def setUp(self):
        """Put the shared Flask app into test mode and build a client."""
        for key, value in (("TESTING", True), ("WTF_CSRF_ENABLED", False), ("DEBUG", False)):
            app.config[key] = value
        self.client = app.test_client()

    def test_url(self):
        """A bare request should fail with a 'missing parameter' message."""
        response: Response = self.client.get("/covidcast/", follow_redirects=True)
        payload = response.get_json()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(payload["result"], -1)
        self.assertRegex(payload["message"], r"missing parameter.*")

    def test_time(self):
        """A well-formed query with no matching rows reports 'no results'."""
        query = {"signal": "src1:*", "time": "day:20200101", "geo": "state:*"}
        response: Response = self.client.get("/covidcast/", query_string=query)
        payload = response.get_json()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(payload["result"], -2)  # no result
        self.assertEqual(payload["message"], "no results")
36 |
--------------------------------------------------------------------------------
/tests/server/endpoints/test_nidss_flu.py:
--------------------------------------------------------------------------------
1 | # standard library
2 | import unittest
3 | import base64
4 |
5 | from json import loads
6 | from flask.testing import FlaskClient
7 | from flask import Response
8 | from delphi.epidata.server.main import app
9 |
10 | # py3tester coverage target
11 | __test_target__ = "delphi.epidata.server.endpoints.nidss_flu"
12 |
13 |
class UnitTests(unittest.TestCase):
    """Basic unit tests."""

    client: FlaskClient

    def setUp(self):
        """Put the shared Flask app into test mode and build a client."""
        app.config["TESTING"] = True
        app.config["WTF_CSRF_ENABLED"] = False
        app.config["DEBUG"] = False
        self.client = app.test_client()

    def test_urls(self):
        """Both the direct endpoint and the api.php wrapper require params."""
        cases = (
            ('direct url', '/nidss_flu'),
            ('with wrapper', '/api.php?endpoint=nidss_flu'),
        )
        for label, url in cases:
            with self.subTest(label):
                response: Response = self.client.get(url, follow_redirects=True)
                payload = response.get_json()
                self.assertEqual(response.status_code, 200)
                self.assertEqual(payload['result'], -1)
                self.assertRegex(payload['message'], r"missing parameter.*")

    def test_(self):
        """A well-formed query with no matching rows reports 'no results'."""
        query = {"regions": "A", "epiweeks": "12"}
        response: Response = self.client.get('/nidss_flu/', query_string=query)
        payload = response.get_json()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(payload['result'], -2)  # no result
        self.assertEqual(payload['message'], "no results")
45 |
--------------------------------------------------------------------------------
/tests/server/test_exceptions.py:
--------------------------------------------------------------------------------
1 | # standard library
2 | import unittest
3 |
4 | # from flask.testing import FlaskClient
5 | from delphi.epidata.server._common import app
6 | from delphi.epidata.server._exceptions import _is_using_status_codes
7 |
8 | # py3tester coverage target
9 | __test_target__ = 'delphi.epidata.server._exceptions'
10 |
class UnitTests(unittest.TestCase):
    """Basic unit tests."""
    # app: FlaskClient

    def setUp(self):
        """Switch the shared app into test configuration."""
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['DEBUG'] = False

    def test_is_using_status_codes(self):
        """Status codes are used for data formats, not classic/tree output."""
        for fmt in ('csv', 'json', 'jsonl'):
            with app.test_request_context(f'/?format={fmt}'):
                self.assertTrue(_is_using_status_codes())
        with app.test_request_context('/'):
            self.assertFalse(_is_using_status_codes())
        for fmt in ('classic', 'tree'):
            with app.test_request_context(f'/?format={fmt}'):
                self.assertFalse(_is_using_status_codes())
33 |
--------------------------------------------------------------------------------
/tests/server/test_pandas.py:
--------------------------------------------------------------------------------
1 | """Unit tests for pandas helper."""
2 |
3 | # standard library
4 | import unittest
5 | from unittest.mock import patch, sentinel, ANY
6 |
7 | # first party
8 | from delphi.epidata.server.main import app
9 | from delphi.epidata.server._pandas import as_pandas
10 | from delphi.epidata.server._config import MAX_RESULTS
11 |
12 | # py3tester coverage target
13 | __test_target__ = "delphi.epidata.server._pandas"
14 |
15 |
class TestPandas(unittest.TestCase):
    """Basic unit tests."""

    def setUp(self):
        """Perform per-test setup."""
        app.config["TESTING"] = True
        app.config["WTF_CSRF_ENABLED"] = False
        app.config["DEBUG"] = False

    @patch("delphi.epidata.server._pandas.text")
    @patch("pandas.read_sql_query")
    def test_as_pandas(self, mock_read_sql_query, mock_sqlalch_text):
        """as_pandas appends a LIMIT clause (default or explicit) to the query."""
        with app.test_request_context('covidcast/'):
            # default: the query is capped at MAX_RESULTS + 1 rows
            mock_sqlalch_text.return_value = sentinel.default_limit
            as_pandas("", params=None, db_engine=None)
            mock_read_sql_query.assert_called()
            mock_sqlalch_text.assert_called_with(f" LIMIT {MAX_RESULTS + 1}")
            mock_read_sql_query.assert_called_with(
                sentinel.default_limit, ANY, params=ANY, parse_dates=ANY)

            # an explicit limit_rows overrides the default cap
            mock_sqlalch_text.return_value = sentinel.explicit_limit
            as_pandas("", params=None, db_engine=None, limit_rows=5)
            mock_sqlalch_text.assert_called_with(" LIMIT 5")
            mock_read_sql_query.assert_called_with(
                sentinel.explicit_limit, ANY, params=ANY, parse_dates=ANY)
40 |
--------------------------------------------------------------------------------
/tests/server/test_security.py:
--------------------------------------------------------------------------------
1 | """Unit tests for granular sensor authentication."""
2 |
3 | # standard library
4 | import unittest
5 | import base64
6 |
7 | # from flask.testing import FlaskClient
8 | from delphi.epidata.server._common import app
9 | from delphi.epidata.server._security import (
10 | resolve_auth_token,
11 | )
12 |
13 | # py3tester coverage target
14 | __test_target__ = "delphi.epidata.server._security"
15 |
16 |
class UnitTests(unittest.TestCase):
    """Basic unit tests."""

    # app: FlaskClient

    def setUp(self):
        """Switch the shared app into test configuration."""
        app.config["TESTING"] = True
        app.config["WTF_CSRF_ENABLED"] = False
        app.config["DEBUG"] = False

    def test_resolve_auth_token(self):
        """The token may arrive as a query param or an Authorization header."""
        with self.subTest("no auth"):
            with app.test_request_context("/"):
                self.assertIsNone(resolve_auth_token())

        # both query-parameter spellings resolve to the same token
        for label, url in (("param", "/?auth=abc"), ("param2", "/?api_key=abc")):
            with self.subTest(label):
                with app.test_request_context(url):
                    self.assertEqual(resolve_auth_token(), "abc")

        with self.subTest("bearer token"):
            headers = {"Authorization": "Bearer abc"}
            with app.test_request_context("/", headers=headers):
                self.assertEqual(resolve_auth_token(), "abc")

        with self.subTest("basic token"):
            credentials = base64.b64encode(b"epidata:abc").decode("utf-8")
            headers = {"Authorization": f"Basic {credentials}"}
            with app.test_request_context("/", headers=headers):
                self.assertEqual(resolve_auth_token(), "abc")
48 |
--------------------------------------------------------------------------------