├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.yml
│ ├── documentation_improvement.yml
│ └── feature_request.yml
├── PULL_REQUEST_TEMPLATE.md
├── actions
│ └── notify-slack
│ │ └── action.yml
├── config.yml
├── lighthouserc.js
└── workflows
│ ├── api-deployer.yml
│ ├── api-dev.yml
│ ├── api-prod.yml
│ ├── api-qa.yml
│ ├── assign_next_release_milestone.yml
│ ├── build-test.yml
│ ├── datasets-batch-deployer-dev.yml
│ ├── datasets-batch-deployer-prod.yml
│ ├── datasets-batch-deployer-qa.yml
│ ├── datasets-batch-deployer.yml
│ ├── db-deployer.yml
│ ├── db-prod.yml
│ ├── db-qa.yml
│ ├── db-update-dev.yml
│ ├── db-update-prod.yml
│ ├── db-update-qa.yml
│ ├── db-update.yml
│ ├── duplicate-prod-db.yml
│ ├── integration-tests-pr.yml
│ ├── integration-tests.yml
│ ├── release-qa.yml
│ ├── release.yml
│ ├── schedule-load-test.yml
│ ├── typescript-generator-check.yml
│ ├── validator-update.yml
│ ├── web-app-deployer.yml
│ ├── web-dev.yml
│ ├── web-pr.yml
│ ├── web-prod.yml
│ ├── web-prototype.yml
│ └── web-qa.yml
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── README.md
├── api
├── .coveragerc
├── .flake8
├── .gitignore
├── .openapi-generator-ignore
├── .openapi-generator
│ ├── FILES
│ └── VERSION
├── Dockerfile
├── docker-compose.yaml
├── pyproject.toml
├── requirements.txt
├── requirements_dev.txt
├── setup.cfg
├── src
│ ├── __init__.py
│ ├── feeds
│ │ └── impl
│ │ │ ├── __init__.py
│ │ │ ├── datasets_api_impl.py
│ │ │ ├── error_handling.py
│ │ │ ├── feeds_api_impl.py
│ │ │ ├── metadata_api_impl.py
│ │ │ ├── models
│ │ │ ├── basic_feed_impl.py
│ │ │ ├── bounding_box_impl.py
│ │ │ ├── entity_type_enum.py
│ │ │ ├── external_id_impl.py
│ │ │ ├── feed_impl.py
│ │ │ ├── gbfs_endpoint_impl.py
│ │ │ ├── gbfs_feed_impl.py
│ │ │ ├── gbfs_validation_report_impl.py
│ │ │ ├── gbfs_version_impl.py
│ │ │ ├── gtfs_dataset_impl.py
│ │ │ ├── gtfs_feed_impl.py
│ │ │ ├── gtfs_rt_feed_impl.py
│ │ │ ├── latest_dataset_impl.py
│ │ │ ├── location_impl.py
│ │ │ ├── redirect_impl.py
│ │ │ ├── search_feed_item_result_impl.py
│ │ │ └── validation_report_impl.py
│ │ │ └── search_api_impl.py
│ ├── main.py
│ ├── middleware
│ │ ├── request_context.py
│ │ └── request_context_middleware.py
│ ├── scripts
│ │ ├── gbfs_utils
│ │ │ ├── __init__.py
│ │ │ ├── comparison.py
│ │ │ ├── fetching.py
│ │ │ └── license.py
│ │ ├── load_dataset_on_create.py
│ │ ├── populate_db.py
│ │ ├── populate_db_gbfs.py
│ │ ├── populate_db_gtfs.py
│ │ └── populate_db_test_data.py
│ ├── shared
│ │ ├── __init__.py
│ │ ├── common
│ │ │ ├── db_utils.py
│ │ │ ├── entity_type_enum.py
│ │ │ ├── error_handling.py
│ │ │ ├── iter_utils.py
│ │ │ └── logging_utils.py
│ │ ├── database
│ │ │ ├── __init__.py
│ │ │ ├── database.py
│ │ │ └── sql_functions
│ │ │ │ └── unaccent.py
│ │ └── feed_filters
│ │ │ ├── __init__.py
│ │ │ ├── feed_filter.py
│ │ │ ├── gbfs_feed_filter.py
│ │ │ ├── gtfs_dataset_filter.py
│ │ │ ├── gtfs_feed_filter.py
│ │ │ ├── gtfs_rt_feed_filter.py
│ │ │ └── param_utils.py
│ └── utils
│ │ ├── __init__.py
│ │ ├── config.py
│ │ ├── data_utils.py
│ │ ├── date_utils.py
│ │ ├── dict_utils.py
│ │ ├── logger.py
│ │ └── model_utils.py
└── tests
│ ├── README.md
│ ├── __init__.py
│ ├── integration
│ ├── conftest.py
│ ├── populate_tests
│ │ ├── conftest.py
│ │ ├── test_data
│ │ │ └── sources_test.csv
│ │ └── test_populate.py
│ ├── test_data
│ │ ├── extra_test_data.json
│ │ └── sources_test.csv
│ ├── test_database.py
│ ├── test_datasets_api.py
│ ├── test_feeds_api.py
│ ├── test_metadata_api.py
│ └── test_search_api.py
│ ├── test_data
│ ├── extra_test_data.json
│ ├── sources_test.csv
│ ├── systems_test.csv
│ └── test_datasets.json
│ ├── test_utils
│ ├── __init__.py
│ ├── cryptography.py
│ ├── database.py
│ ├── db_utils.py
│ └── token.py
│ ├── unittest
│ ├── conftest.py
│ ├── middleware
│ │ └── test_request_context.py
│ ├── models
│ │ ├── test_basic_feed_impl.py
│ │ ├── test_bounding_box_impl.py
│ │ ├── test_external_id_impl.py
│ │ ├── test_gtfs_dataset_impl.py
│ │ ├── test_gtfs_feed_impl.py
│ │ ├── test_gtfs_rt_feed_impl.py
│ │ ├── test_latest_dataset_impl.py
│ │ ├── test_location_impl.py
│ │ ├── test_redirect_id_impl.py
│ │ ├── test_search_feed_item_result_impl.py
│ │ └── test_validation_report_impl.py
│ ├── test_feeds.py
│ └── test_param_utils.py
│ └── utils
│ ├── test_compare_java_versions.py
│ ├── test_date_utils.py
│ └── test_dict_utils.py
├── bigquery
└── compare-validation-reports.sql
├── config
└── .env.local
├── docker-compose.yaml
├── docs
├── BearerTokenSchema.yaml
├── DatabaseCatalogAPI.yaml
├── DatabaseCatalogTokenAPI.yaml
├── DatabaseSelection.md
├── GCP.md
├── GitHubWorkflows.md
├── IAPAuthenticationSchema.yaml
├── Logging.md
├── OperationsAPI.yaml
├── SpreadsheetSchemaV2.md
├── SwaggerUI
│ ├── README.md
│ ├── dist
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ ├── index.css
│ │ ├── oauth2-redirect.html
│ │ ├── swagger-initializer.js
│ │ ├── swagger-ui-bundle.js
│ │ ├── swagger-ui-bundle.js.map
│ │ ├── swagger-ui-es-bundle-core.js
│ │ ├── swagger-ui-es-bundle-core.js.map
│ │ ├── swagger-ui-es-bundle.js
│ │ ├── swagger-ui-es-bundle.js.map
│ │ ├── swagger-ui-standalone-preset.js
│ │ ├── swagger-ui-standalone-preset.js.map
│ │ ├── swagger-ui.css
│ │ ├── swagger-ui.css.map
│ │ ├── swagger-ui.js
│ │ └── swagger-ui.js.map
│ ├── index.html
│ ├── screenshots
│ │ └── swagger-github-pages.png
│ └── swagger-ui.version
├── batch-processing
│ ├── BatchProcessing.md
│ ├── batch_datasets.png
│ ├── batch_process_dataset.png
│ ├── batch_processing_schema.png
│ ├── extract_bb.png
│ ├── process_validation_report.png
│ └── update_validation_report.png
└── feature-flag
│ ├── feature_flag.md
│ ├── feature_flag1.png
│ ├── feature_flag2.png
│ ├── feature_flag3.png
│ ├── feature_flag4.png
│ ├── feature_flag5.png
│ ├── feature_flag6.png
│ └── feature_flag7.png
├── functions-python
├── .flake8
├── .gcloudignore
├── .gitignore
├── README.md
├── backfill_dataset_service_date_range
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ └── tests
│ │ └── test_backfill_dataset_service_date_range_main.py
├── batch_datasets
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── main_local_debug.py
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ └── tests
│ │ ├── conftest.py
│ │ └── test_batch_datasets_main.py
├── batch_process_dataset
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── main_local_debug.py
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ ├── status.py
│ └── tests
│ │ ├── conftest.py
│ │ └── test_batch_process_dataset_main.py
├── big_query_ingestion
│ ├── .coveragerc
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ ├── common
│ │ │ └── bq_data_transfer.py
│ │ ├── gbfs
│ │ │ └── gbfs_big_query_ingest.py
│ │ ├── gtfs
│ │ │ └── gtfs_big_query_ingest.py
│ │ └── main.py
│ └── tests
│ │ ├── test_common.py
│ │ ├── test_gbfs_ingestion.py
│ │ ├── test_gtfs_ingestion.py
│ │ └── test_main.py
├── dataset_service
│ ├── .coveragerc
│ ├── __init__.py
│ ├── main.py
│ ├── requirements_dev.txt
│ └── tests
│ │ └── test_dataset_service.py
├── export_csv
│ ├── .coveragerc
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ └── main.py
│ └── tests
│ │ ├── conftest.py
│ │ └── test_export_csv_main.py
├── feed_sync_dispatcher_transitland
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── main_local_debug.py
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ └── tests
│ │ └── test_feed_sync.py
├── feed_sync_process_transitland
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── main_local_debug.py
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ ├── feed_processor_utils.py
│ │ └── main.py
│ └── tests
│ │ ├── test_feed_processor_utils.py
│ │ └── test_feed_sync_process.py
├── gbfs_validator
│ ├── .coveragerc
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ ├── gbfs_data_processor.py
│ │ ├── gbfs_utils.py
│ │ └── main.py
│ └── tests
│ │ ├── test_gbfs_data_processor.py
│ │ ├── test_gbfs_utils.py
│ │ └── test_gbfs_validator.py
├── helpers
│ ├── .coveragerc
│ ├── __init__.py
│ ├── bq_schema
│ │ ├── gbfs_schema.json
│ │ ├── gtfs_schema.json
│ │ └── schema.py
│ ├── feed_status.py
│ ├── feed_sync
│ │ ├── feed_sync_common.py
│ │ ├── feed_sync_dispatcher.py
│ │ └── models.py
│ ├── gtfs_validator_common.py
│ ├── locations.py
│ ├── logger.py
│ ├── parser.py
│ ├── pub_sub.py
│ ├── query_helper.py
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── test_config.json
│ ├── tests
│ │ ├── test_helpers.py
│ │ ├── test_locations.py
│ │ ├── test_schema.py
│ │ ├── test_timezone.py
│ │ └── test_transform.py
│ ├── timezone.py
│ ├── transform.py
│ ├── utils.py
│ └── validation_report
│ │ └── validation_report_update.py
├── operations_api
│ ├── .coveragerc
│ ├── .gitignore
│ ├── .openapi-generator-ignore
│ ├── .openapi-generator
│ │ ├── FILES
│ │ └── VERSION
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ ├── feeds_operations
│ │ │ ├── __init__.py
│ │ │ └── impl
│ │ │ │ ├── __init__.py
│ │ │ │ ├── feeds_operations_impl.py
│ │ │ │ ├── models
│ │ │ │ ├── __init__.py
│ │ │ │ ├── basic_feed_impl.py
│ │ │ │ ├── entity_type_impl.py
│ │ │ │ ├── external_id_impl.py
│ │ │ │ ├── get_feeds_response.py
│ │ │ │ ├── gtfs_feed_impl.py
│ │ │ │ ├── gtfs_rt_feed_impl.py
│ │ │ │ ├── location_impl.py
│ │ │ │ ├── redirect_impl.py
│ │ │ │ ├── update_request_gtfs_feed_impl.py
│ │ │ │ └── update_request_gtfs_rt_feed_impl.py
│ │ │ │ └── request_validator.py
│ │ ├── main.py
│ │ └── middleware
│ │ │ ├── request_context_middleware.py
│ │ │ └── request_context_oauth2.py
│ └── tests
│ │ ├── __init__.py
│ │ ├── conftest.py
│ │ ├── feeds_operations
│ │ └── impl
│ │ │ ├── __init__.py
│ │ │ ├── models
│ │ │ ├── test_entity_type_impl.py
│ │ │ ├── test_external_id_impl.py
│ │ │ ├── test_feed_responses.py
│ │ │ ├── test_redirect_impl.py
│ │ │ ├── test_update_request_gtfs_feed_impl.py
│ │ │ └── test_update_request_gtfs_rt_feed_impl.py
│ │ │ ├── test_feeds_operations_impl_gtfs.py
│ │ │ ├── test_feeds_operations_impl_gtfs_rt.py
│ │ │ ├── test_get_feeds.py
│ │ │ └── test_request_validator.py
│ │ ├── middleware
│ │ ├── test_request_context_middleware.py
│ │ └── test_request_context_oauth2.py
│ │ └── pytest.ini
├── preprocessed_analytics
│ ├── .coveragerc
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ ├── main.py
│ │ └── processors
│ │ │ ├── base_analytics_processor.py
│ │ │ ├── gbfs_analytics_processor.py
│ │ │ └── gtfs_analytics_processor.py
│ └── tests
│ │ ├── test_base_processor.py
│ │ ├── test_gbfs_processor.py
│ │ ├── test_gtfs_processor.py
│ │ └── test_main.py
├── process_validation_report
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ └── tests
│ │ ├── conftest.py
│ │ └── test_validation_report.py
├── requirements_dev.txt
├── reverse_geolocation
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── location_group_utils.py
│ │ ├── main.py
│ │ ├── parse_request.py
│ │ ├── reverse_geolocation.py
│ │ ├── reverse_geolocation_batch.py
│ │ └── reverse_geolocation_processor.py
│ └── tests
│ │ ├── test_location_group_utils.py
│ │ ├── test_reverse_geolocation.py
│ │ ├── test_reverse_geolocation_batch.py
│ │ └── test_reverse_geolocation_processor.py
├── reverse_geolocation_populate
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ ├── locality_admin_levels.json
│ │ └── main.py
│ └── tests
│ │ └── test_reverse_geolocation_populate.py
├── tasks_executor
│ ├── .coveragerc
│ ├── README.md
│ ├── function_config.json
│ ├── main_local_debug.py
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ ├── main.py
│ │ └── tasks
│ │ │ └── validation_reports
│ │ │ ├── README.md
│ │ │ └── rebuild_missing_validation_reports.py
│ └── tests
│ │ ├── conftest.py
│ │ ├── tasks
│ │ └── validation_reports
│ │ │ └── test_rebuild_missing_validation_reports.py
│ │ └── test_main.py
├── test_utils
│ ├── __init__.py
│ ├── database_utils.py
│ └── liquibase.properties
├── tokens
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── pyproject.toml
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── setup.cfg
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ └── tests
│ │ ├── __init__.py
│ │ └── test_main.py
├── update_feed_status
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ └── tests
│ │ ├── conftest.py
│ │ └── test_update_feed_status_main.py
├── update_validation_report
│ ├── .coveragerc
│ ├── .env.rename_me
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ │ ├── __init__.py
│ │ └── main.py
│ └── tests
│ │ └── test_update_validation_report.py
└── validation_to_ndjson
│ ├── .coveragerc
│ ├── README.md
│ ├── function_config.json
│ ├── requirements.txt
│ ├── requirements_dev.txt
│ ├── src
│ ├── __init__.py
│ ├── locations.py
│ ├── main.py
│ └── validation_report_converter.py
│ └── tests
│ ├── test_converter.py
│ ├── test_locations.py
│ └── test_main.py
├── functions
├── .firebaserc
├── README.md
├── firebase.json
├── package.json
├── packages
│ ├── feed-form
│ │ ├── .eslintrc.js
│ │ ├── .gitignore
│ │ ├── jest.config.ts
│ │ ├── package.json
│ │ ├── src
│ │ │ ├── __tests__
│ │ │ │ ├── feed-form.spec.ts
│ │ │ │ └── github-issue-builder.spec.ts
│ │ │ ├── impl
│ │ │ │ ├── feed-form-impl.ts
│ │ │ │ └── types.ts
│ │ │ └── index.ts
│ │ ├── tsconfig.dev.json
│ │ └── tsconfig.json
│ ├── firebase-password-policy
│ │ ├── .eslintrc.js
│ │ ├── .gcloudignore
│ │ ├── .gitignore
│ │ ├── jest.config.ts
│ │ ├── package.json
│ │ ├── src
│ │ │ ├── __tests__
│ │ │ │ └── firebase-password-policy.spec.ts
│ │ │ ├── impl
│ │ │ │ └── firebase-password-policy-impl.ts
│ │ │ └── index.ts
│ │ ├── tsconfig.dev.json
│ │ └── tsconfig.json
│ └── user-api
│ │ ├── .eslintrc.js
│ │ ├── .gitignore
│ │ ├── jest.config.ts
│ │ ├── package.json
│ │ ├── src
│ │ ├── __tests__
│ │ │ └── user-api.spec.ts
│ │ ├── impl
│ │ │ └── user-api-impl.ts
│ │ └── index.ts
│ │ ├── tsconfig.dev.json
│ │ └── tsconfig.json
└── yarn.lock
├── infra
├── .terraform.lock.hcl
├── artifact-registry
│ ├── main.tf
│ ├── vars.tf
│ └── vars.tfvars.rename_me
├── backend.conf.rename_me
├── batch
│ ├── .terraform.lock.hcl
│ ├── main.tf
│ ├── provider.tf
│ ├── vars.tf
│ └── vars.tfvars.rename_me
├── feed-api
│ ├── main.tf
│ └── vars.tf
├── functions-python
│ ├── main.tf
│ └── vars.tf
├── global
│ ├── main.tf
│ └── vars.tf
├── load-balancer
│ ├── main.tf
│ └── vars.tf
├── main.tf
├── metrics
│ ├── main.tf
│ └── vars.tf
├── postgresql
│ ├── .terraform.lock.hcl
│ ├── main.tf
│ ├── provider.tf
│ ├── vars.tf
│ └── vars.tfvars.rename_me
├── provider.tf
├── terraform-init
│ ├── .terraform.lock.hcl
│ ├── backend.conf.rename_me
│ ├── main.tf
│ ├── vars.tf
│ └── vars.tfvars.rename_me
├── vars.tf
├── vars.tfvars.rename_me
└── workflows
│ ├── main.tf
│ └── vars.tf
├── integration-tests
├── .flake8
├── .gitignore
├── README.md
├── requirements.txt
├── requirements_dev.txt
└── src
│ ├── __init__.py
│ ├── endpoints
│ ├── datasets.py
│ ├── feeds.py
│ ├── gbfs_feeds.py
│ ├── gtfs_feeds.py
│ ├── gtfs_rt_feeds.py
│ ├── integration_tests.py
│ └── metadata.py
│ └── main.py
├── liquibase
├── changelog.xml
├── changes
│ ├── add_idxs.sql
│ ├── feat_1041.sql
│ ├── feat_1046.sql
│ ├── feat_1055.sql
│ ├── feat_1082.sql
│ ├── feat_1083.sql
│ ├── feat_1118.sql
│ ├── feat_1124.sql
│ ├── feat_1125.sql
│ ├── feat_1132.sql
│ ├── feat_1181.sql
│ ├── feat_1182.sql
│ ├── feat_1195.sql
│ ├── feat_1200.sql
│ ├── feat_13.sql
│ ├── feat_149.sql
│ ├── feat_15.sql
│ ├── feat_240.sql
│ ├── feat_26.sql
│ ├── feat_263.sql
│ ├── feat_327.sql
│ ├── feat_360.sql
│ ├── feat_371.sql
│ ├── feat_389.sql
│ ├── feat_533.sql
│ ├── feat_565.sql
│ ├── feat_566.sql
│ ├── feat_57.sql
│ ├── feat_611.sql
│ ├── feat_618.sql
│ ├── feat_618_2.sql
│ ├── feat_622.sql
│ ├── feat_66.sql
│ ├── feat_741.sql
│ ├── feat_76.sql
│ ├── feat_77.sql
│ ├── feat_780.sql
│ ├── feat_794.sql
│ ├── feat_794_2.sql
│ ├── feat_823.sql
│ ├── feat_871.sql
│ ├── feat_879.sql
│ ├── feat_88.sql
│ ├── feat_880.sql
│ ├── feat_880_2.sql
│ ├── feat_927.sql
│ ├── feat_927_2.sql
│ ├── feat_946.sql
│ ├── feat_951.sql
│ ├── feat_966.sql
│ ├── feat_993.sql
│ ├── feat_997.sql
│ └── official_tag_update.sql
├── clear_tables.xml
└── population_prep_tables.xml
├── load-test
├── README.md
└── gtfs_user_test.py
├── scripts
├── .deploy-env-dev.rename_me
├── api-deploy-dev.sh
├── api-gen.sh
├── api-operations-gen.sh
├── api-start.sh
├── api-tests.sh
├── assign-next-release-milestone.js
├── db-gen.sh
├── docker-build-push.sh
├── docker-localdb-rebuild-data.sh
├── duplicate-prod-db
│ ├── copy-prod-db-to-qa.sh
│ ├── create-dump-bucket.sh
│ └── post-import.sh
├── extract-hash-and-version.sh
├── function-python-build.sh
├── function-python-deploy.sh
├── function-python-run.sh
├── function-python-setup.sh
├── gen-config.yaml
├── gen-operations-config.yaml
├── integration-tests.sh
├── lint-tests.sh
├── lint-write.sh
├── local_docker
│ └── Dockerfile
├── populate-db-test-data.sh
├── populate-db.sh
├── pubsub_message_print.sh
├── replace-variables.sh
├── setup-openapi-generator.sh
├── tunnel-create.sh
└── tunnel-kill.sh
├── web-app
├── .eslintignore
├── .eslintrc.json
├── .firebaserc
├── .gitignore
├── .prettierrc
├── README.md
├── babel.config.js
├── cypress.config.ts
├── cypress.env.json.rename_me
├── cypress
│ ├── e2e
│ │ ├── addFeedForm.cy.ts
│ │ ├── changepassword.cy.ts
│ │ ├── feeds.cy.ts
│ │ ├── home.cy.ts
│ │ ├── resetpassword.cy.ts
│ │ ├── signin.cy.ts
│ │ └── signup.cy.ts
│ ├── fixtures
│ │ ├── feed_datasets_test-516.json
│ │ ├── feed_test-516.json
│ │ └── gtfs_feed_test-516.json
│ └── support
│ │ ├── commands.ts
│ │ ├── e2e.ts
│ │ └── index.ts
├── firebase.json
├── jest-global-setup.ts
├── package.json
├── public
│ ├── assets
│ │ ├── MOBILTYDATA_logo_purple_M.png
│ │ ├── MOBILTYDATA_logo_purple_M.webp
│ │ └── rocket.gif
│ ├── favicon.ico
│ ├── index.html
│ ├── locales
│ │ ├── en
│ │ │ ├── account.json
│ │ │ ├── common.json
│ │ │ ├── contactUs.json
│ │ │ ├── feeds.json
│ │ │ └── gbfs.json
│ │ └── fr
│ │ │ ├── account.json
│ │ │ └── common.json
│ ├── manifest.json
│ ├── robots.staging.txt
│ ├── robots.txt
│ └── sitemap.xml
├── src
│ ├── .env.rename_me
│ ├── .env.test
│ ├── app
│ │ ├── App.css
│ │ ├── App.tsx
│ │ ├── AppContainer.tsx
│ │ ├── Theme.ts
│ │ ├── components
│ │ │ ├── ContentBox.tsx
│ │ │ ├── Context.tsx
│ │ │ ├── CoveredAreaMap.tsx
│ │ │ ├── FeedStatus.tsx
│ │ │ ├── Footer.tsx
│ │ │ ├── Header.style.ts
│ │ │ ├── Header.tsx
│ │ │ ├── HeaderMobileDrawer.tsx
│ │ │ ├── Locations.tsx
│ │ │ ├── LogoutConfirmModal.tsx
│ │ │ ├── Map.tsx
│ │ │ ├── MapGeoJSON.tsx
│ │ │ ├── NestedCheckboxList.spec.tsx
│ │ │ ├── NestedCheckboxList.tsx
│ │ │ ├── OfficialChip.tsx
│ │ │ ├── PopupTable.tsx
│ │ │ ├── ThemeToggle.tsx
│ │ │ └── WarningContentBox.tsx
│ │ ├── constants
│ │ │ ├── Navigation.spec.ts
│ │ │ ├── Navigation.ts
│ │ │ └── Validation.tsx
│ │ ├── context
│ │ │ ├── RemoteConfigProvider.spec.tsx
│ │ │ ├── RemoteConfigProvider.tsx
│ │ │ └── ThemeProvider.tsx
│ │ ├── hooks
│ │ │ └── index.ts
│ │ ├── interface
│ │ │ ├── ContextProviderProps.ts
│ │ │ ├── Navigation.ts
│ │ │ └── RemoteConfig.ts
│ │ ├── router
│ │ │ ├── ProtectedRoute.tsx
│ │ │ └── Router.tsx
│ │ ├── screens
│ │ │ ├── About.tsx
│ │ │ ├── Account.tsx
│ │ │ ├── Analytics
│ │ │ │ ├── GBFSFeedAnalytics
│ │ │ │ │ ├── DetailPanel.tsx
│ │ │ │ │ ├── GBFSFeedAnalyticsTable.tsx
│ │ │ │ │ └── index.tsx
│ │ │ │ ├── GBFSNoticeAnalytics
│ │ │ │ │ └── index.tsx
│ │ │ │ ├── GBFSVersionAnalytics
│ │ │ │ │ └── index.tsx
│ │ │ │ ├── GTFSFeatureAnalytics
│ │ │ │ │ └── index.tsx
│ │ │ │ ├── GTFSFeedAnalytics
│ │ │ │ │ ├── DetailPanel.tsx
│ │ │ │ │ ├── GTFSFeedAnalyticsTable.tsx
│ │ │ │ │ └── index.tsx
│ │ │ │ ├── GTFSNoticeAnalytics
│ │ │ │ │ └── index.tsx
│ │ │ │ ├── analytics.css
│ │ │ │ └── types.ts
│ │ │ ├── ChangePassword.tsx
│ │ │ ├── CompleteRegistration.tsx
│ │ │ ├── ContactInformation.tsx
│ │ │ ├── ContactUs.tsx
│ │ │ ├── Contribute.tsx
│ │ │ ├── FAQ.tsx
│ │ │ ├── Feed
│ │ │ │ ├── Feed.functions.tsx
│ │ │ │ ├── Feed.spec.tsx
│ │ │ │ ├── Feed.styles.ts
│ │ │ │ ├── StructuredData.functions.ts
│ │ │ │ ├── components
│ │ │ │ │ ├── AssociatedFeeds.tsx
│ │ │ │ │ ├── DataQualitySummary.tsx
│ │ │ │ │ ├── FeedAuthenticationSummaryInfo.tsx
│ │ │ │ │ ├── FeedSummary.tsx
│ │ │ │ │ ├── FeedTitle.tsx
│ │ │ │ │ ├── GbfsFeedInfo.tsx
│ │ │ │ │ ├── GbfsVersions.spec.tsx
│ │ │ │ │ ├── GbfsVersions.tsx
│ │ │ │ │ └── PreviousDatasets.tsx
│ │ │ │ └── index.tsx
│ │ │ ├── FeedSubmission
│ │ │ │ ├── Form
│ │ │ │ │ ├── FirstStep.tsx
│ │ │ │ │ ├── FourthStep.tsx
│ │ │ │ │ ├── SecondStep.tsx
│ │ │ │ │ ├── SecondStepRealtime.tsx
│ │ │ │ │ ├── ThirdStep.tsx
│ │ │ │ │ ├── components
│ │ │ │ │ │ └── FormLabelDescription.tsx
│ │ │ │ │ └── index.tsx
│ │ │ │ └── index.tsx
│ │ │ ├── FeedSubmissionFAQ.tsx
│ │ │ ├── FeedSubmitted.tsx
│ │ │ ├── Feeds
│ │ │ │ ├── AdvancedSearchTable.tsx
│ │ │ │ ├── Feeds.styles.ts
│ │ │ │ ├── GtfsRtEntities.tsx
│ │ │ │ ├── PopoverList.tsx
│ │ │ │ ├── ProviderTitle.tsx
│ │ │ │ ├── SearchFilters.tsx
│ │ │ │ ├── SearchTable.spec.tsx
│ │ │ │ ├── SearchTable.tsx
│ │ │ │ ├── index.tsx
│ │ │ │ └── utility.ts
│ │ │ ├── ForgotPassword.tsx
│ │ │ ├── Home.tsx
│ │ │ ├── PostRegistration.tsx
│ │ │ ├── PrivacyPolicy.tsx
│ │ │ ├── SignIn.tsx
│ │ │ ├── SignUp.tsx
│ │ │ └── TermsAndConditions.tsx
│ │ ├── services
│ │ │ ├── channel-service.ts
│ │ │ ├── feeds
│ │ │ │ ├── add-feed-form-service.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── types.ts
│ │ │ │ ├── utils.spec.ts
│ │ │ │ └── utils.ts
│ │ │ ├── index.ts
│ │ │ └── profile-service.ts
│ │ ├── store
│ │ │ ├── analytics-reducer.ts
│ │ │ ├── dataset-reducer.ts
│ │ │ ├── dataset-selectors.ts
│ │ │ ├── feed-reducer.ts
│ │ │ ├── feed-selectors.ts
│ │ │ ├── feeds-reducer.ts
│ │ │ ├── feeds-selectors.ts
│ │ │ ├── gbfs-analytics-reducer.ts
│ │ │ ├── gbfs-analytics-selector.ts
│ │ │ ├── gtfs-analytics-reducer.ts
│ │ │ ├── gtfs-analytics-selector.ts
│ │ │ ├── profile-reducer.ts
│ │ │ ├── profile-selectors.ts
│ │ │ ├── reducers.ts
│ │ │ ├── saga
│ │ │ │ ├── auth-saga.ts
│ │ │ │ ├── dataset-saga.ts
│ │ │ │ ├── feed-saga.ts
│ │ │ │ ├── feeds-saga.ts
│ │ │ │ ├── gbfs-analytics-saga.ts
│ │ │ │ ├── gtfs-analytics-saga.ts
│ │ │ │ ├── profile-saga.ts
│ │ │ │ └── root-saga.ts
│ │ │ ├── selectors.ts
│ │ │ └── store.ts
│ │ ├── styles
│ │ │ ├── Account.css
│ │ │ ├── Footer.css
│ │ │ ├── PageHeader.style.ts
│ │ │ ├── PageLayout.style.ts
│ │ │ ├── TextShimmer.css
│ │ │ └── VerificationBadge.styles.ts
│ │ ├── types.ts
│ │ └── utils
│ │ │ ├── config.spec.ts
│ │ │ ├── config.ts
│ │ │ ├── consts.tsx
│ │ │ ├── dataset.spec.ts
│ │ │ ├── dataset.ts
│ │ │ ├── date.spec.ts
│ │ │ ├── date.ts
│ │ │ └── error.ts
│ ├── firebase.ts
│ ├── i18n.ts
│ ├── index.css
│ ├── index.tsx
│ ├── react-app-env.d.ts
│ └── setupTests.ts
├── tsconfig.json
└── yarn.lock
└── workflows
├── gtfs_validator_execution.yml
└── reverse_geolocation_populate.yml
/.github/ISSUE_TEMPLATE/documentation_improvement.yml:
--------------------------------------------------------------------------------
1 | name: Documentation improvement
2 | description: Report a problem or suggest an improvement to our documentation
3 | labels: ['documentation','status: Needs triage']
4 |
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: >
9 | #### Before submitting this issue, please make sure there isn't already an [existing issue open](https://github.com/MobilityData/mobility-feed-api/issues?q=is%3Aopen+is%3Aissue+label%3Adocumentation).
10 | - type: textarea
11 | attributes:
12 | label: Describe the problem
13 | description: >
14 | What is the confusion or the problem with the documentation?
15 | validations:
16 | required: true
17 |
18 | - type: textarea
19 | attributes:
20 | label: Suggest a fix or an alternative
21 | description: |
22 | How can we improve our documentation in this regard?
23 | validations:
24 | required: false
25 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | **Summary:**
2 |
3 | Summarize the changes in the pull request, including how they relate to any issues (include the issue #number, or link them).
4 |
5 | **Expected behavior:**
6 |
7 | Explain how you expect the pull request to behave in your testing, and/or include screenshots (in case other devices exhibit different behavior).
8 |
9 | **Testing tips:**
10 |
11 | Provide tips, procedures, and sample files for testing the feature.
12 | Testers are invited to follow the tips AND to try anything they deem relevant outside the bounds of the testing tips.
13 |
14 | Please make sure these boxes are checked before submitting your pull request - thanks!
15 |
16 | - [ ] Run the unit tests with `./scripts/api-tests.sh` to make sure you didn't break anything
17 | - [ ] Add or update any needed documentation to the repo
18 | - [ ] Format the title like "feat: [new feature short description]". The title must follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/).
19 | - [ ] Link all relevant issues
20 | - [ ] Include screenshot(s) showing how this pull request works and fixes the issue(s)
21 |
--------------------------------------------------------------------------------
/.github/workflows/datasets-batch-deployer-prod.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Historical Batch Processing - PROD
2 |
3 | on:
4 | workflow_dispatch: # Supports manual deployment
5 | workflow_call:
6 |
7 | jobs:
8 | deploy:
9 | uses: ./.github/workflows/datasets-batch-deployer.yml
10 | with:
11 | STATE_BUCKET_NAME: 'mobility-feeds-terraform-prod'
12 | OBJECT_PREFIX: 'terraform-state-batch'
13 | PROJECT_ID: ${{ vars.PROD_MOBILITY_FEEDS_PROJECT_ID }}
14 | REGION: ${{ vars.MOBILITY_FEEDS_REGION }}
15 | ENVIRONMENT: 'prod'
16 | DEPLOYER_SERVICE_ACCOUNT: ${{ vars.PROD_MOBILITY_FEEDS_DEPLOYER_SERVICE_ACCOUNT }}
17 | # every day at 00:00 UTC
18 | JOB_SCHEDULE: '0 0 * * *'
19 | DATASETS_BUCKET_NAME: 'mobilitydata-datasets-prod'
20 | secrets:
21 | GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.PROD_GCP_MOBILITY_FEEDS_SA_KEY }}
22 |
--------------------------------------------------------------------------------
/.github/workflows/datasets-batch-deployer-qa.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Historical Batch Processing - QA
2 |
3 | on:
4 | workflow_dispatch: # Supports manual deployment
5 | workflow_call:
6 |
7 | jobs:
8 | deploy:
9 | uses: ./.github/workflows/datasets-batch-deployer.yml
10 | with:
11 | STATE_BUCKET_NAME: 'mobility-feeds-terraform-qa'
12 | OBJECT_PREFIX: 'terraform-state-batch'
13 | PROJECT_ID: ${{ vars.QA_MOBILITY_FEEDS_PROJECT_ID }}
14 | REGION: ${{ vars.MOBILITY_FEEDS_REGION }}
15 | ENVIRONMENT: 'qa'
16 | DEPLOYER_SERVICE_ACCOUNT: ${{ vars.QA_MOBILITY_FEEDS_DEPLOYER_SERVICE_ACCOUNT }}
17 | # every week on Monday at 00:00 UTC
18 | JOB_SCHEDULE: '0 0 * * 1'
19 | DATASETS_BUCKET_NAME: 'mobilitydata-datasets-qa'
20 | secrets:
21 | GCP_MOBILITY_FEEDS_SA_KEY: ${{ secrets.QA_GCP_MOBILITY_FEEDS_SA_KEY }}
22 |
--------------------------------------------------------------------------------
/.github/workflows/web-dev.yml:
--------------------------------------------------------------------------------
1 | name: Web App - DEV
2 | on:
3 | workflow_dispatch:
4 | workflow_call:
5 |
6 | jobs:
7 | deploy-web-app:
8 | name: Deploy Web App
9 | uses: ./.github/workflows/web-app-deployer.yml
10 | with:
11 | FIREBASE_PROJECT: dev
12 | FEED_SUBMIT_GOOGLE_SHEET_ID: "1iXwux9hM4p5Li1EGgwx-8hU3sMDnF15yTflqmGjiZqE"
13 | OP_SLACK_WEBHOOK_URL: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/wm52iemzzm2cwfaoakwaufthuq/password"
14 | secrets: inherit
15 |
--------------------------------------------------------------------------------
/.github/workflows/web-prod.yml:
--------------------------------------------------------------------------------
1 | name: Web App - PROD
2 | on:
3 | workflow_dispatch:
4 | workflow_call:
5 |
6 | jobs:
7 | deploy-web-app:
8 | name: Deploy Web App
9 | uses: ./.github/workflows/web-app-deployer.yml
10 | with:
11 | FIREBASE_PROJECT: prod
12 | REACT_APP_GOOGLE_ANALYTICS_ID: ${{ vars.PROD_REACT_APP_GOOGLE_ANALYTICS_ID }}
13 | FEED_SUBMIT_GOOGLE_SHEET_ID: "10eIUxWVtLmc2EATiwivgXBf4bOMErOnq7GFIoRedXHU"
14 | OP_SLACK_WEBHOOK_URL: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/Slack webhook URLs/rdpfgrmnbxqaelgi5oky3lryz4/internal-add-feeds"
15 | OP_ADD_FEED_FORM_GITHUB_TOKEN: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/cwzlqlspbw7goqjsdqu4b7matq/credential"
16 | secrets: inherit
--------------------------------------------------------------------------------
/.github/workflows/web-qa.yml:
--------------------------------------------------------------------------------
1 | name: Web App - QA
2 | on:
3 | workflow_dispatch:
4 | workflow_call:
5 |
6 | jobs:
7 | deploy-web-app:
8 | name: Deploy Web App
9 | uses: ./.github/workflows/web-app-deployer.yml
10 | with:
11 | FIREBASE_PROJECT: qa
12 | REACT_APP_GOOGLE_ANALYTICS_ID: ${{ vars.QA_REACT_APP_GOOGLE_ANALYTICS_ID }}
13 | FEED_SUBMIT_GOOGLE_SHEET_ID: "1GZeO3kFBFr073bSHuClhTiKt7KEad8vWM01Clo-rOVQ"
14 | OP_SLACK_WEBHOOK_URL: "op://rbiv7rvkkrsdlpcrz3bmv7nmcu/wm52iemzzm2cwfaoakwaufthuq/password"
15 | secrets: inherit
16 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: local
3 | hooks:
4 | - id: lint-tests
5 | name: lint-tests-sh
6 | entry: scripts/lint-tests.sh
7 | language: script
8 | pass_filenames: false
--------------------------------------------------------------------------------
/api/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | src/feeds_gen/*
5 | src/database_gen/*
6 |
7 | [report]
8 | exclude_lines =
9 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/api/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 | exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,venv,build,feeds_gen,database_gen
4 | # Ignored because conflict with black
5 | extend-ignore = E203
--------------------------------------------------------------------------------
/api/.openapi-generator/VERSION:
--------------------------------------------------------------------------------
1 | 7.5.0
2 |
--------------------------------------------------------------------------------
/api/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3.6'
2 | services:
3 | api:
4 | container_name: api
5 | build:
6 | context: .
7 | dockerfile: Dockerfile
8 | ports:
9 | - "8080:8080"
10 |
--------------------------------------------------------------------------------
/api/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [tool.black]
6 | line-length = 120
7 | extend-exclude = '''
8 | (
9 | /(
10 | \.eggs # exclude a few common directories in the
11 | | \.git # root of the project
12 | | \.hg
13 | | \.mypy_cache
14 | | \.tox
15 | | \.venv
16 | | .*/venv
17 | | _build
18 | | buck-out
19 | | build
20 | | dist
21 | | src/feeds_gen/*
22 | | src/shared/database_gen/*
23 | )/
24 | )
25 | '''
26 |
27 | [tool.isort]
28 | profile = "black"
29 | skip = [
30 | '.eggs', '.git', '.hg', '.mypy_cache', '.nox', '.pants.d', '.tox',
31 | '.venv', '_build', 'buck-out', 'build', 'dist', 'node_modules', 'venv',
32 | ]
33 | skip_gitignore = true
34 |
35 | [tool.pytest.ini_options]
36 | pythonpath = ["src"]
37 |
--------------------------------------------------------------------------------
/api/requirements.txt:
--------------------------------------------------------------------------------
1 | aiofiles==23.1.0
2 | aniso8601==7.0.0
3 | async-exit-stack==1.0.1
4 | async-generator==1.10
5 | certifi==2023.7.22
6 | chardet==4.0.0
7 | click==8.0.2
8 | dnspython==2.6.1
9 | email-validator==2.0.0
10 | fastapi==0.111.0
11 | google-auth
12 | google-cloud-logging==3.10.0
13 | graphene==2.1.8
14 | graphql-core==2.3.2
15 | graphql-relay==2.0.1
16 | h11==0.12.0
17 | httptools==0.6.0
18 | httpx==0.24.1
19 | idna==3.7
20 | itsdangerous==1.1.0
21 | Jinja2==3.1.4
22 | MarkupSafe==2.0.1
23 | orjson==3.10.6
24 | promise==2.3
25 | pydantic==2.4.0
26 | python-dotenv==1.0.0
27 | python-multipart==0.0.9
28 | PyYAML==6.0.1
29 | requests==2.32.2
30 | Rx==1.6.1
31 | six==1.16.0
32 | starlette==0.37.2
33 | typing-extensions==4.12.2
34 | ujson==5.5.0
35 | urllib3==1.26.19
36 | uvicorn==0.30.1
37 | uvloop==0.17.0
38 | watchgod==0.7
39 | websockets==12.0
40 | SQLAlchemy==2.0.23
41 | geoalchemy2==0.14.7
42 | psycopg2-binary==2.9.6
43 | sqlacodegen==3.0.0rc5
44 | pandas
45 | packaging==24.0
46 | cloud-sql-python-connector[pg8000]
47 | fastapi-filter[sqlalchemy]==1.0.0
48 | PyJWT
49 | shapely
50 | google-cloud-pubsub
51 | pycountry
--------------------------------------------------------------------------------
/api/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | # Testing
2 | pytest>=3.2.3
3 | flake8==6.0.0
4 | black==22.3.0
5 | pytest-mock
6 | pre-commit==3.4.0
7 | faker
8 | pytest-asyncio
--------------------------------------------------------------------------------
/api/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = feeds
3 | version = 0.1.0
4 | description = API as required in the _Proposed Version 1_ from the _Product Requirement Document for the Mobility Database_
5 | long_description = file: README.md
6 | keywords = OpenAPI Mobility Data Catalog API
7 | python_requires = >= 3.7
8 | classifiers =
9 | Operating System :: OS Independent
10 | Programming Language :: Python :: 3
11 | Programming Language :: Python :: 3.7
12 |
13 | [options]
14 | install_requires = fastapi[all]
15 | setup_requires = setuptools
16 | package_dir = =src
17 | packages = find_namespace:
18 |
19 | [options.packages.find]
20 | where = src
--------------------------------------------------------------------------------
/api/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/src/__init__.py
--------------------------------------------------------------------------------
/api/src/feeds/impl/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/src/feeds/impl/__init__.py
--------------------------------------------------------------------------------
/api/src/feeds/impl/models/bounding_box_impl.py:
--------------------------------------------------------------------------------
1 | from geoalchemy2 import WKBElement
2 | from geoalchemy2.shape import to_shape
3 |
4 | from feeds_gen.models.bounding_box import BoundingBox
5 |
6 |
7 | class BoundingBoxImpl(BoundingBox):
8 | """Implementation of the `BoundingBox` model.
9 | This class converts a SQLAlchemy geometry value to a Pydantic model.
10 | """
11 |
12 | class Config:
13 | """Pydantic configuration.
14 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
15 |
16 | from_attributes = True
17 |
18 | @classmethod
19 | def from_orm(cls, geometry_value: WKBElement | None) -> BoundingBox | None:
20 | """Create a model instance from a SQLAlchemy a WKBElement value."""
21 | if geometry_value is None or geometry_value.data is None:
22 | return None
23 | shape = to_shape(geometry_value)
24 | return BoundingBoxImpl(
25 | minimum_latitude=shape.bounds[1],
26 | maximum_latitude=shape.bounds[3],
27 | minimum_longitude=shape.bounds[0],
28 | maximum_longitude=shape.bounds[2],
29 | )
30 |
--------------------------------------------------------------------------------
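The `from_orm` conversion above is easiest to see with a concrete geometry. A minimal sketch, assuming `api/src` is on the Python path, the generated `feeds_gen` models are available, and using a hypothetical bounding polygon:

from geoalchemy2.shape import from_shape
from shapely.geometry import box

from feeds.impl.models.bounding_box_impl import BoundingBoxImpl

# Hypothetical extent roughly covering the Montreal area (lon/lat order).
geometry = from_shape(box(-73.9, 45.4, -73.4, 45.7), srid=4326)

bbox = BoundingBoxImpl.from_orm(geometry)
# shapely bounds are (minx, miny, maxx, maxy), so:
assert bbox.minimum_longitude == -73.9 and bbox.maximum_latitude == 45.7
assert BoundingBoxImpl.from_orm(None) is None  # missing geometry maps to None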
/api/src/feeds/impl/models/entity_type_enum.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class EntityType(Enum):
5 | """
6 | Enum for the entity type
7 | """
8 |
9 | VP = "vp"
10 | SA = "sa"
11 | TU = "tu"
12 |
--------------------------------------------------------------------------------
/api/src/feeds/impl/models/external_id_impl.py:
--------------------------------------------------------------------------------
1 | from shared.database_gen.sqlacodegen_models import Externalid
2 | from feeds_gen.models.external_id import ExternalId
3 |
4 |
5 | class ExternalIdImpl(ExternalId):
6 | """Implementation of the `ExternalId` model.
7 | This class converts a SQLAlchemy row DB object to a Pydantic model.
8 | """
9 |
10 | class Config:
11 | """Pydantic configuration.
12 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
13 |
14 | from_attributes = True
15 |
16 | @classmethod
17 | def from_orm(cls, external_id: Externalid | None) -> ExternalId | None:
18 | if not external_id:
19 | return None
20 | return cls(
21 | external_id=external_id.associated_id,
22 | source=external_id.source,
23 | )
24 |
--------------------------------------------------------------------------------
/api/src/feeds/impl/models/feed_impl.py:
--------------------------------------------------------------------------------
1 | from feeds.impl.models.basic_feed_impl import BaseFeedImpl
2 | from feeds_gen.models.feed import Feed
3 | from shared.database_gen.sqlacodegen_models import Feed as FeedOrm
4 |
5 |
6 | class FeedImpl(BaseFeedImpl, Feed):
7 | """Base implementation of the feeds models.
8 | This class converts a SQLAlchemy row DB object with common feed fields to a Pydantic model.
9 | """
10 |
11 | class Config:
12 | """Pydantic configuration.
13 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
14 |
15 | from_attributes = True
16 |
17 | @classmethod
18 | def from_orm(cls, feed_orm: FeedOrm | None) -> Feed | None:
19 | feed: Feed = super().from_orm(feed_orm)
20 | if not feed:
21 | return None
22 | feed.status = feed_orm.status
23 | feed.official = feed_orm.official
24 | feed.official_updated_at = feed_orm.official_updated_at
25 | feed.feed_name = feed_orm.feed_name
26 | feed.note = feed_orm.note
27 | return feed
28 |
--------------------------------------------------------------------------------
/api/src/feeds/impl/models/gbfs_endpoint_impl.py:
--------------------------------------------------------------------------------
1 | from feeds_gen.models.gbfs_endpoint import GbfsEndpoint
2 | from shared.database_gen.sqlacodegen_models import Gbfsendpoint as GbfsEndpointOrm
3 |
4 |
5 | class GbfsEndpointImpl(GbfsEndpoint):
6 | """Implementation of the `GtfsFeed` model.
7 | This class converts a SQLAlchemy row DB object to a Pydantic model.
8 | """
9 |
10 | class Config:
11 | """Pydantic configuration.
12 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
13 |
14 | from_attributes = True
15 |
16 | @classmethod
17 | def from_orm(cls, endpoint: GbfsEndpointOrm | None) -> GbfsEndpoint | None:
18 | if not endpoint:
19 | return None
20 | return cls(name=endpoint.name, url=endpoint.url, language=endpoint.language, is_feature=endpoint.is_feature)
21 |
--------------------------------------------------------------------------------
/api/src/feeds/impl/models/gbfs_validation_report_impl.py:
--------------------------------------------------------------------------------
1 | from feeds_gen.models.gbfs_version import GbfsValidationReport
2 | from shared.database_gen.sqlacodegen_models import Gbfsvalidationreport as GbfsValidationReportOrm
3 |
4 |
5 | class GbfsValidationReportImpl(GbfsValidationReport):
6 | """Implementation of the `GtfsFeed` model.
7 | This class converts a SQLAlchemy row DB object to a Pydantic model.
8 | """
9 |
10 | class Config:
11 | """Pydantic configuration.
12 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
13 |
14 | from_attributes = True
15 |
16 | @classmethod
17 | def from_orm(cls, validation_report: GbfsValidationReportOrm | None) -> GbfsValidationReport | None:
18 | if not validation_report:
19 | return None
20 | return cls(
21 | validated_at=validation_report.validated_at,
22 | total_error=validation_report.total_errors_count,
23 | report_summary_url=validation_report.report_summary_url,
24 | validator_version=validation_report.validator_version,
25 | )
26 |
--------------------------------------------------------------------------------
/api/src/feeds/impl/models/gtfs_rt_feed_impl.py:
--------------------------------------------------------------------------------
1 | from feeds.impl.models.feed_impl import FeedImpl
2 | from shared.database_gen.sqlacodegen_models import Gtfsrealtimefeed as GtfsRTFeedOrm
3 | from feeds.impl.models.location_impl import LocationImpl
4 | from feeds_gen.models.gtfs_rt_feed import GtfsRTFeed
5 |
6 |
7 | class GtfsRTFeedImpl(FeedImpl, GtfsRTFeed):
8 | """Implementation of the 'Gtfsrealtimefeed' model."""
9 |
10 | class Config:
11 | """Pydantic configuration.
12 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
13 |
14 | from_attributes = True
15 |
16 | @classmethod
17 | def from_orm(cls, feed: GtfsRTFeedOrm | None) -> GtfsRTFeed | None:
18 | gtfs_rt_feed: GtfsRTFeed = super().from_orm(feed)
19 | if not gtfs_rt_feed:
20 | return None
21 | gtfs_rt_feed.locations = [LocationImpl.from_orm(item) for item in feed.locations]
22 | gtfs_rt_feed.entity_types = [item.name for item in feed.entitytypes]
23 | gtfs_rt_feed.feed_references = [item.stable_id for item in feed.gtfs_feeds]
24 | return gtfs_rt_feed
25 |
--------------------------------------------------------------------------------
/api/src/feeds/impl/models/location_impl.py:
--------------------------------------------------------------------------------
1 | from feeds_gen.models.location import Location
2 | import pycountry
3 | from shared.database_gen.sqlacodegen_models import Location as LocationOrm
4 |
5 |
6 | class LocationImpl(Location):
7 | class Config:
8 | """Pydantic configuration.
9 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
10 |
11 | from_attributes = True
12 |
13 | @classmethod
14 | def from_orm(cls, location: LocationOrm | None) -> Location | None:
15 | """Create a model instance from a SQLAlchemy a Location row object."""
16 | if not location:
17 | return None
18 | country_name = location.country
19 | if not country_name:
20 | try:
21 | country_name = pycountry.countries.get(alpha_2=location.country_code).name
22 | except AttributeError:
23 | pass
24 | return cls(
25 | country_code=location.country_code,
26 | country=country_name,
27 | subdivision_name=location.subdivision_name,
28 | municipality=location.municipality,
29 | )
30 |
--------------------------------------------------------------------------------
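The `except AttributeError` above guards the pycountry fallback: `countries.get` returns None for an unknown alpha-2 code, so accessing `.name` raises. A quick illustration of that behavior (pycountry only, no ORM objects needed):

import pycountry

print(pycountry.countries.get(alpha_2="CA").name)  # "Canada"

# Unknown codes return None, so `.name` raises AttributeError,
# which from_orm swallows, leaving country_name as None.
assert pycountry.countries.get(alpha_2="ZZ") is None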
/api/src/feeds/impl/models/redirect_impl.py:
--------------------------------------------------------------------------------
1 | from shared.database_gen.sqlacodegen_models import Redirectingid
2 | from feeds_gen.models.redirect import Redirect
3 |
4 |
5 | class RedirectImpl(Redirect):
6 | """Implementation of the `Redirect` model.
7 | This class converts a SQLAlchemy row DB object to a Pydantic model.
8 | """
9 |
10 | class Config:
11 | """Pydantic configuration.
12 | Enabling `from_attributes` method to create a model instance from a SQLAlchemy row object."""
13 |
14 | from_attributes = True
15 |
16 | @classmethod
17 | def from_orm(cls, redirect: Redirectingid | None) -> Redirect | None:
18 | if not redirect:
19 | return None
20 | return cls(
21 | target_id=redirect.target.stable_id,
22 | comment=redirect.redirect_comment,
23 | )
24 |
--------------------------------------------------------------------------------
/api/src/scripts/gbfs_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/src/scripts/gbfs_utils/__init__.py
--------------------------------------------------------------------------------
/api/src/scripts/gbfs_utils/license.py:
--------------------------------------------------------------------------------
1 | LICENSE_URL_MAP = {
2 | "CC0-1.0": "https://creativecommons.org/publicdomain/zero/1.0/",
3 | "CC-BY-4.0": "https://creativecommons.org/licenses/by/4.0/",
4 | "CDLA-Permissive-1.0": "https://cdla.io/permissive-1-0/",
5 | "ODC-By-1.0": "https://www.opendatacommons.org/licenses/by/1.0/",
6 | }
7 |
8 | DEFAULT_LICENSE_URL = "https://creativecommons.org/licenses/by/4.0/"
9 |
10 |
11 | def get_license_url(system_info, logger):
12 | """Get the license URL from the system information."""
13 | try:
14 | if system_info is None:
15 | return None
16 |
17 | # Fetching license_url or license_id
18 | license_url = system_info.get("license_url")
19 | if not license_url:
20 | license_id = system_info.get("license_id")
21 | if license_id:
22 | return LICENSE_URL_MAP.get(license_id, DEFAULT_LICENSE_URL)
23 | return DEFAULT_LICENSE_URL
24 | return license_url
25 | except Exception as e:
26 | logger.error(f"Error fetching license url data from system info {system_info}: \n{e}")
27 | return None
28 |
--------------------------------------------------------------------------------
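A short usage sketch for `get_license_url`, assuming the module is importable as below (with `api/src` on the path); the dictionaries stand in for GBFS `system_information` payloads:

import logging

from scripts.gbfs_utils.license import get_license_url

logger = logging.getLogger(__name__)

# An explicit license_url wins over license_id.
assert get_license_url({"license_url": "https://example.com/l"}, logger) == "https://example.com/l"
# A known license_id is mapped; unknown ids and missing fields fall back to CC-BY-4.0.
assert get_license_url({"license_id": "CC0-1.0"}, logger) == "https://creativecommons.org/publicdomain/zero/1.0/"
assert get_license_url({}, logger) == "https://creativecommons.org/licenses/by/4.0/"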
/api/src/shared/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/src/shared/__init__.py
--------------------------------------------------------------------------------
/api/src/shared/common/entity_type_enum.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class EntityType(Enum):
5 | """
6 | Enum for the entity type
7 | """
8 |
9 | VP = "vp"
10 | SA = "sa"
11 | TU = "tu"
12 |
--------------------------------------------------------------------------------
/api/src/shared/common/iter_utils.py:
--------------------------------------------------------------------------------
1 | from itertools import islice
2 |
3 |
4 | def batched(iterable, n):
5 | """
6 | Batch an iterable into tuples of length `n`. The last batch may be shorter.
7 |
8 | Based on the implementation in more-itertools; Python 3.12+ provides it
9 | as the built-in itertools.batched.
10 | """
11 | it = iter(iterable)
12 | while batch := tuple(islice(it, n)):
13 | yield batch
14 |
--------------------------------------------------------------------------------
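`batched` behaves like the `itertools.batched` added in Python 3.12; for example, assuming `api/src` is on the path:

from shared.common.iter_utils import batched

assert list(batched(range(7), 3)) == [(0, 1, 2), (3, 4, 5), (6,)]
assert list(batched([], 3)) == []  # empty input yields no batches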
/api/src/shared/common/logging_utils.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 |
4 |
5 | def get_env_logging_level():
6 | """
7 | Get the logging level from the environment via OS variable LOGGING_LEVEL. Returns INFO if not set.
8 | """
9 | return logging.getLevelName(os.getenv("LOGGING_LEVEL", "INFO"))
10 |
--------------------------------------------------------------------------------
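Note that `logging.getLevelName` maps a level name to its numeric value when given a string, so the helper plugs straight into `basicConfig`. A minimal sketch, assuming `api/src` is on the path:

import logging
import os

from shared.common.logging_utils import get_env_logging_level

os.environ["LOGGING_LEVEL"] = "DEBUG"
assert get_env_logging_level() == logging.DEBUG  # getLevelName("DEBUG") -> 10
logging.basicConfig(level=get_env_logging_level())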
/api/src/shared/database/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/src/shared/database/__init__.py
--------------------------------------------------------------------------------
/api/src/shared/database/sql_functions/unaccent.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.sql.functions import ReturnTypeFromArgs
2 |
3 |
4 | class unaccent(ReturnTypeFromArgs):
5 | """
6 | This class represents the `unaccent` function in the database.
7 | This function is used to remove accents from a string.
8 | More documentation can be found at https://www.postgresql.org/docs/current/unaccent.html.
9 | Be aware that this function is not available in all databases nor in all versions of PostgreSQL.
10 | """
11 |
12 | pass
13 |
--------------------------------------------------------------------------------
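In a query, `unaccent` wraps a column (or a literal) so matching can be accent-insensitive on both sides. A sketch, assuming the generated `Feed` model is importable and PostgreSQL's unaccent extension is installed:

from sqlalchemy import select

from shared.database.sql_functions.unaccent import unaccent
from shared.database_gen.sqlacodegen_models import Feed  # generated model, assumed available

# Accent-insensitive provider search: unaccent both the column and the needle.
stmt = select(Feed).where(unaccent(Feed.provider).ilike(unaccent("%montréal%")))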
/api/src/shared/feed_filters/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/src/shared/feed_filters/__init__.py
--------------------------------------------------------------------------------
/api/src/shared/feed_filters/feed_filter.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from fastapi_filter.contrib.sqlalchemy import Filter
4 |
5 | from shared.database_gen.sqlacodegen_models import Feed
6 | from shared.feed_filters.param_utils import normalize_str_parameter
7 |
8 |
9 | class FeedFilter(Filter):
10 | status: Optional[str]
11 | stable_id: Optional[str]
12 | provider__ilike: Optional[str] # case insensitive
13 | producer_url__ilike: Optional[str] # case insensitive
14 |
15 | def __init__(self, *args, **kwargs):
16 | kwargs_normalized = normalize_str_parameter("status", **kwargs)
17 | kwargs_normalized = normalize_str_parameter("stable_id", **kwargs_normalized)
18 | kwargs_normalized = normalize_str_parameter("provider__ilike", **kwargs_normalized)
19 | kwargs_normalized = normalize_str_parameter("producer_url__ilike", **kwargs_normalized)
20 | super().__init__(*args, **kwargs_normalized)
21 |
22 | class Constants(Filter.Constants):
23 | model = Feed
24 |
--------------------------------------------------------------------------------
/api/src/shared/feed_filters/gbfs_feed_filter.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from fastapi_filter.contrib.sqlalchemy import Filter
4 |
5 | from shared.database_gen.sqlacodegen_models import Gbfsfeed, Gbfsversion
6 | from shared.feed_filters.gtfs_feed_filter import LocationFilter
7 |
8 |
9 | class GbfsVersionFilter(Filter):
10 | version: Optional[str]
11 |
12 | class Constants(Filter.Constants):
13 | model = Gbfsversion
14 |
15 |
16 | class GbfsFeedFilter(Filter):
17 | stable_id: Optional[str] = None
18 | provider__ilike: Optional[str] = None # case-insensitive
19 | producer_url__ilike: Optional[str] = None # case-insensitive
20 | location: Optional[LocationFilter] = None
21 | system_id: Optional[str] = None
22 | version: Optional[GbfsVersionFilter] = None
23 |
24 | class Constants(Filter.Constants):
25 | model = Gbfsfeed
26 |
--------------------------------------------------------------------------------
/api/src/shared/feed_filters/gtfs_dataset_filter.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 | from datetime import datetime
3 | from fastapi_filter.contrib.sqlalchemy import Filter
4 |
5 | from shared.database_gen.sqlacodegen_models import Gtfsdataset
6 | from shared.feed_filters.param_utils import normalize_str_parameter
7 |
8 |
9 | class GtfsDatasetFilter(Filter):
10 | downloaded_at__lte: Optional[datetime]
11 | downloaded_at__gte: Optional[datetime]
12 |
13 | def __init__(self, *args, **kwargs):
14 | kwargs_normalized = normalize_str_parameter("downloaded_at__lte", **kwargs)
15 | kwargs_normalized = normalize_str_parameter("downloaded_at__gte", **kwargs_normalized)
16 | super().__init__(*args, **kwargs_normalized)
17 |
18 | class Constants(Filter.Constants):
19 | model = Gtfsdataset
20 |
--------------------------------------------------------------------------------
/api/src/shared/feed_filters/gtfs_feed_filter.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from fastapi_filter.contrib.sqlalchemy import Filter
4 |
5 | from shared.database_gen.sqlacodegen_models import Location, Gtfsfeed
6 | from shared.feed_filters.param_utils import normalize_str_parameter
7 |
8 |
9 | class LocationFilter(Filter):
10 | country_code: Optional[str]
11 | subdivision_name__ilike: Optional[str]
12 | municipality__ilike: Optional[str]
13 |
14 | def __init__(self, *args, **kwargs):
15 | kwargs_normalized = normalize_str_parameter("country_code", **kwargs)
16 | kwargs_normalized = normalize_str_parameter("subdivision_name__ilike", **kwargs_normalized)
17 | kwargs_normalized = normalize_str_parameter("municipality__ilike", **kwargs_normalized)
18 | super().__init__(*args, **kwargs_normalized)
19 |
20 | class Constants(Filter.Constants):
21 | model = Location
22 |
23 |
24 | class GtfsFeedFilter(Filter):
25 | stable_id: Optional[str]
26 | provider__ilike: Optional[str] # case insensitive
27 | producer_url__ilike: Optional[str] # case insensitive
28 | location: Optional[LocationFilter]
29 |
30 | class Constants(Filter.Constants):
31 | model = Gtfsfeed
32 |
--------------------------------------------------------------------------------
/api/src/shared/feed_filters/param_utils.py:
--------------------------------------------------------------------------------
1 | def normalize_str_parameter(param_name, **kwargs):
2 | """
3 | Process a parameter in the kwargs dictionary, stripping it if it is a string and setting empty strings to None.
4 | """
5 | if param_name in kwargs:
6 | value = kwargs[param_name]
7 | if isinstance(value, str):
8 | stripped_value = value.strip()
9 | kwargs[param_name] = None if stripped_value == "" else stripped_value
10 | return kwargs
11 |
--------------------------------------------------------------------------------
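`normalize_str_parameter` is what lets the filters above treat whitespace-only query parameters as absent. For example, assuming `api/src` is on the path:

from shared.feed_filters.param_utils import normalize_str_parameter

assert normalize_str_parameter("status", status="  active ") == {"status": "active"}
assert normalize_str_parameter("status", status="   ") == {"status": None}
assert normalize_str_parameter("status", other=1) == {"other": 1}  # untouched when absent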
/api/src/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/src/utils/__init__.py
--------------------------------------------------------------------------------
/api/src/utils/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Final, Optional
3 |
4 | PROJECT_ID: Final[str] = "PROJECT_ID"
5 |
6 |
7 | def get_config(key: str, default_value: Optional[str] = None) -> Optional[str]:
8 | """
9 | Get the value of an environment variable
10 | """
11 | return os.getenv(key, default_value)
12 |
--------------------------------------------------------------------------------
/api/src/utils/data_utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def set_up_defaults(df):
5 | """
6 | Updates the dataframe to match types defined in the database
7 | @param df: Dataframe to update
8 | """
9 | df.status = df.status.fillna("active")
10 | df["urls.authentication_type"] = df["urls.authentication_type"].fillna(0)
11 | df["features"] = df["features"].fillna("")
12 | df["entity_type"] = df["entity_type"].fillna("")
13 | df["location.country_code"] = df["location.country_code"].fillna("")
14 | df["location.subdivision_name"] = df["location.subdivision_name"].fillna("")
15 | df["location.municipality"] = df["location.municipality"].fillna("")
16 | df = df.replace(np.nan, None)
17 | df = df.replace("gtfs-rt", "gtfs_rt")
18 | df["location.country_code"] = df["location.country_code"].replace("unknown", "")
19 | df["location.subdivision_name"] = df["location.subdivision_name"].replace("unknown", "")
20 | df["location.municipality"] = df["location.municipality"].replace("unknown", "")
21 | return df
22 |
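Note: a minimal sketch of the expected input, not part of the repository — column names follow the sources CSV, values are illustrative:

    import pandas as pd

    df = pd.DataFrame([{
        "status": None, "urls.authentication_type": None, "features": None,
        "entity_type": None, "location.country_code": "unknown",
        "location.subdivision_name": None, "location.municipality": None,
    }])
    df = set_up_defaults(df)
    # status -> "active", urls.authentication_type -> 0, location.country_code -> ""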
--------------------------------------------------------------------------------
/api/src/utils/date_utils.py:
--------------------------------------------------------------------------------
1 | from typing import Final, Optional
2 | import re
3 |
4 | iso_pattern: Final[str] = (
5 |     r"^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01])"
6 |     r"T([01]\d|2[0-3]):([0-5]\d):([0-5]\d)(\.\d+)?"
7 |     r"(Z|[+-]([01]\d|2[0-3]):([0-5]\d))?$"
8 | )
9 |
10 |
11 | def valid_iso_date(date_string: Optional[str]) -> bool:
12 | """Check if a date string is a valid ISO 8601 date format."""
13 | # Validators are not required to check for None or empty strings
14 | if date_string is None or date_string.strip() == "":
15 | return True
16 | return re.match(iso_pattern, date_string) is not None
17 |
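Note: a few illustrative checks against the pattern, not part of the repository:

    assert valid_iso_date("2024-01-31T23:59:59Z")
    assert valid_iso_date("2024-01-31T23:59:59.123+05:30")
    assert valid_iso_date(None)              # None/empty are accepted by design
    assert not valid_iso_date("2024-01-31")  # a date without a time component does not match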
--------------------------------------------------------------------------------
/api/src/utils/dict_utils.py:
--------------------------------------------------------------------------------
1 | def get_safe_value(dictionary: dict, field_name, default_value=None):
2 | """
3 | Get a value from a dictionary safely, returning a default value if the field is not present
4 | @param dictionary: Dictionary to get the value from
5 | @param field_name: Name of the field to get
6 | @param default_value: Default value to return if the field is not present
7 | @return: Value of the field or the default value if the field is not present
8 | """
9 | return dictionary[field_name] if field_name in dictionary else default_value
10 |
--------------------------------------------------------------------------------
/api/src/utils/model_utils.py:
--------------------------------------------------------------------------------
1 | from packaging.version import Version
2 |
3 |
4 | def compare_java_versions(v1: str | None, v2: str | None) -> int:
5 | """
6 | Compare two version strings v1 and v2.
7 | Returns 1 if v1 > v2, -1 if v1 < v2,
8 | otherwise 0.
9 | The version strings are expected to be in the format of
10 | major.minor.patch[-SNAPSHOT]
11 | """
12 | if v1 is None and v2 is None:
13 | return 0
14 | if v1 is None:
15 | return -1
16 | if v2 is None:
17 | return 1
18 | # clean version strings replacing the SNAPSHOT suffix with .dev0
19 | v1 = v1.replace("-SNAPSHOT", ".dev0")
20 | v2 = v2.replace("-SNAPSHOT", ".dev0")
21 | if Version(v1) > Version(v2):
22 | return 1
23 | elif Version(v1) < Version(v2):
24 | return -1
25 | else:
26 | return 0
27 |
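Note: illustrative comparisons, not part of the repository — "-SNAPSHOT" maps to a PEP 440 dev release, which sorts before the final release:

    assert compare_java_versions("5.0.1", "5.0.0") == 1
    assert compare_java_versions("5.0.0-SNAPSHOT", "5.0.0") == -1
    assert compare_java_versions(None, "1.0.0") == -1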
--------------------------------------------------------------------------------
/api/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/tests/__init__.py
--------------------------------------------------------------------------------
/api/tests/integration/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 | from fastapi import FastAPI
5 | from fastapi.testclient import TestClient
6 |
7 | from shared.database.database import Database
8 | from main import app as application
9 | from tests.test_utils.database import populate_database
10 |
11 |
12 | @pytest.fixture(scope="package")
13 | def app() -> FastAPI:
14 | application.dependency_overrides = {}
15 | return application
16 |
17 |
18 | @pytest.fixture(scope="package")
19 | def test_database():
20 | # Restrict the tests to the test database
21 | os.environ["FEEDS_DATABASE_URL"] = "postgresql://postgres:postgres@localhost:54320/MobilityDatabaseTest"
22 |
23 | current_path = os.path.dirname(os.path.abspath(__file__))
24 |
25 | data_dirs = [current_path + "/../test_data", current_path + "/test_data"]
26 | with populate_database(Database(), data_dirs) as db:
27 | yield db
28 |
29 |
30 | @pytest.fixture(scope="package")
31 | def client(app, test_database) -> TestClient:
32 | return TestClient(app)
33 |
--------------------------------------------------------------------------------
/api/tests/integration/populate_tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 | from fastapi import FastAPI
5 | from fastapi.testclient import TestClient
6 |
7 | from shared.database.database import Database
8 | from main import app as application
9 | from tests.test_utils.database import populate_database
10 |
11 |
12 | @pytest.fixture(scope="package")
13 | def app() -> FastAPI:
14 | application.dependency_overrides = {}
15 | return application
16 |
17 |
18 | @pytest.fixture(scope="package")
19 | def test_database():
20 | # Restrict the tests to the test database
21 | os.environ["FEEDS_DATABASE_URL"] = "postgresql://postgres:postgres@localhost:54320/MobilityDatabaseTest"
22 |
23 | current_path = os.path.dirname(os.path.abspath(__file__))
24 |
25 | data_dirs = [current_path + "/../../test_data", current_path + "/../test_data", current_path + "/test_data"]
26 | with populate_database(Database(), data_dirs) as db:
27 | yield db
28 |
29 |
30 | @pytest.fixture(scope="package")
31 | def client(app, test_database) -> TestClient:
32 | return TestClient(app)
33 |
--------------------------------------------------------------------------------
/api/tests/integration/test_metadata_api.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | import os
3 |
4 | from fastapi.testclient import TestClient
5 |
6 | from tests.test_utils.token import authHeaders
7 |
8 |
9 | def test_metadata_get(client: TestClient):
10 | """Test case for metadata_get"""
11 | test_hash = "1234567890123456789012345678901234567890"
12 | test_version = "v1.2.3"
13 | version_info_path = os.path.join(os.path.dirname(__file__), "../../src/version_info")
14 | with open(version_info_path, "w") as file:
15 | file.write("[DEFAULT]\n")
16 | file.write(f"LONG_COMMIT_HASH={test_hash}\n")
17 | file.write(f"SHORT_COMMIT_HASH={test_hash[:7]}\n")
18 | file.write(f"EXTRACTED_VERSION={test_version}")
19 |
20 | response = client.request(
21 | "GET",
22 | "/v1/metadata",
23 | headers=authHeaders,
24 | )
25 |
26 | # Validate that the response reads from version_info
27 | assert response.json()["commit_hash"] == test_hash
28 | assert response.json()["version"] == test_version
29 |
30 | assert response.status_code == 200
31 |
--------------------------------------------------------------------------------
/api/tests/test_data/sources_test.csv:
--------------------------------------------------------------------------------
1 | mdb_source_id,data_type,entity_type,location.country_code,location.subdivision_name,location.municipality,provider,is_official,name,note,feed_contact_email,static_reference,urls.direct_download,urls.authentication_type,urls.authentication_info,urls.api_key_parameter_name,urls.latest,urls.license,location.bounding_box.minimum_latitude,location.bounding_box.maximum_latitude,location.bounding_box.minimum_longitude,location.bounding_box.maximum_longitude,location.bounding_box.extracted_on,status,features,redirect.id,redirect.comment
2 | 1,gtfs,,US,mdb-1-subdivision,mdb-1-municipality,mdb-1-MobilityDataTest provider,,mdb-1-MobilityDataTest Feed Name,,,,,0,,,,,,,,,,active,Route Colors|Bike Allowed|Headsigns|Sandra Reeves,10|20,
3 | 10,gtfs,,US,mdb-10-subdivision,mdb-10-municipality,mdb-10-MobilityDataTest provider,,mdb-10-MobilityDataTest Feed Name,,,,,0,,,,,,,,,,active,,20|30,
4 | 20,gtfs,,US,mdb-20-subdivision,mdb-20-municipality,mdb-20-MobilityDataTest provider,,mdb-20-MobilityDataTest Feed Name,,,,,0,,,,,,,,,,active,,,
5 | 30,gtfs,,US,mdb-30-subdivision,mdb-30-municipality,mdb-30-MobilityDataTest provider,,mdb-30-MobilityDataTest Feed Name,,,,,0,,,,,,,,,,active,,,
6 | 1561,gtfs-rt,vp,US,,,,,,,,1,,0,,,,,,,,,,active,,10,
--------------------------------------------------------------------------------
/api/tests/test_data/systems_test.csv:
--------------------------------------------------------------------------------
1 | Country Code,Name,Location,System ID,URL,Auto-Discovery URL,Supported Versions,Authentication Info URL,Authentication Type,Authentication Parameter Name
2 | CA,Provider Name 1,Montreal,system_id_1,https://www.example.com/gbfs_feed_1/,https://www.example.com/gbfs_feed_1/gbfs.json,1.1 ; 2.3,,,
3 | CA,Provider Name 1,Laval,system_id_2,https://www.example.com/gbfs_feed_2/,https://www.example.com/gbfs_feed_2/gbfs.json,2.3,,,
4 | CA,Provider Name 2,Montreal,system_id_3,https://www.example.com/gbfs_feed_3/,https://www.example.com/gbfs_feed_3/gbfs.json,3.0,,,
--------------------------------------------------------------------------------
/api/tests/test_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/api/tests/test_utils/__init__.py
--------------------------------------------------------------------------------
/api/tests/test_utils/cryptography.py:
--------------------------------------------------------------------------------
1 | #
2 | # MobilityData 2023
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | # This file provides a public and private key pair for testing purposes
17 |
18 | from cryptography.hazmat.primitives.asymmetric import rsa
19 |
20 |
21 | def generate_public_private_key_pair():
22 | """Generates a public/private key pair for testing purposes"""
23 | pr_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
24 | pb_key = pr_key.public_key()
25 | return pb_key, pr_key
26 |
27 |
28 | (public_key, private_key) = generate_public_private_key_pair()
29 |
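Note: a minimal sketch of consuming the pair in a test, not part of the repository — PEM export uses the cryptography serialization API:

    from cryptography.hazmat.primitives import serialization

    from tests.test_utils.cryptography import public_key

    pem = public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo,
    )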
--------------------------------------------------------------------------------
/api/tests/unittest/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 | from fastapi import FastAPI
5 | from fastapi.testclient import TestClient
6 |
7 | from shared.database.database import Database
8 | from main import app as application
9 | from tests.test_utils.database import populate_database
10 |
11 |
12 | @pytest.fixture(scope="package")
13 | def app() -> FastAPI:
14 | application.dependency_overrides = {}
15 | return application
16 |
17 |
18 | @pytest.fixture(scope="package")
19 | def test_database():
20 |
21 | # Restrict the tests to the test database
22 | os.environ["FEEDS_DATABASE_URL"] = "postgresql://postgres:postgres@localhost:54320/MobilityDatabaseTest"
23 |
24 | current_path = os.path.dirname(os.path.abspath(__file__))
25 | data_dirs = [current_path + "/../test_data"]
26 |
27 | with populate_database(Database(), data_dirs) as db:
28 | yield db
29 |
30 |
31 | @pytest.fixture(scope="package")
32 | def client(app, test_database) -> TestClient:
33 | return TestClient(app)
34 |
--------------------------------------------------------------------------------
/api/tests/unittest/models/test_bounding_box_impl.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from geoalchemy2 import WKTElement
4 |
5 | from feeds.impl.models.bounding_box_impl import BoundingBoxImpl
6 | from feeds_gen.models.bounding_box import BoundingBox
7 |
8 | POLYGON = "POLYGON ((3.0 1.0, 4.0 1.0, 4.0 2.0, 3.0 2.0, 3.0 1.0))"
9 |
10 |
11 | class TestBoundingBoxImpl(unittest.TestCase):
12 | def test_from_orm(self):
13 | result: BoundingBox = BoundingBoxImpl.from_orm(WKTElement(POLYGON, srid=4326))
14 | assert result.minimum_latitude == 1.0
15 | assert result.maximum_latitude == 2.0
16 | assert result.minimum_longitude == 3.0
17 | assert result.maximum_longitude == 4.0
18 |
19 | assert BoundingBoxImpl.from_orm(None) is None
20 |
--------------------------------------------------------------------------------
/api/tests/unittest/models/test_location_impl.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from feeds.impl.models.location_impl import LocationImpl
4 | from shared.database_gen.sqlacodegen_models import Location as LocationOrm
5 |
6 |
7 | class TestLocationImpl(unittest.TestCase):
8 | def test_from_orm(self):
9 | result = LocationImpl.from_orm(
10 | LocationOrm(
11 | country_code="US", subdivision_name="California", municipality="Los Angeles", country="United States"
12 | )
13 | )
14 | assert result == LocationImpl(
15 | country_code="US", country="United States", subdivision_name="California", municipality="Los Angeles"
16 | )
17 |
18 | assert LocationImpl.from_orm(None) is None
19 |
--------------------------------------------------------------------------------
/api/tests/unittest/test_param_utils.py:
--------------------------------------------------------------------------------
1 | from shared.feed_filters.param_utils import normalize_str_parameter
2 |
3 |
4 | def test_normalize_str_parameter():
5 | """
6 | Test the normalize_str_parameter function.
7 | """
8 | # Test with empty string
9 | assert normalize_str_parameter("downloaded_at__lte", downloaded_at__lte=" ") == {"downloaded_at__lte": None}
10 | # Test with non-empty string
11 | assert normalize_str_parameter("downloaded_at__lte", downloaded_at__lte=" 2021-01-01 ") == {
12 | "downloaded_at__lte": "2021-01-01"
13 | }
14 | # Test with non-str parameter
15 | assert normalize_str_parameter("counter", counter=1, downloaded_at__lte=" 2021-01-01 ") == {
16 | "counter": 1,
17 | "downloaded_at__lte": " 2021-01-01 ",
18 | }
19 | # Test with non-existing parameter
20 | assert normalize_str_parameter("counter", downloaded_at__lte=" 2021-01-01 ") == {
21 | "downloaded_at__lte": " 2021-01-01 "
22 | }
23 |
--------------------------------------------------------------------------------
/api/tests/utils/test_dict_utils.py:
--------------------------------------------------------------------------------
1 | def test_get_safe_value_with_field_present():
2 | from src.utils.dict_utils import get_safe_value
3 |
4 | dictionary = {"field": "value"}
5 | field_name = "field"
6 | default_value = "default"
7 | assert get_safe_value(dictionary, field_name, default_value) == "value"
8 | assert get_safe_value(dictionary, field_name, None) == "value"
9 |
10 |
11 | def test_get_safe_value_with_field_not_present():
12 | from src.utils.dict_utils import get_safe_value
13 |
14 | dictionary = {"field": "value"}
15 | field_name = "not_field"
16 | default_value = "default"
17 | assert get_safe_value(dictionary, field_name, default_value) == "default"
18 | assert get_safe_value(dictionary, field_name) is None
19 |
--------------------------------------------------------------------------------
/config/.env.local:
--------------------------------------------------------------------------------
1 | POSTGRES_USER=postgres
2 | POSTGRES_PASSWORD=postgres
3 | POSTGRES_DB=MobilityDatabase
4 | POSTGRES_TEST_DB=MobilityDatabaseTest
5 | POSTGRES_PORT=5432
6 | POSTGRES_TEST_PORT=54320
7 | PGUSER=postgres
8 | POSTGRES_HOST=localhost
9 | ENV=local
10 | SCHEMA_SPY_DOC=schemaspy-dev
11 | FEEDS_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/MobilityDatabase
12 | FEEDS_DATABASE_URL_TEST=postgresql://postgres:postgres@localhost:54320/MobilityDatabaseTest
13 | FEEDS_AUTHORIZATION=Bearer
14 | PROJECT_ID=mobility-feeds-dev
--------------------------------------------------------------------------------
/docs/BearerTokenSchema.yaml:
--------------------------------------------------------------------------------
1 | components:
2 | securitySchemes:
3 | Authentication:
4 | type: http
5 | scheme: bearer
6 | bearerFormat: JWT
--------------------------------------------------------------------------------
/docs/IAPAuthenticationSchema.yaml:
--------------------------------------------------------------------------------
1 | components:
2 | securitySchemes:
3 | Authentication:
4 | type: apiKey
5 | in: header
6 | name: x-goog-iap-jwt-assertion
--------------------------------------------------------------------------------
/docs/SwaggerUI/dist/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/SwaggerUI/dist/favicon-16x16.png
--------------------------------------------------------------------------------
/docs/SwaggerUI/dist/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/SwaggerUI/dist/favicon-32x32.png
--------------------------------------------------------------------------------
/docs/SwaggerUI/dist/index.css:
--------------------------------------------------------------------------------
1 | html {
2 | box-sizing: border-box;
3 | overflow: -moz-scrollbars-vertical;
4 | overflow-y: scroll;
5 | }
6 |
7 | *,
8 | *:before,
9 | *:after {
10 | box-sizing: inherit;
11 | }
12 |
13 | body {
14 | margin: 0;
15 | background: #fafafa;
16 | }
17 |
--------------------------------------------------------------------------------
/docs/SwaggerUI/dist/swagger-initializer.js:
--------------------------------------------------------------------------------
1 | window.onload = function() {
2 |   //<editor-fold desc="changeable configuration block">
3 |
4 | // the following lines will be replaced by docker/configurator, when it runs in a docker-container
5 | window.ui = SwaggerUIBundle({
6 | urls: [
7 | { name: "Feeds", url: "../DatabaseCatalogAPI.yaml" },
8 | { name: "Tokens", url: "../DatabaseCatalogTokenAPI.yaml" }
9 | ],
10 | dom_id: '#swagger-ui',
11 | deepLinking: true,
12 | presets: [
13 | SwaggerUIBundle.presets.apis,
14 | SwaggerUIStandalonePreset
15 | ],
16 | plugins: [
17 | SwaggerUIBundle.plugins.DownloadUrl
18 | ],
19 | layout: "StandaloneLayout"
20 | });
21 |
22 |   //</editor-fold>
23 | };
24 |
--------------------------------------------------------------------------------
/docs/SwaggerUI/index.html:
--------------------------------------------------------------------------------
1 | <!-- HTML for static distribution bundle build -->
2 | <!DOCTYPE html>
3 | <html lang="en">
4 |   <head>
5 |     <meta charset="UTF-8">
6 |     <title>Swagger UI</title>
7 |     <link rel="stylesheet" type="text/css" href="./swagger-ui.css" />
8 |     <link rel="stylesheet" type="text/css" href="index.css" />
9 |     <link rel="icon" type="image/png" href="./favicon-32x32.png" sizes="32x32" />
10 |     <link rel="icon" type="image/png" href="./favicon-16x16.png" sizes="16x16" />
11 |   </head>
12 |
13 |   <body>
14 |     <div id="swagger-ui"></div>
15 |     <script src="./swagger-ui-bundle.js" charset="UTF-8"> </script>
16 |     <script src="./swagger-ui-standalone-preset.js" charset="UTF-8"> </script>
17 |     <script src="./swagger-initializer.js" charset="UTF-8"> </script>
18 |   </body>
19 | </html>
20 |
--------------------------------------------------------------------------------
/docs/SwaggerUI/screenshots/swagger-github-pages.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/SwaggerUI/screenshots/swagger-github-pages.png
--------------------------------------------------------------------------------
/docs/SwaggerUI/swagger-ui.version:
--------------------------------------------------------------------------------
1 | v4.19.0
2 |
--------------------------------------------------------------------------------
/docs/batch-processing/batch_datasets.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/batch-processing/batch_datasets.png
--------------------------------------------------------------------------------
/docs/batch-processing/batch_process_dataset.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/batch-processing/batch_process_dataset.png
--------------------------------------------------------------------------------
/docs/batch-processing/batch_processing_schema.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/batch-processing/batch_processing_schema.png
--------------------------------------------------------------------------------
/docs/batch-processing/extract_bb.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/batch-processing/extract_bb.png
--------------------------------------------------------------------------------
/docs/batch-processing/process_validation_report.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/batch-processing/process_validation_report.png
--------------------------------------------------------------------------------
/docs/batch-processing/update_validation_report.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/batch-processing/update_validation_report.png
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag.md:
--------------------------------------------------------------------------------
1 | # How to Add a Feature Flag on Firebase
2 |
3 | ## 1. Go to Remote Config on Firebase
4 |
5 | Navigate to the Remote Config section in your Firebase project.
6 | ![Remote Config section](./feature_flag1.png)
7 |
8 | ## 2. Add a new parameter
9 |
10 | Click the "Add parameter" button to create a new parameter.
11 | ![Add parameter button](./feature_flag2.png)
12 |
13 | ## 3. Create the Feature Flag
14 |
15 | Use the new parameter to create a feature flag.
16 | ![Create the feature flag](./feature_flag3.png)
17 |
18 | Don't forget to publish your changes.
19 | ![Publish changes](./feature_flag4.png)
20 |
21 |
22 | ## 4. Edit the Feature Flag
23 |
24 | You can edit the feature flag by clicking on the pencil editing button. After making your changes, click the "Save" button and publish your changes.
25 | ![Edit the feature flag](./feature_flag5.png)
26 |
27 | ## 5. Update `RemoteConfig.ts`
28 |
29 | In your code editor, open the `RemoteConfig.ts` file and add `enableMVPSearch` to `defaultRemoteConfigValues`.
30 | ![Update defaultRemoteConfigValues](./feature_flag6.png)
31 |
32 |
33 | ## 6. Use the Feature Flag in Your Code
34 |
35 | You can now use the feature flag in your code to control the behavior of your application.
36 | ![Use the feature flag in code](./feature_flag7.png)
37 |
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/feature-flag/feature_flag1.png
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/feature-flag/feature_flag2.png
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/feature-flag/feature_flag3.png
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/feature-flag/feature_flag4.png
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/feature-flag/feature_flag5.png
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/feature-flag/feature_flag6.png
--------------------------------------------------------------------------------
/docs/feature-flag/feature_flag7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/docs/feature-flag/feature_flag7.png
--------------------------------------------------------------------------------
/functions-python/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 | exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,venv,build,.*,database_gen,feeds_operations_gen,shared
4 | # Ignored because conflict with black
5 | extend-ignore = E203
--------------------------------------------------------------------------------
/functions-python/.gcloudignore:
--------------------------------------------------------------------------------
1 | # This file specifies files that are *not* uploaded to Google Cloud
2 | # using gcloud. It follows the same syntax as .gitignore, with the addition of
3 | # "#!include" directives (which insert the entries of the given .gitignore-style
4 | # file at that point).
5 | #
6 | # For more information, run:
7 | # $ gcloud topic gcloudignore
8 | #
9 | .gcloudignore
10 | # If you would like to upload your .git directory, .gitignore file or files
11 | # from your .gitignore file, remove the corresponding line
12 | # below:
13 | .git
14 | .gitignore
15 |
16 | node_modules
17 | #!include:.gitignore
18 |
--------------------------------------------------------------------------------
/functions-python/.gitignore:
--------------------------------------------------------------------------------
1 | .dist
2 | shared
3 | test_shared
--------------------------------------------------------------------------------
/functions-python/backfill_dataset_service_date_range/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */shared/*
6 |
7 | [report]
8 | exclude_lines =
9 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/backfill_dataset_service_date_range/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the backfill dataset service date range function to run locally
2 | export FEEDS_DATABASE_URL=${{FEEDS_DATABASE_URL}}
3 | export ENV=${{ENV}}
4 |
--------------------------------------------------------------------------------
/functions-python/backfill_dataset_service_date_range/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "backfill-dataset-service-date-range",
3 | "description": "For each dataset, backfill the dataset service with the date range of the dataset",
4 | "entry_point": "backfill_dataset_service_date_range",
5 | "timeout": 3600,
6 | "memory": "2Gi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
16 | "max_instance_request_concurrency": 1,
17 | "max_instance_count": 1,
18 | "min_instance_count": 0,
19 | "available_cpu": 1
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/backfill_dataset_service_date_range/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | cloudevents~=1.10.1
19 | google-cloud-storage
20 |
21 | # Configuration
22 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/backfill_dataset_service_date_range/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/backfill_dataset_service_date_range/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/backfill_dataset_service_date_range/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/batch_datasets/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */dataset_service/*
5 | */helpers/*
6 | */shared/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/batch_datasets/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the batch datasets function to run locally. Delete this line after renaming the file.
2 | FEEDS_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/MobilityDatabase
3 | PROJECT_ID=my-project-id
4 | PUBSUB_TOPIC_NAME=my-topic
5 | DATASTORE_DATASET=my-project-id
6 | DATASTORE_EMULATOR_HOST=localhost:8044
7 | DATASTORE_EMULATOR_HOST_PATH=localhost:8044/datastore
8 | DATASTORE_HOST=http://localhost:8044
9 | DATASTORE_PROJECT_ID=my-project-id
10 |
--------------------------------------------------------------------------------
/functions-python/batch_datasets/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "batch-datasets",
3 | "description": "Retrieve all feeds and publish them to Pub/Sub for processing the datasets",
4 | "entry_point": "batch_datasets",
5 | "timeout": 20,
6 | "memory": "256Mi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers", "dataset_service"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
16 | "max_instance_request_concurrency": 20,
17 | "max_instance_count": 10,
18 | "min_instance_count": 0,
19 | "available_cpu": 1
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/batch_datasets/main_local_debug.py:
--------------------------------------------------------------------------------
1 | # Code to be able to debug locally without affecting the runtime cloud function
2 |
3 | #
4 | # Requirements:
5 | # - Google Cloud SDK installed
6 | # - Make sure to have the following environment variables set in your .env.local file
7 | # - Local database in running state
8 | # - Pub/Sub emulator running
9 | # - gcloud beta emulators pubsub start --project=project-id --host-port='localhost:8043'
10 | # - Google Datastore emulator running
11 | # - gcloud beta emulators datastore start --project=project-id --host-port='localhost:8042'
12 |
13 | # Usage:
14 | # - python batch_datasets/main_local_debug.py
15 | from main import batch_datasets
16 | from dotenv import load_dotenv
17 |
18 | # Load environment variables from .env.local
19 | load_dotenv(dotenv_path=".env.local")
20 |
21 | if __name__ == "__main__":
22 |
23 | class RequestObject:
24 | def __init__(self, headers):
25 | self.headers = headers
26 |
27 | request = RequestObject({"X-Cloud-Trace-Context": "1234567890abcdef"})
28 | batch_datasets(request)
29 |
--------------------------------------------------------------------------------
/functions-python/batch_datasets/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-pubsub
19 | google-cloud-datastore
20 | cloudevents~=1.10.1
21 |
22 | # Configuration
23 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/batch_datasets/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/batch_datasets/src/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | sys.path.append("..")  # make sibling packages importable at runtime
4 |
5 | import os
6 |
7 | print(os.getcwd())  # debug aid: show the working directory at import time
8 |
--------------------------------------------------------------------------------
/functions-python/batch_process_dataset/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */dataset_service/*
7 | */shared/*
8 |
9 | [report]
10 | exclude_lines =
11 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/batch_process_dataset/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the batch process dataset function to run locally. Delete this line after renaming the file.
2 | FEEDS_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/MobilityDatabase
3 | PROJECT_ID=my-project-id
4 | DATASTORE_DATASET=my-project-id
5 | DATASTORE_EMULATOR_HOST=localhost:8044
6 | DATASTORE_EMULATOR_HOST_PATH=localhost:8044/datastore
7 | DATASTORE_HOST=http://localhost:8044
8 | DATASTORE_PROJECT_ID=my-project-id
9 |
10 |
--------------------------------------------------------------------------------
/functions-python/batch_process_dataset/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "batch-process-dataset",
3 | "description": "Process datasets from the feed passed in the Pub/Sub event",
4 | "entry_point": "process_dataset",
5 | "timeout": 540,
6 | "memory": "2Gi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers", "dataset_service"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | },
14 | {
15 | "key": "FEEDS_CREDENTIALS",
16 | "secret": "FEEDS_CREDENTIALS"
17 | }
18 | ],
19 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
20 | "max_instance_request_concurrency": 1,
21 | "max_instance_count": 5,
22 | "min_instance_count": 0,
23 | "available_cpu": 1
24 | }
25 |
--------------------------------------------------------------------------------
/functions-python/batch_process_dataset/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-storage
19 | google-cloud-pubsub
20 | google-api-core
21 | google-cloud-firestore
22 | google-cloud-datastore
23 | google-cloud-bigquery
24 | cloudevents~=1.10.1
25 |
26 | # Configuration
27 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/batch_process_dataset/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
5 | python-dotenv~=1.0.0
--------------------------------------------------------------------------------
/functions-python/batch_process_dataset/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/batch_process_dataset/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/batch_process_dataset/status.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class Status(Enum):
5 | UPDATED = 0
6 | NOT_UPDATED = 1
7 | FAILED = 2
8 | DO_NOT_RETRY = 3
9 | PUBLISHED = 4
10 |
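Note: a hypothetical sketch of driving retry handling from the outcome — should_retry is illustrative and not part of the module:

    def should_retry(status: Status) -> bool:
        # Only transient failures are worth retrying; DO_NOT_RETRY and the
        # success states (UPDATED, NOT_UPDATED, PUBLISHED) are terminal.
        return status == Status.FAILED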
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */dataset_service/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "ingest-data-to-big-query",
3 | "description": "Ingest data to BigQuery",
4 | "entry_point": "ingest_data_to_big_query",
5 | "timeout": 540,
6 | "memory": "8Gi",
7 | "trigger_http": false,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "common"],
10 | "environment_variables": [],
11 | "secret_environment_variables": [
12 | {
13 | "key": "FEEDS_DATABASE_URL"
14 | }
15 | ],
16 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
17 | "max_instance_request_concurrency": 1,
18 | "max_instance_count": 1,
19 | "min_instance_count": 0,
20 | "available_cpu": 2
21 | }
22 |
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # Google specific packages for this function
14 | google-cloud-bigquery
15 | google-cloud-storage
16 |
17 | # Additional packages for this function
18 | pandas
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/big_query_ingestion/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/src/gbfs/gbfs_big_query_ingest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from common.bq_data_transfer import BigQueryDataTransfer
4 |
5 |
6 | class BigQueryDataTransferGBFS(BigQueryDataTransfer):
7 | """BigQuery data transfer for GBFS data"""
8 |
9 | def __init__(self):
10 | super().__init__()
11 | current_dir = os.path.dirname(os.path.abspath(__file__))
12 | self.schema_path = os.path.join(
13 | current_dir, "../shared/helpers/bq_schema/gbfs_schema.json"
14 | )
15 |
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/src/gtfs/gtfs_big_query_ingest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from common.bq_data_transfer import BigQueryDataTransfer
4 |
5 |
6 | class BigQueryDataTransferGTFS(BigQueryDataTransfer):
7 | """BigQuery data transfer for GTFS data"""
8 |
9 | def __init__(self):
10 | super().__init__()
11 | current_dir = os.path.dirname(os.path.abspath(__file__))
12 | self.schema_path = os.path.join(
13 | current_dir, "../shared/helpers/bq_schema/gtfs_schema.json"
14 | )
15 |
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/src/main.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import functions_framework
4 |
5 | from shared.helpers.logger import init_logger
6 | from gbfs.gbfs_big_query_ingest import BigQueryDataTransferGBFS
7 | from gtfs.gtfs_big_query_ingest import BigQueryDataTransferGTFS
8 |
9 | init_logger()
10 |
11 |
12 | @functions_framework.http
13 | def ingest_data_to_big_query_gtfs(_):
14 | """Google Storage to Big Query data ingestion for GTFS data"""
15 | logging.info("Function triggered")
16 | return BigQueryDataTransferGTFS().send_data_to_bigquery()
17 |
18 |
19 | @functions_framework.http
20 | def ingest_data_to_big_query_gbfs(_):
21 | """Google Storage to Big Query data ingestion for GBFS data"""
22 | logging.info("Function triggered")
23 | return BigQueryDataTransferGBFS().send_data_to_bigquery()
24 |
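Note: a local smoke-test sketch, not part of the repository — both entry points ignore their request argument, but the transfer itself still needs Google credentials and the expected buckets/datasets:

    from main import ingest_data_to_big_query_gtfs

    response = ingest_data_to_big_query_gtfs(None)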
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/tests/test_gbfs_ingestion.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import patch
3 |
4 | from gbfs.gbfs_big_query_ingest import BigQueryDataTransferGBFS
5 |
6 |
7 | class TestBigQueryDataTransferGBFS(unittest.TestCase):
8 | @patch("google.cloud.bigquery.Client")
9 | @patch("google.cloud.storage.Client")
10 | def setUp(self, mock_storage_client, _):
11 | self.mock_storage_client = mock_storage_client
12 | self.transfer = BigQueryDataTransferGBFS()
13 |
14 | def test_attributes(self):
15 | self.assertIn("gbfs_schema.json", self.transfer.schema_path)
16 |
--------------------------------------------------------------------------------
/functions-python/big_query_ingestion/tests/test_gtfs_ingestion.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import patch
3 |
4 | from gtfs.gtfs_big_query_ingest import BigQueryDataTransferGTFS
5 |
6 |
7 | class TestBigQueryDataTransferGTFS(unittest.TestCase):
8 | @patch("google.cloud.bigquery.Client")
9 | @patch("google.cloud.storage.Client")
10 | def setUp(self, mock_storage_client, _):
11 | self.mock_storage_client = mock_storage_client
12 | self.transfer = BigQueryDataTransferGTFS()
13 |
14 | def test_attributes(self):
15 | self.assertIn("gtfs_schema.json", self.transfer.schema_path)
16 |
--------------------------------------------------------------------------------
/functions-python/dataset_service/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 |
5 | [report]
6 | exclude_lines =
7 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/dataset_service/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/dataset_service/__init__.py
--------------------------------------------------------------------------------
/functions-python/dataset_service/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
5 | functions-framework==3.*
6 | google-cloud-logging
7 | google-api-core
8 | google-cloud-datastore
9 | psycopg2-binary==2.9.6
--------------------------------------------------------------------------------
/functions-python/export_csv/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */database_gen/*
5 | */dataset_service/*
6 | */helpers/*
7 | */shared/*
8 |
9 | [report]
10 | exclude_lines =
11 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/export_csv/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "export-csv",
3 | "description": "Export the DB feed data as a csv file",
4 | "entry_point": "export_and_upload_csv",
5 | "timeout": 600,
6 | "memory": "2Gi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers", "dataset_service"],
9 | "include_api_folders": ["utils", "database", "feed_filters", "common", "database_gen"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
16 | "max_instance_request_concurrency": 1,
17 | "max_instance_count": 1,
18 | "min_instance_count": 0,
19 | "available_cpu": 1
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/export_csv/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | psycopg2-binary==2.9.6
3 | aiohttp~=3.10.5
4 | asyncio~=3.4.3
5 | urllib3~=2.2.2
6 | requests~=2.32.3
7 | attrs~=23.1.0
8 | pluggy~=1.3.0
9 | certifi~=2024.7.4
10 | pandas~=2.2.3
11 | python-dotenv==1.0.0
12 | fastapi-filter[sqlalchemy]==1.0.0
13 | packaging~=24.2
14 |
15 | # SQL Alchemy and Geo Alchemy
16 | SQLAlchemy==2.0.23
17 | geoalchemy2==0.14.7
18 | shapely
19 |
20 | # Google
21 | google-cloud-storage
22 | functions-framework==3.*
23 | google-cloud-logging
24 |
25 | # Configuration
26 | python-dotenv==1.0.0
27 |
28 | # Other dependencies
29 | natsort
30 |
31 |
--------------------------------------------------------------------------------
/functions-python/export_csv/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/feed_sync_dispatcher_transitland/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */database_gen/*
5 | */dataset_service/*
6 | */helpers/*
7 | */shared/*
8 |
9 | [report]
10 | exclude_lines =
11 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/feed_sync_dispatcher_transitland/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the Transitland feed sync dispatcher function to run locally. Delete this line after renaming the file.
2 | FEEDS_DATABASE_URL=postgresql://postgres:postgres@localhost:5432/MobilityDatabase
3 | PROJECT_ID=my-project-id
4 | PUBSUB_TOPIC_NAME=my-topic
5 | TRANSITLAND_API_KEY=your-api-key
6 |
--------------------------------------------------------------------------------
/functions-python/feed_sync_dispatcher_transitland/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "feed-sync-dispatcher-transitland",
3 | "description": "Feed Sync Dispatcher for Transitland",
4 | "entry_point": "feed_sync_dispatcher_transitland",
5 | "timeout": 3600,
6 | "trigger_http": true,
7 | "include_folders": ["helpers"],
8 | "include_api_folders": ["database_gen", "database", "common"],
9 | "secret_environment_variables": [
10 | {
11 | "key": "FEEDS_DATABASE_URL"
12 | }
13 | ],
14 | "ingress_settings": "ALLOW_ALL",
15 | "max_instance_request_concurrency": 1,
16 | "max_instance_count": 1,
17 | "min_instance_count": 0,
18 | "available_cpu": 1,
19 | "available_memory": "1Gi"
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/feed_sync_dispatcher_transitland/main_local_debug.py:
--------------------------------------------------------------------------------
1 | # Code to be able to debug locally without affecting the runtime cloud function
2 |
3 |
4 | # Requirements:
5 | # - Google Cloud SDK installed
6 | # - Make sure to have the following environment variables set in your .env.local file
7 | # - Local database in running state
8 | # - Follow the instructions in the README.md file
9 | #
10 | # Usage:
11 | # - python feed_sync_dispatcher_transitland/main_local_debug.py
12 |
13 | from main import feed_sync_dispatcher_transitland
14 | from dotenv import load_dotenv
15 |
16 | # Load environment variables from .env.local_test
17 | load_dotenv(dotenv_path=".env.local_test")
18 |
19 | if __name__ == "__main__":
20 |
21 | class RequestObject:
22 | def __init__(self, headers):
23 | self.headers = headers
24 |
25 | request = RequestObject({"X-Cloud-Trace-Context": "1234567890abcdef"})
26 | feed_sync_dispatcher_transitland(request)
27 |
--------------------------------------------------------------------------------
/functions-python/feed_sync_dispatcher_transitland/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.8.30
12 | pandas
13 |
14 | # SQL Alchemy and Geo Alchemy
15 | SQLAlchemy==2.0.23
16 | geoalchemy2==0.14.7
17 |
18 | # Google specific packages for this function
19 | google-cloud-pubsub
20 | cloudevents~=1.10.1
21 |
22 | # Configuration
23 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/feed_sync_dispatcher_transitland/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/feed_sync_dispatcher_transitland/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/feed_sync_dispatcher_transitland/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/feed_sync_process_transitland/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */database_gen/*
5 | */dataset_service/*
6 | */helpers/*
7 | */shared/*
8 |
9 | [report]
10 | exclude_lines =
11 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/feed_sync_process_transitland/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the Transitland feed sync process function to run locally. Delete this line after renaming the file.
2 | FEEDS_DATABASE_URL=postgresql://postgres:postgres@localhost:54320/MobilityDatabase
3 | PROJECT_ID=mobility-feeds-dev
4 | PUBSUB_TOPIC_NAME=my-topic
5 | DATASET_BATCH_TOPIC_NAME=dataset_batch_topic_{env}_
6 |
--------------------------------------------------------------------------------
/functions-python/feed_sync_process_transitland/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "feed-sync-process-transitland",
3 | "description": "Feed Sync process for Transitland feeds",
4 | "entry_point": "process_feed_event",
5 | "timeout": 540,
6 | "memory": "512Mi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
16 | "max_instance_request_concurrency": 1,
17 | "max_instance_count": 10,
18 | "min_instance_count": 0,
19 | "available_cpu": 1
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/feed_sync_process_transitland/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.8.30
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-pubsub
19 | cloudevents~=1.10.1
20 |
21 | # Additional packages for this function
22 | pandas
23 | pycountry
24 |
25 | # Configuration
26 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/feed_sync_process_transitland/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/feed_sync_process_transitland/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/feed_sync_process_transitland/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/gbfs_validator/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */dataset_service/*
7 | */shared/*
8 |
9 | [report]
10 | exclude_lines =
11 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/gbfs_validator/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "gbfs-validator",
3 | "description": "Validate GBFS feeds",
4 | "entry_point": "gbfs_validator",
5 | "timeout": 540,
6 | "memory": "2Gi",
7 | "trigger_http": false,
8 | "include_folders": ["helpers", "dataset_service"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "environment_variables": [],
11 | "secret_environment_variables": [
12 | {
13 | "key": "FEEDS_DATABASE_URL"
14 | }
15 | ],
16 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
17 | "max_instance_request_concurrency": 1,
18 | "max_instance_count": 5,
19 | "min_instance_count": 0,
20 | "available_cpu": 1
21 | }
22 |
--------------------------------------------------------------------------------
/functions-python/gbfs_validator/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-storage
19 | google-cloud-pubsub
20 | google-api-core
21 | google-cloud-firestore
22 | google-cloud-datastore
23 | google-cloud-tasks
24 | cloudevents~=1.10.1
25 |
26 | # Configuration
27 | python-dotenv==1.0.0
28 |
29 | # Additional packages for the function
30 | jsonpath-ng
31 | language-tags
--------------------------------------------------------------------------------
/functions-python/gbfs_validator/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/gbfs_validator/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/gbfs_validator/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/helpers/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | database.py
5 | */database_gen/*
6 | */dataset_service/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/helpers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/helpers/__init__.py
--------------------------------------------------------------------------------
/functions-python/helpers/feed_sync/models.py:
--------------------------------------------------------------------------------
1 | import json
2 | from dataclasses import dataclass, asdict
3 | from typing import Optional
4 |
5 |
6 | @dataclass
7 | class TransitFeedSyncPayload:
8 | """Data class for transit feed processing payload"""
9 |
10 | external_id: str
11 | feed_id: str
12 | stable_id: str
13 | entity_types: Optional[str] = None
14 | feed_url: Optional[str] = None
15 | execution_id: Optional[str] = None
16 | spec: Optional[str] = None
17 | auth_info_url: Optional[str] = None
18 | auth_param_name: Optional[str] = None
19 | type: Optional[str] = None
20 | operator_name: Optional[str] = None
21 | country: Optional[str] = None
22 | state_province: Optional[str] = None
23 | city_name: Optional[str] = None
24 | source: Optional[str] = None
25 | payload_type: Optional[str] = None
26 |
27 | def to_dict(self):
28 | return asdict(self)
29 |
30 | def to_json(self):
31 | return json.dumps(self.to_dict())
32 |
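33 | # Illustrative note: asdict() includes every declared field, so
34 | # TransitFeedSyncPayload(external_id="e1", feed_id="f1",
35 | # stable_id="s1").to_json() renders the unset Optional fields as null.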
--------------------------------------------------------------------------------
/functions-python/helpers/parser.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import json
3 | import logging
4 | from cloudevents.http import CloudEvent
5 |
6 |
7 | def jsonify_pubsub(event: CloudEvent):
8 | """
9 | Convert the message data passed to a pub/sub triggered function to JSON
10 | @param event: The Pub/Sub message.
11 | """
12 | try:
13 | message_data = event["message"]["data"]
14 | message_json = json.loads(base64.b64decode(message_data).decode("utf-8"))
15 | return message_json
16 | except Exception as e:
17 | logging.error(f"Error parsing message data: {e}")
18 | return None
19 |
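20 | # Illustrative note: for an event whose message "data" field holds
21 | # base64-encoded JSON such as base64.b64encode(b'{"feed_id": "mdb-1"}'),
22 | # jsonify_pubsub returns {"feed_id": "mdb-1"}; any decoding or JSON error
23 | # is logged and None is returned instead of raising.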
--------------------------------------------------------------------------------
/functions-python/helpers/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 | python-dotenv==1.0.0
13 |
14 | # SQL Alchemy and Geo Alchemy
15 | SQLAlchemy==2.0.23
16 | geoalchemy2==0.14.7
17 |
18 | # Google specific packages for this function
19 | google-cloud-pubsub
20 | google-cloud-storage
21 | google-cloud-datastore
22 | cloudevents~=1.10.1
23 | google-cloud-bigquery
24 | google-api-core
25 | google-cloud-tasks
26 | google-cloud-firestore
27 |
28 | # Additional package
29 | pycountry
--------------------------------------------------------------------------------
/functions-python/helpers/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/helpers/test_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "include_api_folders": ["database_gen", "database", "common"]
3 | }
4 |
--------------------------------------------------------------------------------
/functions-python/operations_api/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */dataset_service/*
7 | */feeds_operations_gen/*
8 | */shared/*
9 |
10 | [report]
11 | exclude_lines =
12 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/operations_api/.gitignore:
--------------------------------------------------------------------------------
1 | # Generated files
2 | src/feeds_operations_gen
--------------------------------------------------------------------------------
/functions-python/operations_api/.openapi-generator/FILES:
--------------------------------------------------------------------------------
1 | src/feeds_operations/impl/__init__.py
2 | src/feeds_operations_gen/apis/__init__.py
3 | src/feeds_operations_gen/apis/operations_api.py
4 | src/feeds_operations_gen/apis/operations_api_base.py
5 | src/feeds_operations_gen/main.py
6 | src/feeds_operations_gen/models/__init__.py
7 | src/feeds_operations_gen/models/authentication_type.py
8 | src/feeds_operations_gen/models/base_feed.py
9 | src/feeds_operations_gen/models/data_type.py
10 | src/feeds_operations_gen/models/entity_type.py
11 | src/feeds_operations_gen/models/external_id.py
12 | src/feeds_operations_gen/models/extra_models.py
13 | src/feeds_operations_gen/models/feed_status.py
14 | src/feeds_operations_gen/models/get_feeds200_response.py
15 | src/feeds_operations_gen/models/gtfs_feed_response.py
16 | src/feeds_operations_gen/models/gtfs_rt_feed_response.py
17 | src/feeds_operations_gen/models/location.py
18 | src/feeds_operations_gen/models/redirect.py
19 | src/feeds_operations_gen/models/source_info.py
20 | src/feeds_operations_gen/models/update_request_gtfs_feed.py
21 | src/feeds_operations_gen/models/update_request_gtfs_rt_feed.py
22 | src/feeds_operations_gen/security_api.py
23 |
--------------------------------------------------------------------------------
/functions-python/operations_api/.openapi-generator/VERSION:
--------------------------------------------------------------------------------
1 | 7.10.0
2 |
--------------------------------------------------------------------------------
/functions-python/operations_api/README.md:
--------------------------------------------------------------------------------
1 | # Operations API
2 | The Operations API is a Cloud Function that exposes the back-office operations API.
3 | The operations API schema is located at ../../docs/OperationsAPI.yml.
4 |
5 | # Function configuration
6 | The function is configured using the following environment variables:
7 | - `FEEDS_DATABASE_URL`: The URL of the feeds database.
8 | - `GOOGLE_CLIENT_ID`: The Google client ID used for authentication.
9 |
10 | # Useful scripts
11 | - To locally execute a function use the following command:
12 | ```
13 | ./scripts/function-python-run.sh --function_name operations_api
14 | ```
15 | - To locally create a distribution zip use the following command:
16 | ```
17 | ./scripts/function-python-build.sh --function_name operations_api
18 | ```
19 | - To start the local test database use the following command:
20 | ```
21 | docker compose --env-file ./config/.env.local up -d liquibase-test
22 | ```
23 |
24 | # Local development
25 | The local development of this function follows the same steps as the other functions. Please refer to the [README.md](../README.md) file for more information.
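26 |
27 | A minimal request sketch, assuming the function is running locally via
28 | `./scripts/function-python-run.sh --function_name operations_api` on
29 | functions-framework's default port 8080; the route shown is hypothetical,
30 | so check ../../docs/OperationsAPI.yml for the real paths:
31 | ```python
32 | import requests
33 |
34 | # Hypothetical route; the real schema lives in ../../docs/OperationsAPI.yml.
35 | resp = requests.get(
36 |     "http://localhost:8080/v1/operations/feeds/gtfs",
37 |     headers={"Authorization": "Bearer <GOOGLE_ID_TOKEN>"},
38 |     timeout=30,
39 | )
40 | print(resp.status_code, resp.text)
41 | ```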
--------------------------------------------------------------------------------
/functions-python/operations_api/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "operations-api",
3 | "description": "API containing the back-office operations",
4 | "entry_point": "main",
5 | "timeout": 540,
6 | "memory": "1Gi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "environment_variables": [
11 | {
12 | "key": "GOOGLE_CLIENT_ID"
13 | }
14 | ],
15 | "secret_environment_variables": [
16 | {
17 | "key": "FEEDS_DATABASE_URL"
18 | }
19 | ],
20 | "ingress_settings": "ALLOW_ALL",
21 | "max_instance_request_concurrency": 1,
22 | "max_instance_count": 5,
23 | "min_instance_count": 0,
24 | "available_cpu": 1,
25 | "build_settings": {
26 | "pre_build_script": "../../scripts/api-operations-gen.sh"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/functions-python/operations_api/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp~=3.10.5
2 | asgiref~=3.8.1
3 | asyncio~=3.4.3
4 | attrs~=23.1.0
5 | certifi==2024.7.4
6 | email-validator==2.0.0
7 | fastapi==0.115.2
8 | httpx
9 | mangum
10 | pluggy~=1.5.0
11 | promise==2.3
12 | pydantic>=2
13 | python-dotenv==0.17.1
14 | python-multipart==0.0.7
15 | PyYAML>=5.4.1,<6.1.0
16 | requests==2.32.3
17 | Rx==1.6.1
18 | starlette==0.40.0
19 | typing-extensions==4.10.0
20 | ujson==4.0.2
21 | urllib3~=2.2.2
22 | uvicorn
23 | uvloop==0.19.0
24 |
25 | # Additional packages
26 | google-cloud-logging==3.10.0
27 | functions-framework==3.*
28 | SQLAlchemy==2.0.23
29 | geoalchemy2==0.14.7
30 | psycopg2-binary==2.9.6
31 | cachetools
32 | deepdiff
33 | fastapi_filter
34 |
--------------------------------------------------------------------------------
/functions-python/operations_api/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | pytest-asyncio
3 | urllib3-mock
4 | requests-mock
5 | python-dotenv~=1.0.0
--------------------------------------------------------------------------------
/functions-python/operations_api/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/operations_api/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/operations_api/src/feeds_operations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/operations_api/src/feeds_operations/__init__.py
--------------------------------------------------------------------------------
/functions-python/operations_api/src/feeds_operations/impl/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/operations_api/src/feeds_operations/impl/__init__.py
--------------------------------------------------------------------------------
/functions-python/operations_api/src/feeds_operations/impl/models/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/operations_api/src/feeds_operations/impl/models/__init__.py
--------------------------------------------------------------------------------
/functions-python/operations_api/src/feeds_operations/impl/models/location_impl.py:
--------------------------------------------------------------------------------
1 | from feeds_operations_gen.models.location import Location
2 | from shared.database_gen.sqlacodegen_models import Location as LocationOrm
3 |
4 |
5 | class LocationImpl(Location):
6 | class Config:
7 | """Pydantic configuration.
8 | Enabling `from_attributes` to allow creating a model instance from a SQLAlchemy row object.
9 | """
10 |
11 | from_attributes = True
12 |
13 | @classmethod
14 | def from_orm(cls, location: LocationOrm | None) -> Location | None:
15 | """Create a model instance from a SQLAlchemy a Location row object."""
16 | if not location:
17 | return None
18 | return cls(
19 | country_code=location.country_code,
20 | country=location.country,
21 | subdivision_name=location.subdivision_name,
22 | municipality=location.municipality,
23 | )
24 |
--------------------------------------------------------------------------------
/functions-python/operations_api/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/operations_api/tests/__init__.py
--------------------------------------------------------------------------------
/functions-python/operations_api/tests/feeds_operations/impl/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Package for feed operations implementation tests.
3 | """
4 |
--------------------------------------------------------------------------------
/functions-python/operations_api/tests/feeds_operations/impl/models/test_entity_type_impl.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import Mock
2 |
3 | from shared.database_gen.sqlacodegen_models import Entitytype
4 | from feeds_operations.impl.models.entity_type_impl import EntityTypeImpl
5 | from feeds_operations_gen.models.entity_type import EntityType
6 |
7 |
8 | def test_from_orm():
9 | entity_type = Entitytype(name="VP")
10 | result = EntityTypeImpl.from_orm(entity_type)
11 | assert result.name == "VP"
12 |
13 |
14 | def test_from_orm_none():
15 | result = EntityTypeImpl.from_orm(None)
16 | assert result is None
17 |
18 |
19 | def test_to_orm():
20 | entity_type = EntityType("vp")
21 | session = Mock()
22 | mock_query = Mock()
23 | resulting_entity = Mock()
24 | mock_query.filter.return_value.first.return_value = resulting_entity
25 | session.query.return_value = mock_query
26 | result = EntityTypeImpl.to_orm(entity_type, session)
27 | assert result == resulting_entity
28 |
--------------------------------------------------------------------------------
/functions-python/operations_api/tests/feeds_operations/impl/models/test_external_id_impl.py:
--------------------------------------------------------------------------------
1 | from shared.database_gen.sqlacodegen_models import Externalid, Gtfsfeed
2 | from feeds_operations_gen.models.external_id import ExternalId
3 | from feeds_operations.impl.models.external_id_impl import (
4 | ExternalIdImpl,
5 | )
6 |
7 |
8 | def test_from_orm():
9 | external_id = Externalid(associated_id="12345", source="test_source")
10 | result = ExternalIdImpl.from_orm(external_id)
11 | assert result.external_id == "12345"
12 | assert result.source == "test_source"
13 |
14 |
15 | def test_from_orm_none():
16 | result = ExternalIdImpl.from_orm(None)
17 | assert result is None
18 |
19 |
20 | def test_to_orm():
21 | external_id = ExternalId(external_id="12345", source="test_source")
22 | feed = Gtfsfeed(id=1)
23 | result = ExternalIdImpl.to_orm(external_id, feed)
24 | assert result.feed_id == 1
25 | assert result.associated_id == "12345"
26 | assert result.source == "test_source"
27 |
--------------------------------------------------------------------------------
/functions-python/operations_api/tests/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | asyncio_default_fixture_loop_scope = function
3 | testpaths = tests
4 | python_files = test_*.py
5 | python_classes = Test*
6 | python_functions = test_*
7 | pythonpath =
8 | src
--------------------------------------------------------------------------------
/functions-python/preprocessed_analytics/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */shared/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/preprocessed_analytics/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "preprocess-analytics",
3 | "description": "Preprocess analytics",
4 | "entry_point": "preprocess_analytics",
5 | "timeout": 540,
6 | "memory": "4Gi",
7 | "trigger_http": false,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "environment_variables": [],
11 | "secret_environment_variables": [
12 | {
13 | "key": "FEEDS_DATABASE_URL"
14 | }
15 | ],
16 | "ingress_settings": "ALLOW_ALL",
17 | "max_instance_request_concurrency": 1,
18 | "max_instance_count": 5,
19 | "min_instance_count": 0,
20 | "available_cpu": 1
21 | }
22 |
--------------------------------------------------------------------------------
/functions-python/preprocessed_analytics/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-bigquery
19 | google-cloud-storage
20 | flask
21 |
22 | # Additional packages for this function
23 | pandas
24 | pycountry
25 |
26 | # Configuration
27 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/preprocessed_analytics/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/preprocessed_analytics/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/preprocessed_analytics/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/process_validation_report/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */shared/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/process_validation_report/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the validation report information extraction to run locally
2 | export FEEDS_DATABASE_URL=${{FEEDS_DATABASE_URL}}
3 |
--------------------------------------------------------------------------------
/functions-python/process_validation_report/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "process-validation-report",
3 | "description": "Processes the GTFS validation report to update the database",
4 | "entry_point": "process_validation_report",
5 | "timeout": 540,
6 | "memory": "2Gi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
16 | "max_instance_request_concurrency": 8,
17 | "max_instance_count": 1,
18 | "min_instance_count": 0,
19 | "available_cpu": 1
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/process_validation_report/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | cloudevents~=1.10.1
19 |
20 | # Configuration
21 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/process_validation_report/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/process_validation_report/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/process_validation_report/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | # Linting requirements
2 | flake8==6.0.0
3 | black==23.3.0
4 | pre-commit==3.4.0
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */shared/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the reverse_geolocation function
2 | FEEDS_DATABASE_URL=${{FEEDS_DATABASE_URL}}
3 | QUEUE_NAME=${{QUEUE_NAME}}
4 | PROJECT_ID=${{PROJECT_ID}}
5 | GCP_REGION=${{GCP_REGION}}
6 | SERVICE_ACCOUNT_EMAIL=${{SERVICE_ACCOUNT_EMAIL}}
7 | DATASETS_BUCKET_NAME=${{DATASETS_BUCKET_NAME}}
8 | PUBSUB_TOPIC_NAME=${{PUBSUB_TOPIC_NAME}}
9 | DATASET_BUCKET_NAME_GBFS=${{DATASET_BUCKET_NAME_GBFS}}
10 | DATASET_BUCKET_NAME_GTFS=${{DATASET_BUCKET_NAME_GTFS}}
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "reverse-geolocation",
3 | "description": "Reverse geolocation function",
4 | "entry_point": "reverse_geolocation",
5 | "timeout": 540,
6 | "available_memory": "4Gi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "environment_variables": [],
11 | "secret_environment_variables": [
12 | {
13 | "key": "FEEDS_DATABASE_URL"
14 | }
15 | ],
16 | "ingress_settings": "ALLOW_ALL",
17 | "max_instance_request_concurrency": 1,
18 | "max_instance_count": 5,
19 | "min_instance_count": 0,
20 | "available_cpu": 1
21 | }
22 |
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-bigquery
19 | google-cloud-storage
20 | google-cloud-tasks
21 | google-cloud-datastore
22 | google-cloud-pubsub
23 |
24 | # Additional packages for this function
25 | pandas
26 | pycountry
27 | shapely
28 | gtfs-kit
29 | matplotlib
30 | jsonpath_ng
31 |
32 | # Configuration
33 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation_populate/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */shared/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation_populate/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the reverse_geolocation_populate function
2 | FEEDS_DATABASE_URL=${{FEEDS_DATABASE_URL}}
3 |
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation_populate/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "reverse-geolocation-populate",
3 | "description": "Populate the database with reverse geolocation data",
4 | "entry_point": "reverse_geolocation_populate",
5 | "timeout": 3600,
6 | "trigger_http": true,
7 | "include_folders": ["helpers"],
8 | "include_api_folders": ["database_gen", "database", "common"],
9 | "environment_variables": [],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_ALL",
16 | "max_instance_request_concurrency": 1,
17 | "max_instance_count": 10,
18 | "min_instance_count": 0,
19 | "available_cpu": 2,
20 | "available_memory": "4Gi"
21 | }
22 |
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation_populate/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-pubsub
19 | google-cloud-datastore
20 | google-cloud-storage
21 | google-cloud-bigquery
22 | cloudevents~=1.10.1
23 |
24 | # Additional packages for this function
25 | gtfs-kit
26 | pycountry
27 |
28 | # Configuration
29 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation_populate/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation_populate/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/reverse_geolocation_populate/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/reverse_geolocation_populate/src/locality_admin_levels.json:
--------------------------------------------------------------------------------
1 | {
2 | "JP": [7],
3 | "CA": [6, 8],
4 | "FR": []
5 | }
--------------------------------------------------------------------------------
/functions-python/tasks_executor/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */shared/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/tasks_executor/README.md:
--------------------------------------------------------------------------------
1 | # Tasks Executor
2 |
3 | This directory contains a Google Cloud Function used as a single point of access to multiple _tasks_.
4 |
5 | ## Usage
6 | The function receives the following payload:
7 | ```
8 | {
9 | "task": "string", # [required] Name of the task to execute
10 | "payload": { } [optional] Payload to pass to the task
11 | }
12 | ```
13 |
14 | Example:
15 | ```json
16 | {
17 | "task": "rebuild_missing_validation_reports",
18 | "payload": {
19 | "dry_run": true,
20 | "filter_after_in_days": 14,
21 | "filter_statuses": ["active", "inactive", "future"]
22 | }
23 | }
24 | ```
25 | To get the list of supported tasks use:
26 | ```json
27 | {
28 | "task": "list_tasks",
29 | "payload": {}
30 | }
31 | ```
32 |
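33 | A minimal local-invocation sketch, assuming the function was started with
34 | `./scripts/function-python-run.sh --function_name tasks_executor` so that
35 | functions-framework serves it on its default port 8080:
36 | ```python
37 | import requests
38 |
39 | envelope = {
40 |     "task": "rebuild_missing_validation_reports",
41 |     "payload": {"dry_run": True, "filter_after_in_days": 14},
42 | }
43 | resp = requests.post("http://localhost:8080", json=envelope, timeout=60)
44 | print(resp.status_code, resp.text)
45 | ```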
--------------------------------------------------------------------------------
/functions-python/tasks_executor/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tasks_executor",
3 | "description": "The Tasks Executor function runs maintenance tasks avoiding the creation of multiple functions for one-time execution",
4 | "entry_point": "tasks_executor",
5 | "timeout": 540,
6 | "memory": "4Gi",
7 | "trigger_http": false,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "environment_variables": [],
11 | "secret_environment_variables": [
12 | {
13 | "key": "FEEDS_DATABASE_URL"
14 | }
15 | ],
16 | "ingress_settings": "ALLOW_ALL",
17 | "max_instance_request_concurrency": 1,
18 | "max_instance_count": 1,
19 | "min_instance_count": 0,
20 | "available_cpu": 1
21 | }
22 |
--------------------------------------------------------------------------------
/functions-python/tasks_executor/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-workflows
19 | flask
20 |
21 | # Configuration
22 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/tasks_executor/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/tasks_executor/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/tasks_executor/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/tasks_executor/src/tasks/validation_reports/README.md:
--------------------------------------------------------------------------------
1 | # Rebuild Missing Validation Reports
2 |
3 | This task generates missing validation reports for GTFS datasets.
4 | The reports are generated using the _gtfs_validator_ GCP workflow.
5 |
6 | ## Task ID
7 | Use the task ID: `rebuild_missing_validation_reports`
8 |
9 | ## Usage
10 | The function receives the following payload:
11 | ```
12 | {
13 | "dry_run": bool, # [optional] If True, do not execute the workflow
14 | "filter_after_in_days": int, # [optional] Filter datasets older than this number of days(default: 14 days ago)
15 | "filter_statuses": list[str] # [optional] Filter datasets by status(in)
16 | }
17 | ```
18 | Example:
19 | ```json
20 | {
21 | "dry_run": true,
22 | "filter_after_in_days": 14,
23 | "filter_statuses": ["active", "inactive", "future"]
24 | }
25 | ```
26 |
27 | ## GCP environment variables
28 | The function uses the following environment variables:
29 | - `ENV`: The environment to use. It can be `dev`, `staging` or `prod`. Default is `dev`.
30 | - `LOCATION`: The location of the GCP project. Default is `northamerica-northeast1`.
31 |
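32 | Since this task is dispatched through the Tasks Executor function, the
33 | payload shown above travels inside its envelope; a minimal sketch with
34 | illustrative values:
35 | ```python
36 | import json
37 |
38 | envelope = {
39 |     "task": "rebuild_missing_validation_reports",
40 |     "payload": {
41 |         "dry_run": True,  # report what would run without executing anything
42 |         "filter_after_in_days": 14,
43 |         "filter_statuses": ["active", "inactive", "future"],
44 |     },
45 | }
46 | print(json.dumps(envelope, indent=2))
47 | ```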
--------------------------------------------------------------------------------
/functions-python/test_utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/test_utils/__init__.py
--------------------------------------------------------------------------------
/functions-python/test_utils/liquibase.properties:
--------------------------------------------------------------------------------
1 | changeLogFile=../liquibase/changelog.xml
2 | username=POSTGRES_USER
3 | password=POSTGRES_PASSWORD
4 | url=jdbc:postgresql://127.0.0.1:54320/MobilityDatabaseTest
5 | driver=org.postgresql.Driver
6 | logLevel=info
7 | liquibase.hub.mode=off
--------------------------------------------------------------------------------
/functions-python/tokens/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */shared/*
6 |
7 | [report]
8 | exclude_lines =
9 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/tokens/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for tokens function to run locally
2 | export FEEDS_GCP_IDP_API_KEY={{TOKENS_API_GCP_IDP_API_KEY}}
--------------------------------------------------------------------------------
/functions-python/tokens/README.md:
--------------------------------------------------------------------------------
1 | # Tokens API Function
2 | The tokens API function implements the tokens API described at [docs/DatabaseCatalogTokenAPI.yaml](https://mobilitydata.github.io/mobility-feed-api/SwaggerUI/index.html?urls.primaryName=Tokens).
3 |
4 | # Local development
5 |
6 | ## Requirements
7 |
8 | Python >= 3.10
9 |
10 | ## Installation & Usage
11 |
12 | - Install dependencies
13 | ```bash
14 | cd api
15 | pip3 install -r requirements.txt
16 | pip3 install -r requirements_dev.txt
17 | ```
18 |
19 | ## Environment variables
20 | - Rename file `.env.rename_me` to `.env.local`
21 | - Replace all values enclosed by `{{}}`
22 | - Enjoy Coding!
23 |
24 | ## Linter
25 | This repository uses Flake8 and Black for code styling.
26 |
27 | To run linter checks:
28 |
29 | ```bash
30 | scripts/lint-tests.sh
31 | ```
32 |
33 | You can also use the pre-commit hooks installed through [requirements_dev.txt](api/requirements_dev.txt) with
34 | ```bash
35 | pre-commit install
36 | pre-commit run --all-files
37 | ```
38 |
39 | ## Execute
40 | ```
41 | ./scripts/function-python-run.sh --function_name tokens
42 | ```
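43 |
44 | A request sketch, assuming the function is running locally on
45 | functions-framework's default port 8080; the request body below is
46 | hypothetical, so check docs/DatabaseCatalogTokenAPI.yaml for the real
47 | contract:
48 | ```python
49 | import requests
50 |
51 | # Hypothetical body; see docs/DatabaseCatalogTokenAPI.yaml for the contract.
52 | resp = requests.post(
53 |     "http://localhost:8080/v1/tokens",
54 |     json={"refresh_token": "<YOUR_REFRESH_TOKEN>"},
55 |     timeout=20,
56 | )
57 | print(resp.status_code, resp.text)
58 | ```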
--------------------------------------------------------------------------------
/functions-python/tokens/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tokens-api",
3 | "description": "Tokens API",
4 | "entry_point": "tokens_post",
5 | "path": "/v1/tokens/*",
6 | "timeout": 20,
7 | "memory": "128Mi",
8 | "trigger_http": true,
9 | "include_folders": ["helpers"],
10 | "include_api_folders": ["common"],
11 | "environment_variables": [],
12 | "secret_environment_variables": [
13 | {
14 | "key": "FEEDS_GCP_IDP_API_KEY"
15 | }
16 | ],
17 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
18 | "max_instance_request_concurrency": 20,
19 | "max_instance_count": 100,
20 | "min_instance_count": 0,
21 | "available_cpu": 1
22 | }
23 |
--------------------------------------------------------------------------------
/functions-python/tokens/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [tool.black]
6 | line-length = 120
7 | extend-exclude = '''
8 | (
9 | /(
10 | \.eggs # exclude a few common directories in the
11 | | \.git # root of the project
12 | | \.hg
13 | | \.mypy_cache
14 | | \.tox
15 | | \.venv
16 | | .*/venv
17 | | _build
18 | | buck-out
19 | | build
20 | | dist
21 | | src/feeds_gen/*
22 | | src/database_gen/*
23 | )/
24 | )
25 | '''
26 |
27 | [tool.isort]
28 | profile = "black"
29 | skip = [
30 | '.eggs', '.git', '.hg', '.mypy_cache', '.nox', '.pants.d', '.tox',
31 | '.venv', '_build', 'buck-out', 'build', 'dist', 'node_modules', 'venv',
32 | ]
33 | skip_gitignore = true
34 |
35 | [tool.pytest.ini_options]
36 | pythonpath = ["src"]
37 |
--------------------------------------------------------------------------------
/functions-python/tokens/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # Flask dependencies for OpenAPI implementation
14 | flask
15 | werkzeug
16 |
17 | # JWT library for JWT token verification
18 | PyJWT
--------------------------------------------------------------------------------
/functions-python/tokens/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | # Testing
2 | pytest>=3.2.3
3 | pytest_mock>=3.11.1
--------------------------------------------------------------------------------
/functions-python/tokens/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = tokens
3 | version = 0.1.0
4 | description = API for the authentication tokens to access the Mobility Database
5 | long_description = file: README.md
6 | keywords = OpenAPI Mobility Data Catalog API Tokens
7 | python_requires = >=3.10
8 | classifiers =
9 | Operating System :: OS Independent
10 | Programming Language :: Python :: 3
11 | Programming Language :: Python :: 3.10
12 |
13 | [options.packages.find]
14 | where = src
--------------------------------------------------------------------------------
/functions-python/tokens/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/tokens/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/tokens/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/tokens/tests/__init__.py
--------------------------------------------------------------------------------
/functions-python/update_feed_status/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */database_gen/*
5 | */dataset_service/*
6 | */helpers/*
7 | */shared/*
8 |
9 | [report]
10 | exclude_lines =
11 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/update_feed_status/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the update feed status function to run locally
2 | export FEEDS_DATABASE_URL=${{FEEDS_DATABASE_URL}}
3 |
--------------------------------------------------------------------------------
/functions-python/update_feed_status/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "update-feed-status",
3 | "description": "Update the feed status with the corresponding latest dataset service date range",
4 | "entry_point": "update_feed_status",
5 | "timeout": 3600,
6 | "memory": "1Gi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
16 | "max_instance_request_concurrency": 1,
17 | "max_instance_count": 1,
18 | "min_instance_count": 0,
19 | "available_cpu": 1
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/update_feed_status/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | cloudevents~=1.10.1
19 |
20 | # Configuration
21 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/update_feed_status/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/update_feed_status/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/update_feed_status/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/update_feed_status/src/main.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import functions_framework
3 | from shared.helpers.logger import init_logger
4 | from shared.helpers.feed_status import update_feed_statuses_query
5 | from shared.database.database import with_db_session
6 |
7 | init_logger()
8 |
9 |
10 | @with_db_session
11 | @functions_framework.http
12 | def update_feed_status(_, db_session):
13 | """Updates the Feed status based on the latets dataset service date range."""
14 | try:
15 | logging.info("Database session started.")
16 | diff_counts = update_feed_statuses_query(db_session, [])
17 | return diff_counts, 200
18 |
19 | except Exception as error:
20 | logging.error(f"Error updating the feed statuses: {error}")
21 | return f"Error updating the feed statuses: {error}", 500
22 |
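23 | # Illustrative note: with_db_session is expected to inject the db_session
24 | # argument, so the HTTP request object is the only value the framework
25 | # passes positionally (received here as `_`).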
--------------------------------------------------------------------------------
/functions-python/update_validation_report/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */shared/*
7 |
8 | [report]
9 | exclude_lines =
10 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/update_validation_report/.env.rename_me:
--------------------------------------------------------------------------------
1 | # Environment variables for the validation report updates to run locally
2 | FEEDS_DATABASE_URL={{FEEDS_DATABASE_URL}}
3 | ENV={{ENV}}
4 | BATCH_SIZE={{BATCH_SIZE}}
5 | WEB_VALIDATOR_URL={{WEB_VALIDATOR_URL}}
6 | LOCATION={{LOCATION}}
7 | SLEEP_TIME={{SLEEP_TIME}}
8 |
--------------------------------------------------------------------------------
/functions-python/update_validation_report/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "update-validation-report",
3 | "description": "Batch update of validation report for the latest datasets",
4 | "entry_point": "update_validation_report",
5 | "timeout": 3600,
6 | "memory": "256Mi",
7 | "trigger_http": true,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "secret_environment_variables": [
11 | {
12 | "key": "FEEDS_DATABASE_URL"
13 | }
14 | ],
15 | "ingress_settings": "ALLOW_INTERNAL_AND_GCLB",
16 | "max_instance_request_concurrency": 1,
17 | "max_instance_count": 1,
18 | "min_instance_count": 0,
19 | "available_cpu": 1
20 | }
21 |
--------------------------------------------------------------------------------
/functions-python/update_validation_report/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | cloudevents~=1.10.1
19 | google-cloud-storage
20 | google-cloud-workflows
21 |
22 | # Configuration
23 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/update_validation_report/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
--------------------------------------------------------------------------------
/functions-python/update_validation_report/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/update_validation_report/src/__init__.py
--------------------------------------------------------------------------------
/functions-python/validation_to_ndjson/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
3 | */test*/*
4 | */helpers/*
5 | */database_gen/*
6 | */dataset_service/*
7 | */shared/*
8 |
9 | [report]
10 | exclude_lines =
11 | if __name__ == .__main__.:
--------------------------------------------------------------------------------
/functions-python/validation_to_ndjson/function_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "convert-reports-to-ndjson",
3 | "description": "Converts reports to ndjson format for BigQuery ingestion",
4 | "entry_point": "convert_reports_to_ndjson",
5 | "timeout": 540,
6 | "memory": "2Gi",
7 | "trigger_http": false,
8 | "include_folders": ["helpers"],
9 | "include_api_folders": ["database_gen", "database", "common"],
10 | "environment_variables": [],
11 | "secret_environment_variables": [
12 | {
13 | "key": "FEEDS_DATABASE_URL"
14 | }
15 | ],
16 | "ingress_settings": "ALLOW_ALL",
17 | "max_instance_request_concurrency": 1,
18 | "max_instance_count": 5,
19 | "min_instance_count": 0,
20 | "available_cpu": 1
21 | }
22 |
--------------------------------------------------------------------------------
/functions-python/validation_to_ndjson/requirements.txt:
--------------------------------------------------------------------------------
1 | # Common packages
2 | functions-framework==3.*
3 | google-cloud-logging
4 | psycopg2-binary==2.9.6
5 | aiohttp~=3.10.5
6 | asyncio~=3.4.3
7 | urllib3~=2.2.2
8 | requests~=2.32.3
9 | attrs~=23.1.0
10 | pluggy~=1.3.0
11 | certifi~=2024.7.4
12 |
13 | # SQL Alchemy and Geo Alchemy
14 | SQLAlchemy==2.0.23
15 | geoalchemy2==0.14.7
16 |
17 | # Google specific packages for this function
18 | google-cloud-bigquery
19 | google-cloud-storage
20 |
21 | # Additional packages for this function
22 | pandas
23 |
24 | # Configuration
25 | python-dotenv==1.0.0
--------------------------------------------------------------------------------
/functions-python/validation_to_ndjson/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | Faker
2 | pytest~=7.4.3
3 | urllib3-mock
4 | requests-mock
--------------------------------------------------------------------------------
/functions-python/validation_to_ndjson/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/functions-python/validation_to_ndjson/src/__init__.py
--------------------------------------------------------------------------------
/functions/.firebaserc:
--------------------------------------------------------------------------------
1 | {
2 | "projects": {
3 | "default": "mobility-feeds-dev",
4 | "dev": "mobility-feeds-dev",
5 | "qa": "mobility-feeds-qa",
6 | "prod": "mobility-feeds-prod"
7 | }
8 | }
--------------------------------------------------------------------------------
/functions/firebase.json:
--------------------------------------------------------------------------------
1 | {
2 | "functions": [
3 | {
4 | "source": "packages/user-api",
5 | "runtime": "nodejs20",
6 | "codebase": "user-api",
7 | "ignore": [
8 | "node_modules",
9 | ".git",
10 | "__tests__",
11 | "firebase-debug.log",
12 | "firebase-debug.*.log"
13 | ],
14 | "predeploy": [
15 | "yarn --cwd \"$RESOURCE_DIR\" install",
16 | "yarn --cwd \"$RESOURCE_DIR\" build"
17 | ]
18 | },
19 | {
20 | "source": "packages/feed-form",
21 | "runtime": "nodejs20",
22 | "codebase": "feed-form",
23 | "ignore": [
24 | "node_modules",
25 | ".git",
26 | "__tests__",
27 | "firebase-debug.log",
28 | "firebase-debug.*.log"
29 | ],
30 | "predeploy": [
31 | "yarn --cwd \"$RESOURCE_DIR\" install",
32 | "yarn --cwd \"$RESOURCE_DIR\" build"
33 | ]
34 | }
35 | ],
36 | "emulators": {
37 | "functions": {
38 | "port": 5030
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/functions/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "private": true,
3 | "workspaces": ["packages/*"],
4 | "scripts": {
5 | "test": "yarn workspaces run test",
6 | "build": "yarn workspaces run build",
7 | "lint": "yarn workspaces run lint"
8 | },
9 | "devDependencies": {
10 | "firebase-tools": "^12.5.4"
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/functions/packages/feed-form/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | root: true,
3 | env: {
4 | es6: true,
5 | node: true,
6 | },
7 | extends: [
8 | "eslint:recommended",
9 | "plugin:import/errors",
10 | "plugin:import/warnings",
11 | "plugin:import/typescript",
12 | "google",
13 | "plugin:@typescript-eslint/recommended",
14 | ],
15 | parser: "@typescript-eslint/parser",
16 | parserOptions: {
17 | project: ["tsconfig.json", "tsconfig.dev.json"],
18 | sourceType: "module",
19 | },
20 | ignorePatterns: [
21 | "/lib/**/*", // Ignore built files.
22 | "**/*config.*", // Ignore config files.
23 | ],
24 | plugins: [
25 | "@typescript-eslint",
26 | "import",
27 | ],
28 | rules: {
29 | "quotes": ["error", "double"],
30 | "import/no-unresolved": 0,
31 | "indent": ["error", 2],
32 | },
33 | };
34 |
--------------------------------------------------------------------------------
/functions/packages/feed-form/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled JavaScript files
2 | lib/**/*.js
3 | lib/**/*.js.map
4 |
5 | # TypeScript v1 declaration files
6 | typings/
7 |
8 | # Node.js dependency directory
9 | node_modules
10 |
11 | .env
12 |
--------------------------------------------------------------------------------
/functions/packages/feed-form/jest.config.ts:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | preset: 'ts-jest',
3 | testEnvironment: 'node',
4 | testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'],
5 | };
6 |
--------------------------------------------------------------------------------
/functions/packages/feed-form/src/impl/types.ts:
--------------------------------------------------------------------------------
1 | export type YesNoFormInput = "yes" | "no" | "";
2 | export type AuthTypes =
3 | | "None - 0"
4 | | "API key - 1"
5 | | "HTTP header - 2"
6 | | "choiceRequired";
7 |
8 | export interface FeedSubmissionFormRequestBody {
9 | isOfficialProducer: YesNoFormInput;
10 | isOfficialFeed: "yes" | "no" | "unsure" | undefined;
11 | dataType: "gtfs" | "gtfs_rt";
12 | transitProviderName?: string;
13 | feedLink?: string;
14 | isUpdatingFeed: YesNoFormInput;
15 | oldFeedLink?: string;
16 | licensePath?: string;
17 | country?: string;
18 | region?: string;
19 | municipality?: string;
20 | tripUpdates?: string;
21 | vehiclePositions?: string;
22 | serviceAlerts?: string;
23 | oldTripUpdates?: string;
24 | oldVehiclePositions?: string;
25 | oldServiceAlerts?: string;
26 | gtfsRelatedScheduleLink?: string;
27 | name?: string;
28 | authType: AuthTypes;
29 | authSignupLink?: string;
30 | authParameterName?: string;
31 | dataProducerEmail?: string;
32 | isInterestedInQualityAudit: YesNoFormInput;
33 | userInterviewEmail?: string;
34 | whatToolsUsedText?: string;
35 | hasLogoPermission: YesNoFormInput;
36 | }
37 |
--------------------------------------------------------------------------------
/functions/packages/feed-form/src/index.ts:
--------------------------------------------------------------------------------
1 | import {initializeApp} from "firebase-admin/app";
2 | import {CallableRequest, onCall} from "firebase-functions/v2/https";
3 | import * as feedAPI from "./impl/feed-form-impl";
4 | import {type FeedSubmissionFormRequestBody} from "./impl/types";
5 |
6 | initializeApp();
7 |
8 | export const writeToSheet = onCall(
9 | {
10 | minInstances: 0,
11 | maxInstances: 100,
12 | invoker: "public",
13 | cors: "*",
14 | region: "northamerica-northeast1",
15 | },
16 | async (request: CallableRequest) => {
17 | return await feedAPI.writeToSheet(request);
18 | }
19 | );
20 |
--------------------------------------------------------------------------------
/functions/packages/feed-form/tsconfig.dev.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": [
3 | ".eslintrc.js"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/functions/packages/feed-form/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "noImplicitReturns": true,
5 | "noUnusedLocals": true,
6 | "outDir": "lib",
7 | "sourceMap": true,
8 | "strict": true,
9 | "target": "es2017"
10 | },
11 | "compileOnSave": true,
12 | "include": [
13 | "src"
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | root: true,
3 | env: {
4 | es6: true,
5 | node: true,
6 | },
7 | extends: [
8 | "eslint:recommended",
9 | "plugin:import/errors",
10 | "plugin:import/warnings",
11 | "plugin:import/typescript",
12 | "google",
13 | "plugin:@typescript-eslint/recommended",
14 | ],
15 | parser: "@typescript-eslint/parser",
16 | parserOptions: {
17 | project: ["tsconfig.json", "tsconfig.dev.json"],
18 | sourceType: "module",
19 | },
20 | ignorePatterns: [
21 | "/lib/**/*", // Ignore built files.
22 | "**/*config.*", // Ignore config files.
23 | ],
24 | plugins: [
25 | "@typescript-eslint",
26 | "import",
27 | ],
28 | rules: {
29 | "quotes": ["error", "double"],
30 | "import/no-unresolved": 0,
31 | "indent": ["error", 2],
32 | },
33 | };
34 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/.gcloudignore:
--------------------------------------------------------------------------------
1 | # This file specifies files that are *not* uploaded to Google Cloud
2 | # using gcloud. It follows the same syntax as .gitignore, with the addition of
3 | # "#!include" directives (which insert the entries of the given .gitignore-style
4 | # file at that point).
5 | #
6 | # For more information, run:
7 | # $ gcloud topic gcloudignore
8 | #
9 | .gcloudignore
10 | # If you would like to upload your .git directory, .gitignore file or files
11 | # from your .gitignore file, remove the corresponding line
12 | # below:
13 | .git
14 | .gitignore
15 |
16 | node_modules
17 | #!include:.gitignore
18 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled JavaScript files
2 | lib/**/*.js
3 | lib/**/*.js.map
4 |
5 | # TypeScript v1 declaration files
6 | typings/
7 |
8 | # Node.js dependency directory
9 | node_modules
10 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/jest.config.ts:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | preset: 'ts-jest',
3 | testEnvironment: 'node',
4 | testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'],
5 | };
6 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "firebase-password-policy",
3 | "version": "1.0.0",
4 | "engines": {
5 | "node": "18"
6 | },
7 | "scripts": {
8 | "lint": "eslint --ext .js,.ts .",
9 | "build": "tsc",
10 | "build:watch": "tsc --watch",
11 | "test": "jest"
12 | },
13 | "dependencies": {
14 | "firebase": "^10.6.0",
15 | "firebase-admin": "^11.11.1"
16 | },
17 | "devDependencies": {
18 | "@types/jest": "^29.5.8",
19 | "@typescript-eslint/eslint-plugin": "^5.12.0",
20 | "@typescript-eslint/parser": "^5.12.0",
21 | "eslint": "^8.9.0",
22 | "eslint-config-google": "^0.14.0",
23 | "eslint-plugin-import": "^2.25.4",
24 | "firebase-functions-test": "^3.1.0",
25 | "jest": "^29.7.0",
26 | "ts-jest": "^29.1.1",
27 | "ts-node": "^10.9.1",
28 | "typescript": "^4.9.0"
29 | },
30 | "main": "lib/index.js",
31 | "description": "Firebase function that configures password policy",
32 | "private": true
33 | }
34 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/src/impl/firebase-password-policy-impl.ts:
--------------------------------------------------------------------------------
1 | import {getAuth} from "firebase-admin/auth";
2 |
3 | /**
4 | * Sets the password policy for the Firebase project.
5 | */
6 | export const setPasswordPolicyConfig = async () => {
7 | try {
8 | await getAuth().projectConfigManager().updateProjectConfig({
9 | passwordPolicyConfig: {
10 | enforcementState: "ENFORCE",
11 | constraints: {
12 | requireUppercase: true,
13 | requireLowercase: true,
14 | requireNonAlphanumeric: true,
15 | requireNumeric: true,
16 | minLength: 12,
17 | },
18 | },
19 | });
20 | console.log("Password policy updated successfully");
21 | } catch (error) {
22 | console.log("Error updating password policy: " + error);
23 | }
24 | };
25 |
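[Editor's note] The package's devDependencies (jest, ts-jest, @types/jest) suggest this can be unit-tested by mocking firebase-admin/auth. A hypothetical test sketch; the file path and mock shape are assumptions, not repository code:

// __tests__/firebase-password-policy.test.ts (hypothetical)
// jest.mock calls are hoisted above imports by ts-jest.
jest.mock("firebase-admin/auth", () => ({
  getAuth: () => ({
    projectConfigManager: () => ({
      updateProjectConfig: jest.fn().mockResolvedValue({}),
    }),
  }),
}));

import {setPasswordPolicyConfig} from "../src/impl/firebase-password-policy-impl";

it("resolves after updating the project config", async () => {
  await expect(setPasswordPolicyConfig()).resolves.toBeUndefined();
});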
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/src/index.ts:
--------------------------------------------------------------------------------
1 | import {initializeApp} from "firebase-admin/app";
2 | import * as impl from "./impl/firebase-password-policy-impl";
3 |
4 | initializeApp();
5 | void impl.setPasswordPolicyConfig();
6 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/tsconfig.dev.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": [
3 | ".eslintrc.js"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/functions/packages/firebase-password-policy/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "noImplicitReturns": true,
5 | "noUnusedLocals": true,
6 | "outDir": "lib",
7 | "sourceMap": true,
8 | "strict": true,
9 | "target": "es2017"
10 | },
11 | "compileOnSave": true,
12 | "include": [
13 | "src"
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/functions/packages/user-api/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | root: true,
3 | env: {
4 | es6: true,
5 | node: true,
6 | },
7 | extends: [
8 | "eslint:recommended",
9 | "plugin:import/errors",
10 | "plugin:import/warnings",
11 | "plugin:import/typescript",
12 | "google",
13 | "plugin:@typescript-eslint/recommended",
14 | ],
15 | parser: "@typescript-eslint/parser",
16 | parserOptions: {
17 | project: ["tsconfig.json", "tsconfig.dev.json"],
18 | sourceType: "module",
19 | },
20 | ignorePatterns: [
21 | "/lib/**/*", // Ignore built files.
22 | "**/*config.*", // Ignore config files.
23 | ],
24 | plugins: [
25 | "@typescript-eslint",
26 | "import",
27 | ],
28 | rules: {
29 | "quotes": ["error", "double"],
30 | "import/no-unresolved": 0,
31 | "indent": ["error", 2],
32 | },
33 | };
34 |
--------------------------------------------------------------------------------
/functions/packages/user-api/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled JavaScript files
2 | lib/**/*.js
3 | lib/**/*.js.map
4 |
5 | # TypeScript v1 declaration files
6 | typings/
7 |
8 | # Node.js dependency directory
9 | node_modules
10 |
--------------------------------------------------------------------------------
/functions/packages/user-api/jest.config.ts:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | preset: 'ts-jest',
3 | testEnvironment: 'node',
4 | testMatch: ['**/__tests__/**/*.ts', '**/?(*.)+(spec|test).ts'],
5 | };
6 |
--------------------------------------------------------------------------------
/functions/packages/user-api/tsconfig.dev.json:
--------------------------------------------------------------------------------
1 | {
2 | "include": [
3 | ".eslintrc.js"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/functions/packages/user-api/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "noImplicitReturns": true,
5 | "noUnusedLocals": true,
6 | "outDir": "lib",
7 | "sourceMap": true,
8 | "strict": true,
9 | "target": "es2017"
10 | },
11 | "compileOnSave": true,
12 | "include": [
13 | "src"
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/infra/artifact-registry/vars.tf:
--------------------------------------------------------------------------------
1 | #
2 | # MobilityData 2023
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | variable "project_id" {
18 | type = string
19 | description = "GCP project ID"
20 | }
21 |
22 | variable "gcp_region" {
23 | type = string
24 | description = "GCP region"
25 | }
26 |
27 | variable "environment" {
28 | type = string
29 | description = "API environment. Possible values: prod, staging and dev"
30 | }
31 |
32 | variable "artifact_repo_name" {
33 | type = string
34 | description = "Name of the artifact repository"
35 | }
--------------------------------------------------------------------------------
/infra/artifact-registry/vars.tfvars.rename_me:
--------------------------------------------------------------------------------
1 | # This file is a template for populating the artifact module variables
2 | # To set variables:
3 | # - Rename this file to vars.tfvars.
4 | # - Replace variable values.
5 | # - Execute your terraform script passing this file as a parameter, example: terraform plan -var-file=vars.tfvars
6 |
7 | project_id = {{PROJECT_ID}}
8 | gcp_region = {{REGION}}
9 | environment = {{ENVIRONMENT}}
10 | artifact_repo_name = {{ARTIFACT_REPO_NAME}}
--------------------------------------------------------------------------------
/infra/backend.conf.rename_me:
--------------------------------------------------------------------------------
1 | #
2 | # MobilityData 2023
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | # This file represents a template for setting up a remote terraform state.
18 | # If you choose to use a remote state:
19 | # - Create a GCP storage bucket with the target name.
20 | # - Make sure the `deployer` account has the right permissions on the newly created bucket.
21 | # - Rename this file to backend.conf
22 | # - Replace variable values.
23 | # - Execute: terraform init -backend-config=backend.conf
24 | # - Enjoy coding!
25 | # More info: https://developer.hashicorp.com/terraform/language/state/remote
26 |
27 | bucket = {{BUCKET_NAME}}
28 | prefix = {{OBJECT_PREFIX}}
29 |
--------------------------------------------------------------------------------
/infra/batch/vars.tfvars.rename_me:
--------------------------------------------------------------------------------
1 | # This file is a template for populating the terraform-init module variables
2 | # To set variables:
3 | # - Rename this file to vars.tfvars.
4 | # - Replace variable values.
5 | # - Execute your terraform script passing this file as a parameter, example: terraform plan -var-file=vars.tfvars
6 |
7 | gcp_region={{REGION}}
8 | project_id={{PROJECT_ID}}
9 | environment={{ENVIRONMENT}}
10 | deployer_service_account={{DEPLOYER_SERVICE_ACCOUNT}}
11 | job_schedule={{JOB_SCHEDULE}}
12 | datasets_bucket_name={{DATASETS_BUCKET_NAME}}
--------------------------------------------------------------------------------
/infra/postgresql/vars.tfvars.rename_me:
--------------------------------------------------------------------------------
1 | # This file is a template for populating the terraform-init module variables
2 | # To set variables:
3 | # - Rename this file to vars.tfvars.
4 | # - Replace variable values.
5 | # - Execute your terraform script passing this file as a parameter, example: terraform plan -var-file=vars.tfvars
6 |
7 | project_id = {{PROJECT_ID}}
8 | gcp_region = {{REGION}}
9 | environment = {{ENVIRONMENT}}
10 |
11 | deployer_service_account = {{DEPLOYER_SERVICE_ACCOUNT}}
12 |
13 | postgresql_instance_name = {{POSTGRE_SQL_INSTANCE_NAME}}
14 | postgresql_database_name = {{POSTGRE_SQL_DB_NAME}}
15 | postgresql_user_name = {{POSTGRE_USER_NAME}}
16 | postgresql_user_password = {{POSTGRE_USER_PASSWORD}}
17 | postgresql_db_instance = {{POSTGRE_INSTANCE_TIER}}
18 | max_db_connections = {{MAX_CONNECTIONS}}
--------------------------------------------------------------------------------
/infra/terraform-init/vars.tf:
--------------------------------------------------------------------------------
1 | #
2 | # MobilityData 2023
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | variable "project_id" {
18 | type = string
19 | description = "GCP project ID"
20 | }
21 |
22 | variable "gcp_region" {
23 | type = string
24 | description = "GCP region"
25 | }
26 |
27 | variable "terraform_state_bucket_name_prefix" {
28 | type = string
29 | description = "Bucket name where terraform state is persisted"
30 | }
31 |
32 | variable "environment" {
33 | type = string
34 | description = "API environment. Possible values: prod, staging and dev"
35 | }
36 |
--------------------------------------------------------------------------------
/infra/terraform-init/vars.tfvars.rename_me:
--------------------------------------------------------------------------------
1 | # This file is a template for populating the terraform-init module variables
2 | # To set variables:
3 | # - Rename this file to vars.tfvars
4 | # - Replace variable values.
5 | # - Execute your terraform script passing this file as a parameter, example: terraform plan -var-file=vars.tfvars
6 |
7 | project_id = {{PROJECT_ID}}
8 | gcp_region = {{REGION}}
9 | environment = {{ENVIRONMENT}}
10 |
11 | terraform_state_bucket_name_prefix = {{STATE_BUCKET_NAME_PREFIX}}
12 |
13 |
--------------------------------------------------------------------------------
/infra/vars.tfvars.rename_me:
--------------------------------------------------------------------------------
1 | # This file is a template for populating the variables
2 | # To set variables:
3 | # - Rename this file to vars.tfvars.
4 | # - Replace variable values.
5 | # - Execute your terraform script passing this file as a parameter, example: terraform plan -var-file=vars.tfvars
6 |
7 | project_id = {{PROJECT_ID}}
8 | gcp_region = {{REGION}}
9 | environment = {{ENVIRONMENT}}
10 | artifact_repo_name = {{ARTIFACT_REPO_NAME}}
11 |
12 | deployer_service_account = {{DEPLOYER_SERVICE_ACCOUNT}}
13 | feed_api_image_version = {{FEED_API_IMAGE_VERSION}}
14 |
15 | oauth2_client_id = {{OAUTH2_CLIENT_ID}}
16 | oauth2_client_secret = {{OAUTH2_CLIENT_SECRET}}
17 | global_rate_limit_req_per_minute = {{GLOBAL_RATE_LIMIT_REQ_PER_MINUTE}}
18 |
19 | validator_endpoint = {{VALIDATOR_ENDPOINT}}
20 | transitland_api_key = {{TRANSITLAND_API_KEY}}
21 |
22 | operations_oauth2_client_id = {{OPERATIONS_OAUTH2_CLIENT_ID}}
--------------------------------------------------------------------------------
/integration-tests/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 | exclude = .git,__pycache__,__init__.py,.mypy_cache,.pytest_cache,venv,build,.*,database_gen
4 | # Ignored because conflict with black
5 | extend-ignore = E203
--------------------------------------------------------------------------------
/integration-tests/.gitignore:
--------------------------------------------------------------------------------
1 | src/integration_tests_log.html
2 | src/*.csv
--------------------------------------------------------------------------------
/integration-tests/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | pandas
3 | gtfs_kit
4 | rich
5 |
--------------------------------------------------------------------------------
/integration-tests/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | # Linting requirements
2 | flake8==6.0.0
3 | black==23.3.0
4 | pre-commit==3.4.0
--------------------------------------------------------------------------------
/integration-tests/src/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/integration-tests/src/__init__.py
--------------------------------------------------------------------------------
/liquibase/changes/add_idxs.sql:
--------------------------------------------------------------------------------
1 | -- Feed
2 | CREATE INDEX idx_feed_id ON Feed(id);
3 | CREATE INDEX idx_feed_status ON Feed(status);
4 | CREATE INDEX idx_feed_provider_stable_id ON Feed(provider, stable_id);
5 |
6 | -- GTFSDataset
7 | CREATE INDEX idx_gtfsdataset_feed_id ON GTFSDataset(feed_id);
8 | CREATE INDEX idx_gtfsdataset_latest ON GTFSDataset(latest);
9 |
10 | -- LocationFeed
11 | CREATE INDEX idx_locationfeed_feed_id ON LocationFeed(feed_id);
12 | CREATE INDEX idx_locationfeed_location_id ON LocationFeed(location_id);
13 |
14 | -- ValidationReport
15 | CREATE INDEX idx_validationreport_validator_version ON ValidationReport(validator_version);
16 |
17 | -- ValidationReportGTFSDataset
18 | CREATE INDEX idx_vrgtfsdataset_dataset_id ON ValidationReportGTFSDataset(dataset_id);
19 | CREATE INDEX idx_vrgtfsdataset_report_id ON ValidationReportGTFSDataset(validation_report_id);
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_1046.sql:
--------------------------------------------------------------------------------
1 | -- Index to improve the filtering by feed_id and downloaded_at
2 | CREATE INDEX idx_gtfsdataset_feed_id_downloaded_at_desc ON GTFSDataset(feed_id, downloaded_at DESC);
3 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_1055.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE validationreport
2 | ADD COLUMN total_error INT DEFAULT 0,
3 | ADD COLUMN total_warning INT DEFAULT 0,
4 | ADD COLUMN total_info INT DEFAULT 0,
5 | ADD COLUMN unique_error_count INT DEFAULT 0,
6 | ADD COLUMN unique_warning_count INT DEFAULT 0,
7 | ADD COLUMN unique_info_count INT DEFAULT 0;
--------------------------------------------------------------------------------
/liquibase/changes/feat_1124.sql:
--------------------------------------------------------------------------------
1 | -- Insert a GbfsEndpoint row for each GbfsVersion that is missing the gbfs (autodiscovery) endpoint
2 | INSERT INTO GbfsEndpoint (id, gbfs_version_id, url, name, is_feature)
3 | SELECT
4 | Feed.stable_id || '_' || GbfsVersion.version AS id,
5 | GbfsVersion.id AS gbfs_version_id,
6 | GbfsVersion.url AS url,
7 | 'gbfs' AS name,
8 | false AS is_feature -- gbfs file is not a feature, see https://github.com/MobilityData/mobility-feed-api/issues/1125
9 | FROM GbfsVersion
10 | JOIN GbfsFeed ON GbfsVersion.feed_id = GbfsFeed.id
11 | JOIN Feed ON GbfsFeed.id = Feed.id
12 | WHERE NOT EXISTS (
13 | SELECT 1
14 | FROM GbfsEndpoint
15 | WHERE GbfsEndpoint.gbfs_version_id = GbfsVersion.id
16 | AND GbfsEndpoint.name = 'gbfs'
17 | );
18 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_1125.sql:
--------------------------------------------------------------------------------
1 | -- Update gbfsendpoint: mark the endpoint names listed below as features (is_feature = true) and all others as non-features
2 | UPDATE gbfsendpoint
3 | SET is_feature = CASE
4 | WHEN name IN (
5 | 'manifest',
6 | 'gbfs_versions',
7 | 'vehicle_types',
8 | 'station_status',
9 | 'vehicle_status',
10 | 'system_regions',
11 | 'system_pricing_plans',
12 | 'system_alerts',
13 | 'geofencing_zones'
14 | ) THEN true
15 | ELSE false
16 | END;
--------------------------------------------------------------------------------
/liquibase/changes/feat_1132.sql:
--------------------------------------------------------------------------------
1 | -- Add 'unpublished' to the OperationalStatus enum if it doesn't exist
2 | DO $$
3 | BEGIN
4 | -- Check if the enum already has the 'unpublished' value
5 | IF NOT EXISTS (
6 | SELECT 1
7 | FROM pg_enum
8 | WHERE enumlabel = 'unpublished'
9 | AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'operationalstatus')
10 | ) THEN
11 | -- Add 'unpublished' to the enum
12 | ALTER TYPE OperationalStatus ADD VALUE 'unpublished';
13 | RAISE NOTICE 'Added ''unpublished'' value to OperationalStatus enum';
14 | ELSE
15 | RAISE NOTICE 'The ''unpublished'' value already exists in OperationalStatus enum';
16 | END IF;
17 | EXCEPTION
18 | WHEN OTHERS THEN
19 | RAISE EXCEPTION 'Failed to add ''unpublished'' to OperationalStatus enum: %', SQLERRM;
20 | END $$;
21 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_1181.sql:
--------------------------------------------------------------------------------
1 | UPDATE gbfsendpoint
2 | SET
3 | is_feature = TRUE
4 | WHERE
5 | name = 'free_bike_status';
--------------------------------------------------------------------------------
/liquibase/changes/feat_1182.sql:
--------------------------------------------------------------------------------
1 | -- This SQL script adds indexes that enhance the GBFS SQL queries
2 |
3 | CREATE INDEX IF NOT EXISTS idx_gbfsversion_feed_id
4 | ON gbfsversion (feed_id);
5 |
6 | CREATE INDEX IF NOT EXISTS idx_redirectingid_source_id
7 | ON redirectingid (source_id);
8 |
9 | CREATE INDEX IF NOT EXISTS idx_redirectingid_target_id
10 | ON redirectingid (target_id);
11 |
12 | CREATE INDEX IF NOT EXISTS idx_externalid_feed_id
13 | ON externalid (feed_id);
14 |
15 | CREATE INDEX IF NOT EXISTS idx_officialstatushistory_feed_id
16 | ON officialstatushistory (feed_id);
17 |
18 | CREATE INDEX IF NOT EXISTS idx_gbfsendpoint_gv_id
19 | ON gbfsendpoint (gbfs_version_id);
20 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_1195.sql:
--------------------------------------------------------------------------------
1 | -- Add the source of gbfs_version to the gbfsversion table
2 | DO $$
3 | BEGIN
4 | IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'gbfs_source') THEN
5 | CREATE TYPE gbfs_source AS ENUM ('autodiscovery', 'gbfs_versions');
6 | END IF;
7 | END
8 | $$;
9 | ALTER TABLE gbfsversion DROP COLUMN IF EXISTS source;
10 | ALTER TABLE gbfsversion ADD COLUMN source gbfs_source DEFAULT 'gbfs_versions' NOT NULL;
11 |
12 | -- Remove latest tag in gbfsversion table
13 | ALTER TABLE gbfsversion
14 | DROP COLUMN IF EXISTS latest;
15 |
16 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_1200.sql:
--------------------------------------------------------------------------------
1 | -- This SQL copies the autodiscovery_url into producer_url for GBFS feeds that lack a producer URL
2 |
3 | -- Populate producer_url in the feed entity with the autodiscovery_url from the gbfsfeed table
4 | UPDATE feed
5 | SET producer_url = (
6 | SELECT
7 | auto_discovery_url
8 | FROM
9 | gbfsfeed
10 | WHERE
11 | feed.id = gbfsfeed.id
12 | )
13 | WHERE producer_url IS NULL AND data_type = 'gbfs';
14 |
15 |
16 | -- Update search
17 | REFRESH MATERIALIZED VIEW CONCURRENTLY feedsearch;
--------------------------------------------------------------------------------
/liquibase/changes/feat_13.sql:
--------------------------------------------------------------------------------
1 | CREATE EXTENSION IF NOT EXISTS postgis;
--------------------------------------------------------------------------------
/liquibase/changes/feat_149.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE RedirectingID
2 | ADD COLUMN redirect_comment VARCHAR(255);
--------------------------------------------------------------------------------
/liquibase/changes/feat_15.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE Feed
2 | ADD COLUMN feed_contact_email VARCHAR(255);
3 |
4 | ALTER TABLE Feed
5 | ADD COLUMN provider VARCHAR(255);
6 |
7 | DROP TABLE IF EXISTS ProviderFeed;
8 | DROP TABLE IF EXISTS Provider;
--------------------------------------------------------------------------------
/liquibase/changes/feat_263.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE ValidationReport
2 | ADD COLUMN validated_at TIMESTAMP,
3 | ADD COLUMN html_report VARCHAR(255),
4 | ADD COLUMN json_report VARCHAR(255);
--------------------------------------------------------------------------------
/liquibase/changes/feat_327.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE Component
2 | RENAME TO Feature;
3 | DROP TABLE ComponentGtfsDataset;
4 |
5 | CREATE TABLE FeatureValidationReport (
6 | feature VARCHAR(255) REFERENCES Feature(name),
7 | validation_id VARCHAR(255) REFERENCES ValidationReport(id),
8 | PRIMARY KEY (feature, validation_id)
9 | );
--------------------------------------------------------------------------------
/liquibase/changes/feat_533.sql:
--------------------------------------------------------------------------------
1 | -- Adding created_at column to Feed table with default value and not null constraint
2 | ALTER TABLE Feed ADD COLUMN created_at TIMESTAMPTZ NOT NULL DEFAULT '2024-02-08 00:00:00.000000';
--------------------------------------------------------------------------------
/liquibase/changes/feat_565.sql:
--------------------------------------------------------------------------------
1 | ALTER TYPE datatype ADD VALUE IF NOT EXISTS 'gbfs';
2 |
3 | -- Create the tables if they do not exist
4 | CREATE TABLE IF NOT EXISTS GBFS_Feed(
5 | id VARCHAR(255) PRIMARY KEY,
6 | operator VARCHAR(255),
7 | operator_url VARCHAR(255),
8 | auto_discovery_url VARCHAR(255),
9 | FOREIGN KEY (id) REFERENCES Feed(id)
10 | );
11 |
12 | CREATE TABLE IF NOT EXISTS GBFS_Version(
13 | feed_id VARCHAR(255) NOT NULL,
14 | version VARCHAR(6),
15 | url VARCHAR(255),
16 | PRIMARY KEY (feed_id, version),
17 | FOREIGN KEY (feed_id) REFERENCES GBFS_Feed(id)
18 | );
19 |
20 | -- Rename tables to use convention like GBFSFeed and GBFSVersion
21 | ALTER TABLE GBFS_Feed RENAME TO GBFSFeed;
22 | ALTER TABLE GBFS_Version RENAME TO GBFSVersion;
23 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_566.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE GBFSSnapshot(
2 | id VARCHAR(255) NOT NULL PRIMARY KEY,
3 | feed_id VARCHAR(255) NOT NULL,
4 | hosted_url VARCHAR(255) NOT NULL,
5 | downloaded_at TIMESTAMPTZ NOT NULL,
6 | stable_id VARCHAR(255) NOT NULL UNIQUE,
7 | FOREIGN KEY (feed_id) REFERENCES GBFSFeed(id)
8 | );
9 |
10 | CREATE TABLE GBFSValidationReport(
11 | id VARCHAR(255) NOT NULL PRIMARY KEY,
12 | gbfs_snapshot_id VARCHAR(255) NOT NULL,
13 | validated_at TIMESTAMPTZ NOT NULL,
14 | report_summary_url VARCHAR(255) NOT NULL,
15 | FOREIGN KEY (gbfs_snapshot_id) REFERENCES GBFSSnapshot(id)
16 | );
17 |
18 | CREATE TABLE GBFSNotice(
19 | keyword VARCHAR(255) NOT NULL,
20 | message TEXT NOT NULL,
21 | schema_path VARCHAR(255) NOT NULL,
22 | gbfs_file VARCHAR(255) NOT NULL,
23 | validation_report_id VARCHAR(255) NOT NULL,
24 | count INTEGER NOT NULL,
25 | FOREIGN KEY (validation_report_id) REFERENCES GBFSValidationReport(id),
26 | PRIMARY KEY (validation_report_id, keyword, gbfs_file, schema_path)
27 | );
28 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_57.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE Feed
2 | DROP COLUMN provider;
3 |
4 | CREATE TABLE Provider (
5 | id VARCHAR(255) PRIMARY KEY,
6 | short_name VARCHAR(255),
7 | long_name VARCHAR(255)
8 | );
9 |
10 | CREATE TABLE ProviderFeed (
11 | provider_id VARCHAR(255) REFERENCES Provider(id),
12 | feed_id VARCHAR(255) REFERENCES Feed(id),
13 | PRIMARY KEY (provider_id, feed_id)
14 | );
15 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_618.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE Location
2 | ADD COLUMN country VARCHAR(255);
3 |
4 | -- Create the join table Location_GtfsDataset
5 | CREATE TABLE Location_GTFSDataset (
6 | location_id VARCHAR(255) NOT NULL,
7 | gtfsdataset_id VARCHAR(255) NOT NULL,
8 | PRIMARY KEY (location_id, gtfsdataset_id),
9 | FOREIGN KEY (location_id) REFERENCES Location(id),
10 | FOREIGN KEY (gtfsdataset_id) REFERENCES GtfsDataset(id)
11 | );
12 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_622.sql:
--------------------------------------------------------------------------------
1 | DROP VIEW IF EXISTS location_with_translations;
2 | DROP VIEW IF EXISTS location_with_translations_en;
3 | CREATE VIEW location_with_translations_en AS
4 | SELECT
5 | l.id AS location_id,
6 | l.country_code,
7 | l.country,
8 | l.subdivision_name,
9 | l.municipality,
10 | country_translation.value AS country_translation,
11 | subdivision_name_translation.value AS subdivision_name_translation,
12 | municipality_translation.value AS municipality_translation
13 | FROM
14 | location l
15 | LEFT JOIN
16 | translation AS country_translation
17 | ON l.country = country_translation.key
18 | AND country_translation.type = 'country'
19 | AND country_translation.language_code = 'en'
20 | LEFT JOIN
21 | translation AS subdivision_name_translation
22 | ON l.subdivision_name = subdivision_name_translation.key
23 | AND subdivision_name_translation.type = 'subdivision_name'
24 | AND subdivision_name_translation.language_code = 'en'
25 | LEFT JOIN
26 | translation AS municipality_translation
27 | ON l.municipality = municipality_translation.key
28 | AND municipality_translation.type = 'municipality'
29 | AND municipality_translation.language_code = 'en';
--------------------------------------------------------------------------------
/liquibase/changes/feat_66.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE GTFSDataset
2 | RENAME COLUMN download_date TO downloaded_at;
3 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_741.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE OfficialStatusHistory(
2 | is_official BOOLEAN NOT NULL,
3 | feed_id VARCHAR(255) NOT NULL,
4 | reviewer_email VARCHAR(255) NOT NULL,
5 | timestamp TIMESTAMP NOT NULL,
6 | notes VARCHAR(255),
7 | FOREIGN KEY (feed_id) REFERENCES Feed(id),
8 | PRIMARY KEY (feed_id, timestamp)
9 | );
10 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_76.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE Feed
2 | ALTER COLUMN provider TYPE TEXT;
3 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_77.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS FeedLog;
2 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_794_2.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE Feed ADD COLUMN official BOOLEAN DEFAULT NULL;
--------------------------------------------------------------------------------
/liquibase/changes/feat_871.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE Feed ADD COLUMN official_updated_at TIMESTAMP DEFAULT NULL;
--------------------------------------------------------------------------------
/liquibase/changes/feat_879.sql:
--------------------------------------------------------------------------------
1 | ALTER TYPE Status ADD VALUE 'future';
--------------------------------------------------------------------------------
/liquibase/changes/feat_88.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE GTFSDataset
2 | DROP COLUMN IF EXISTS last_update_date,
3 | DROP COLUMN IF EXISTS creation_date;
--------------------------------------------------------------------------------
/liquibase/changes/feat_880.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE gtfsdataset ADD COLUMN service_date_range_start DATE DEFAULT NULL;
2 | ALTER TABLE gtfsdataset ADD COLUMN service_date_range_end DATE DEFAULT NULL;
3 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_927.sql:
--------------------------------------------------------------------------------
1 | -- Add 'published' to the OperationalStatus enum if it doesn't exist
2 | DO $$
3 | BEGIN
4 | -- Check if the enum already has the 'published' value
5 | IF NOT EXISTS (
6 | SELECT 1
7 | FROM pg_enum
8 | WHERE enumlabel = 'published'
9 | AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'operationalstatus')
10 | ) THEN
11 | -- Add 'published' to the enum
12 | ALTER TYPE OperationalStatus ADD VALUE 'published';
13 | RAISE NOTICE 'Added ''published'' value to OperationalStatus enum';
14 | ELSE
15 | RAISE NOTICE 'The ''published'' value already exists in OperationalStatus enum';
16 | END IF;
17 | EXCEPTION
18 | WHEN OTHERS THEN
19 | RAISE EXCEPTION 'Failed to add ''published'' to OperationalStatus enum: %', SQLERRM;
20 | END $$;
21 |
--------------------------------------------------------------------------------
/liquibase/changes/feat_927_2.sql:
--------------------------------------------------------------------------------
1 | -- Update all feeds with NULL operational_status to 'published'
2 | DO $$
3 | DECLARE
4 | updated_count INTEGER;
5 | BEGIN
6 | UPDATE Feed
7 | SET operational_status = 'published'
8 | WHERE operational_status IS NULL;
9 |
10 | GET DIAGNOSTICS updated_count = ROW_COUNT;
11 | RAISE NOTICE 'Updated % feeds to have operational_status = published', updated_count;
12 | EXCEPTION
13 | WHEN OTHERS THEN
14 | RAISE EXCEPTION 'Failed to update feeds with NULL operational_status: %', SQLERRM;
15 | END $$;
16 |
17 | -- Refresh the materialized view to reflect the changes
18 | DO $$
19 | BEGIN
20 | REFRESH MATERIALIZED VIEW FeedSearch;
21 | RAISE NOTICE 'Refreshed FeedSearch materialized view';
22 | EXCEPTION
23 | WHEN OTHERS THEN
24 | RAISE EXCEPTION 'Failed to refresh FeedSearch materialized view: %', SQLERRM;
25 | END $$;
26 |
27 | -- Final success message
28 | DO $$
29 | BEGIN
30 | RAISE NOTICE 'Migration completed successfully';
31 | END $$;
--------------------------------------------------------------------------------
/liquibase/changes/feat_951.sql:
--------------------------------------------------------------------------------
1 | CREATE INDEX IF NOT EXISTS idx_feed_osm_location_group_feed_id_stops_count ON feedosmlocationgroup (feed_id, stops_count DESC);
2 |
3 | CREATE INDEX IF NOT EXISTS idx_feed_osm_location_group_feed_id ON feedosmlocationgroup (feed_id);
4 |
5 | CREATE INDEX IF NOT EXISTS idx_feed_osm_location_group_group_id ON feedosmlocationgroup (group_id);
6 |
7 | CREATE INDEX IF NOT EXISTS idx_osm_location_group_group_id ON osmlocationgroup (group_id);
8 |
9 | CREATE INDEX IF NOT EXISTS idx_osm_location_group_geopolygon_group_id ON osmlocationgroupgeopolygon (group_id);
10 |
11 | CREATE INDEX IF NOT EXISTS idx_osm_location_group_geopolygon_osm_id ON osmlocationgroupgeopolygon (osm_id);
12 |
--------------------------------------------------------------------------------
/liquibase/changes/official_tag_update.sql:
--------------------------------------------------------------------------------
1 | -- Update the official tag for feeds that have a contact email and whose external id source is 'mdb'
2 |
3 | UPDATE public.feed f
4 | SET
5 | official = TRUE,
6 | official_updated_at = NOW()
7 | FROM public.externalid e
8 | WHERE f.id = e.feed_id
9 | AND f.feed_contact_email LIKE '%@%'
10 | AND e.source = 'mdb';
11 |
12 | -- Insert an officialstatushistory record for each feed with a contact email that is flagged official
13 | INSERT INTO public.officialstatushistory (is_official, feed_id, reviewer_email, timestamp, notes)
14 | SELECT
15 | official,
16 | id,
17 | 'api@mobilitydata.org',
18 | NOW(),
19 | 'Official status tag changed'
20 | FROM public.feed
21 | WHERE feed_contact_email LIKE '%@%'
22 | AND official = TRUE;
23 |
--------------------------------------------------------------------------------
/liquibase/population_prep_tables.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!-- NOTE: the XML element markup of this changelog was stripped during extraction;
3 |      the element names and changeSet attributes below are reconstructed assumptions.
4 |      Only the "ANY" context value and the SQL statements are original. -->
5 | <databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog">
6 |     <changeSet id="population_prep_tables" author="unknown" context="ANY">
7 |         <sql>
8 |             DELETE FROM LocationFeed;
9 |             DELETE FROM EntityTypeFeed;
10 |             DELETE FROM FeedReference;
11 |             DELETE FROM Location;
12 |             DELETE FROM EntityType;
13 |             DELETE FROM ExternalId;
14 |         </sql>
15 |     </changeSet>
16 | </databaseChangeLog>
--------------------------------------------------------------------------------
/scripts/.deploy-env-dev.rename_me:
--------------------------------------------------------------------------------
1 | # File contains the configuration used by api-deploy-dev.sh
2 | # Rename this file to: .deploy-env-dev
3 | PROJECT_ID=
4 | ENVIRONMENT=
5 | SERVICE_ACCOUNT_EMAIL=
6 | VPC_CONNECTOR=
7 | SECRETS=
8 | REGION=northamerica-northeast1
--------------------------------------------------------------------------------
/scripts/api-operations-gen.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #
4 | # This script generates the FastAPI server stubs. It uses the gen-operations-config.yaml file for additional properties.
5 | # For information regarding ignored generated files check .openapi-generator-ignore file.
6 | # As a requirement, you need to execute setup-openapi-generator.sh once.
7 | # Usage:
8 | #   api-operations-gen.sh
9 | #
10 |
11 | GENERATOR_VERSION=7.10.0
12 |
13 | # relative path
14 | SCRIPT_PATH="$(dirname -- "${BASH_SOURCE[0]}")"
15 | OPERATIONS_PATH=functions-python/operations_api
16 | OPENAPI_SCHEMA=$SCRIPT_PATH/../docs/OperationsAPI.yaml
17 | OUTPUT_PATH=$SCRIPT_PATH/../$OPERATIONS_PATH
18 | CONFIG_FILE=$SCRIPT_PATH/gen-operations-config.yaml
19 |
20 | echo "Generating FastAPI server stubs for Operations API from $OPENAPI_SCHEMA to $OUTPUT_PATH"
21 | # Keep the "--global-property apiTests=false" at the end, otherwise it will generate test files that we already have
22 | OPENAPI_GENERATOR_VERSION=$GENERATOR_VERSION $SCRIPT_PATH/bin/openapitools/openapi-generator-cli generate -g python-fastapi \
23 | -i $OPENAPI_SCHEMA -o $OUTPUT_PATH -c $CONFIG_FILE --global-property apiTests=false
24 |
25 |
--------------------------------------------------------------------------------
/scripts/api-start.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This script starts the uvicorn process listening on port 8080.
4 |
5 | # relative path
6 | SCRIPT_PATH="$(dirname -- "${BASH_SOURCE[0]}")"
7 | PORT=8080
8 | (cd $SCRIPT_PATH/../api/src && uvicorn main:app --host 0.0.0.0 --port $PORT --env-file ../../config/.env.local)
--------------------------------------------------------------------------------
/scripts/gen-config.yaml:
--------------------------------------------------------------------------------
1 | # Documentation: https://openapi-generator.tech/docs/generators/python-fastapi/
2 | additionalProperties:
3 | packageName: feeds_gen
4 | fastapiImplementationPackage: feeds.impl
5 | # Adding this commented line for future reference as it is not currently supported by the fastApi generator
6 | # legacyDiscriminatorBehavior: true
7 |
--------------------------------------------------------------------------------
/scripts/gen-operations-config.yaml:
--------------------------------------------------------------------------------
1 | # Documentation: https://openapi-generator.tech/docs/generators/python-fastapi/
2 | additionalProperties:
3 | packageName: feeds_operations_gen
4 | # modelNameSuffix: Api
5 | removeOperationIdPrefix: true
6 | fastapiImplementationPackage: feeds_operations.impl
7 | useTags: false
8 | # Adding this commented line for future reference as it is not currently supported by the fastApi generator
9 | # legacyDiscriminatorBehavior: true
10 |
--------------------------------------------------------------------------------
/scripts/lint-tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #
4 | # This script checks lint rules based on flake8.
5 | # Usage:
6 | #   lint-tests.sh
7 | #
8 |
9 | # absolute path
10 | ABS_SCRIPTPATH="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
11 |
12 | # function to execute the lint check in the path passed as parameter
13 | execute_lint() {
14 | printf "\nExecuting lint check in $1\n"
15 | cd $ABS_SCRIPTPATH/$1 || exit 1
16 | pip3 install --disable-pip-version-check virtualenv > /dev/null
17 |   python3 -m virtualenv venv > /dev/null
18 | venv/bin/python -m pip install -r requirements_dev.txt > /dev/null
19 | venv/bin/python -m flake8 && venv/bin/python -m black . --check
20 | if [ $? -ne 0 ]; then
21 | printf "\nError running lint\n"
22 | exit 1
23 | fi
24 | printf "\n"
25 | }
26 |
27 | execute_lint "../api/"
28 | execute_lint "../functions-python/"
29 | execute_lint "../integration-tests/"
30 |
31 |
--------------------------------------------------------------------------------
/scripts/lint-write.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #
4 | # This script checks lint rules with flake8 and applies black formatting.
5 | # Usage:
6 | # lint-write.sh
7 |
8 | # absolute path
9 | ABS_SCRIPTPATH="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
10 |
11 | # function to apply the lint fixes in the path passed as parameter
12 | execute_lint() {
13 | printf "\nExecuting lint write in $1\n"
14 | cd $ABS_SCRIPTPATH/$1 || exit 1
15 | pip3 install --disable-pip-version-check virtualenv > /dev/null
16 | python3 -m virtualenv venv > /dev/null
17 | venv/bin/python -m pip install -r requirements_dev.txt > /dev/null
18 | venv/bin/python -m flake8 && venv/bin/python -m black .
19 | if [ $? -ne 0 ]; then
20 | printf "\nError running lint\n"
21 | exit 1
22 | fi
23 | printf "\n"
24 | }
25 |
26 | execute_lint "../api/"
27 | execute_lint "../functions-python/"
28 | execute_lint "../integration-tests/"
29 |
30 |
--------------------------------------------------------------------------------
/scripts/local_docker/Dockerfile:
--------------------------------------------------------------------------------
1 | # This Docker image is used to locally test scripts that are executed in the CI.
2 | # How to run the container and have shell access:
3 | # docker build -t ubuntu-local-python311 .
4 | # docker run -it ubuntu-local-python311
5 |
6 | FROM ubuntu:latest
7 |
8 | # Install prerequisites and add deadsnakes PPA for newer Python versions
9 | RUN apt-get update && apt-get install -y software-properties-common \
10 | && add-apt-repository ppa:deadsnakes/ppa \
11 | && apt-get update
12 |
13 | # Install Python 3.11 and Git
14 | RUN apt-get install -y python3.11 python3.11-dev python3-pip git
15 |
16 | # Optional: update alternatives to make python3.11 the default
17 | RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1
18 |
19 | # Set working directory
20 | WORKDIR /app
21 |
22 | CMD ["bash"]
23 |
--------------------------------------------------------------------------------
/scripts/populate-db.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #
4 | # This script populates the local instance of the database.
5 | # As a requirement, you need to have the local instance of the database running on the port defined in config/.env.local
6 | # The csv file containing the data has to be in the same format as https://bit.ly/catalogs-csv
7 | # Usage:
8 | #   populate-db.sh <csv_filepath> [data_type]
9 | #
10 |
11 | # relative path
12 | SCRIPT_PATH="$(dirname -- "${BASH_SOURCE[0]}")"
13 |
14 | # Set the data_type, defaulting to 'gtfs'
15 | DATA_TYPE=${2:-gtfs}
16 |
17 | # Determine the script to run based on the data_type
18 | if [ "$DATA_TYPE" = "gbfs" ]; then
19 | SCRIPT_NAME="populate_db_gbfs.py"
20 | else
21 | SCRIPT_NAME="populate_db_gtfs.py"
22 | fi
23 |
24 | # Run the appropriate script
25 | (cd "$SCRIPT_PATH"/../api/ && pip3 install -r requirements.txt && PYTHONPATH=src python src/scripts/$SCRIPT_NAME --filepath "$1")
26 |
--------------------------------------------------------------------------------
/scripts/setup-openapi-generator.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #
4 | # This script downloads the openapi-generator-cli.sh locally.
5 | # The openapi-generator-cli.sh helps switch generator versions and makes the generation process CI-friendly.
6 | # More info, https://github.com/OpenAPITools/openapi-generator/blob/master/bin/utils/openapi-generator-cli.sh
7 | #
8 |
9 | # relative path
10 | SCRIPT_PATH="$(dirname -- "${BASH_SOURCE[0]}")"
11 |
12 | mkdir -p $SCRIPT_PATH/bin/openapitools
13 | curl https://raw.githubusercontent.com/OpenAPITools/openapi-generator/master/bin/utils/openapi-generator-cli.sh > $SCRIPT_PATH/bin/openapitools/openapi-generator-cli
14 | chmod u+x $SCRIPT_PATH/bin/openapitools/openapi-generator-cli
--------------------------------------------------------------------------------
/scripts/tunnel-kill.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #
4 | #
5 | # MobilityData 2023
6 | #
7 | # Licensed under the Apache License, Version 2.0 (the "License");
8 | # you may not use this file except in compliance with the License.
9 | # You may obtain a copy of the License at
10 | #
11 | # http://www.apache.org/licenses/LICENSE-2.0
12 | #
13 | # Unless required by applicable law or agreed to in writing, software
14 | # distributed under the License is distributed on an "AS IS" BASIS,
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | # See the License for the specific language governing permissions and
17 | # limitations under the License.
18 | #
19 | #
20 |
21 | #
22 | # This script kills the background process created by the tunnel-create.sh script.
23 | # Usage:
24 | # tunnel-kill.sh
25 | # Note: this script kills processes started with the parameters "ssh -o StrictHostKeyChecking=no -fN -L" and assumes you did not create any other ssh tunnel with those parameters.
26 |
27 | ps aux | grep "[s]sh -o StrictHostKeyChecking=no -fN -L" | awk '{print $2}' | xargs kill -9
28 |
--------------------------------------------------------------------------------
/web-app/.eslintignore:
--------------------------------------------------------------------------------
1 | build/
2 | node_modules/
3 |
--------------------------------------------------------------------------------
/web-app/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "browser": true,
4 | "es2021": true
5 | },
6 | "extends": [
7 | "standard-with-typescript",
8 | "plugin:react/recommended",
9 | "eslint:recommended",
10 | "plugin:@typescript-eslint/eslint-recommended",
11 | "plugin:@typescript-eslint/recommended",
12 | "prettier",
13 | "plugin:prettier/recommended"
14 | ],
15 | "parserOptions": {
16 | "ecmaVersion": "latest"
17 | },
18 | "plugins": [
19 | "react",
20 | "@typescript-eslint",
21 | "unused-imports",
22 | "prettier"
23 | ],
24 | "rules": {
25 | "@typescript-eslint/no-unused-vars": "error",
26 | "no-console": "warn",
27 | "unused-imports/no-unused-imports": "error",
28 | "react/react-in-jsx-scope": "off",
29 | "prettier/prettier": "error",
30 | "@typescript-eslint/ban-tslint-comment": "off",
31 | "react/prop-types": "off",
32 | "eqeqeq": "off"
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/web-app/.firebaserc:
--------------------------------------------------------------------------------
1 | {
2 | "projects": {
3 | "default": "mobility-feeds-dev",
4 | "dev": "mobility-feeds-dev",
5 | "qa": "mobility-feeds-qa",
6 | "prod": "mobility-feeds-prod"
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/web-app/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # production
12 | /build
13 |
14 | # misc
15 | .DS_Store
16 | .env.local
17 | .env.development.local
18 | .env.test.local
19 | .env.production.local
20 |
21 | npm-debug.log*
22 | yarn-debug.log*
23 | yarn-error.log*
24 |
25 | .firebase
26 |
27 | # Cypress
28 | cypress/screenshots
29 | cypress/videos
30 |
31 | .env.*
--------------------------------------------------------------------------------
/web-app/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "semi": true,
4 | "jsxSingleQuote": true
5 | }
6 |
--------------------------------------------------------------------------------
/web-app/babel.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   presets: [['@babel/preset-env', {targets: {node: 'current'}}], '@babel/preset-typescript'],
3 | };
--------------------------------------------------------------------------------
/web-app/cypress.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'cypress';
2 | import * as dotenv from 'dotenv';
3 | const localEnv = dotenv.config({ path: './src/.env.dev' }).parsed;
4 | const ciEnv = dotenv.config({ path: './src/.env.test' }).parsed;
5 |
6 | const isEnvEmpty = (obj?: dotenv.DotenvParseOutput): boolean => {
7 |   return obj == null || Object.keys(obj).length === 0;
8 | };
9 |
10 | const chosenEnv = isEnvEmpty(localEnv) ? ciEnv : localEnv;
11 |
12 | export default defineConfig({
13 | env: chosenEnv,
14 | e2e: {
15 | baseUrl: 'http://localhost:3000',
16 | },
17 | video: true,
18 | });
19 |
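[Editor's note] Values chosen by this config are exposed to specs through Cypress.env. An illustrative check; the 'email' key mirrors cypress.env.json.rename_me and is an assumption about what the env files contain:

// Illustrative spec sketch: reads a value selected by the config above.
describe('env wiring', () => {
  it('exposes the chosen environment to specs', () => {
    expect(Cypress.env('email')).to.be.a('string');
  });
});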
--------------------------------------------------------------------------------
/web-app/cypress.env.json.rename_me:
--------------------------------------------------------------------------------
1 | {
2 | "email": {{CYPRESS_EMAIL}},
3 | "currentPassword": {{CYPRESS_PWD}}
4 | }
5 |
--------------------------------------------------------------------------------
/web-app/cypress/e2e/home.cy.ts:
--------------------------------------------------------------------------------
1 | describe('Home page', () => {
2 | beforeEach(() => {
3 | cy.visit('/');
4 | });
5 |
6 | it('should render page header', () => {
7 | cy.get('[data-testid=websiteTile]')
8 | .should('exist')
9 | .contains('Mobility Database');
10 | });
11 |
12 | it('should render home page title', () => {
13 | cy.get('[data-testid=home-title]').should('exist');
14 | });
15 | });
16 |
--------------------------------------------------------------------------------
/web-app/cypress/e2e/resetpassword.cy.ts:
--------------------------------------------------------------------------------
1 | describe('Reset Password Screen', () => {
2 | beforeEach(() => {
3 | cy.visit('/forgot-password');
4 | });
5 |
6 | it('should render components', () => {
7 | cy.get('input[id="email"]').should('exist');
8 | });
9 |
10 |   it('should show an error when an invalid email is provided', () => {
11 | cy.get('input[id="email"]').type('not an email', { force: true });
12 | cy.get('[type="submit"]').click();
13 | cy.get('[data-testid=emailError]').should('exist');
14 | });
15 |
16 |   it('should show the captcha error when it is not accepted', () => {
17 | cy.get('iframe[title="reCAPTCHA"]').should('exist');
18 | cy.get('input[id="email"]').type('notvalid@e.c', { force: true });
19 | cy.get('[type="submit"]').click();
20 | cy.get('[data-testid=reCaptchaError]')
21 | .should('exist')
22 | .contains('You must verify you are not a robot.');
23 | });
24 | });
25 |
--------------------------------------------------------------------------------
/web-app/cypress/e2e/signin.cy.ts:
--------------------------------------------------------------------------------
1 | describe('Sign In page', () => {
2 | beforeEach(() => {
3 | cy.visit('/sign-in');
4 | });
5 |
6 | it('should render page header', () => {
7 | cy.get('[data-testid=websiteTile]')
8 | .should('exist')
9 | .contains('Mobility Database');
10 | });
11 |
12 | it('should render signin', () => {
13 | cy.get('[data-testid=signin]').should('exist');
14 | });
15 | });
16 |
--------------------------------------------------------------------------------
/web-app/cypress/fixtures/feed_test-516.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "test-516",
3 | "data_type": "gtfs",
4 | "status": "active",
5 | "created_at": "2024-02-08T00:00:00Z",
6 | "external_ids": [
7 | {
8 | "external_id": "516",
9 | "source": "test"
10 | }
11 | ],
12 | "provider": "Metropolitan Transit Authority (MTA)",
13 | "feed_name": "NYC Subway",
14 | "note": "",
15 | "feed_contact_email": "",
16 | "source_info": {
17 | "producer_url": "http://web.mta.info/developers/data/nyct/subway/google_transit.zip",
18 | "authentication_type": 0,
19 | "authentication_info_url": "",
20 | "api_key_parameter_name": "",
21 | "license_url": ""
22 | },
23 | "redirects": []
24 | }
--------------------------------------------------------------------------------
/web-app/cypress/support/e2e.ts:
--------------------------------------------------------------------------------
1 | // ***********************************************************
2 | // This example support/e2e.ts is processed and
3 | // loaded automatically before your test files.
4 | //
5 | // This is a great place to put global configuration and
6 | // behavior that modifies Cypress.
7 | //
8 | // You can change the location of this file or turn off
9 | // automatically serving support files with the
10 | // 'supportFile' configuration option.
11 | //
12 | // You can read more here:
13 | // https://on.cypress.io/configuration
14 | // ***********************************************************
15 |
16 | // Import commands.js using ES2015 syntax:
17 | import './commands';
18 |
19 | // Alternatively you can use CommonJS syntax:
20 | // require('./commands');
21 |
--------------------------------------------------------------------------------
/web-app/firebase.json:
--------------------------------------------------------------------------------
1 | {
2 | "hosting": {
3 | "public": "build",
4 | "ignore": [
5 | "firebase.json",
6 | "**/.*",
7 | "**/node_modules/**"
8 | ],
9 | "rewrites": [ {
10 | "source": "**",
11 | "destination": "/index.html"
12 | } ]
13 | },
14 | "emulators": {
15 | "hosting": {
16 | "port": 4040
17 | },
18 | "storage": {
19 | "port": 9199
20 | },
21 | "auth": {
22 | "port": 9099
23 | },
24 | "ui": {
25 | "enabled": true
26 | },
27 | "singleProjectMode": true
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/web-app/jest-global-setup.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is loaded before all tests.
3 | */
4 | const setup = async (): Promise<void> => {
5 | process.env.TZ = 'UTC';
6 | };
7 |
8 | export default setup;
9 |
--------------------------------------------------------------------------------
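The setup above pins the process timezone so date-sensitive assertions behave the same on every machine. A minimal sketch of the kind of test this protects (hypothetical spec, not a file from this repo):

    it('treats midnight UTC deterministically', () => {
      const d = new Date('2024-02-08T00:00:00Z');
      // getHours() is timezone-sensitive; this only passes because TZ === 'UTC'
      expect(d.getHours()).toBe(0);
      expect(d.getUTCFullYear()).toBe(2024);
    });
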
/web-app/public/assets/MOBILTYDATA_logo_purple_M.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/web-app/public/assets/MOBILTYDATA_logo_purple_M.png
--------------------------------------------------------------------------------
/web-app/public/assets/MOBILTYDATA_logo_purple_M.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/web-app/public/assets/MOBILTYDATA_logo_purple_M.webp
--------------------------------------------------------------------------------
/web-app/public/assets/rocket.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/web-app/public/assets/rocket.gif
--------------------------------------------------------------------------------
/web-app/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MobilityData/mobility-feed-api/963c7a22760545298b63cc010e9e3125e545d572/web-app/public/favicon.ico
--------------------------------------------------------------------------------
/web-app/public/locales/en/contactUs.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Contact Us",
3 | "email": {
4 | "title": "Email Us",
5 | "description": "For general inquiries regarding the Mobility Database API, please email us at"
6 | },
7 | "slack": {
8 | "title": "Join our Slack",
9 | "description": "Join the MobilityData Slack channel to ask questions, share feedback, and connect with others",
10 | "action": "Join Slack"
11 | },
12 | "contribute": {
13 | "title": "Contribute",
14 | "description": "Help us improve the Mobility Database by contributing to our open-source projects on GitHub",
15 | "action": "View on GitHub"
16 | },
17 | "addFeeds": {
18 | "title": "Add Feeds",
19 | "description": "Looking to add many feeds? You can contribute by heading over to our GitHub catalog repository",
20 | "action": "View Catalogs Repository"
21 | }
22 | }
--------------------------------------------------------------------------------
/web-app/public/locales/fr/common.json:
--------------------------------------------------------------------------------
1 | {
2 | "copyToClipboard": "Copier dans le presse-papiers",
3 | "copied": "Copié!",
4 | "name": "Nom",
5 | "email": "E-mail",
6 | "organization": "Organisation",
7 | "signOut": "Déconnexion",
8 | "unknown": "Inconnu"
9 | }
10 |
--------------------------------------------------------------------------------
/web-app/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "React App",
3 | "name": "Create React App Sample",
4 | "icons": [
5 | {
6 | "src": "favicon.ico",
7 | "sizes": "64x64 32x32 24x24 16x16",
8 | "type": "image/x-icon"
9 | }
10 | ],
11 | "start_url": ".",
12 | "display": "standalone",
13 | "theme_color": "#000000",
14 | "background_color": "#ffffff"
15 | }
16 |
--------------------------------------------------------------------------------
/web-app/public/robots.staging.txt:
--------------------------------------------------------------------------------
1 | User-agent: *
2 | Disallow: /
3 |
--------------------------------------------------------------------------------
/web-app/public/robots.txt:
--------------------------------------------------------------------------------
1 | User-agent: *
2 | Allow: /
3 | Sitemap: https://mobilitydatabase.org/sitemap.xml
--------------------------------------------------------------------------------
/web-app/src/.env.rename_me:
--------------------------------------------------------------------------------
1 | DISABLE_ESLINT_PLUGIN=true
2 | REACT_APP_FIREBASE_API_KEY={{REACT_APP_FIREBASE_API_KEY}}
3 | REACT_APP_FIREBASE_AUTH_DOMAIN={{REACT_APP_FIREBASE_AUTH_DOMAIN}}
4 | REACT_APP_FIREBASE_PROJECT_ID={{REACT_APP_FIREBASE_PROJECT_ID}}
5 | REACT_APP_FIREBASE_STORAGE_BUCKET={{REACT_APP_FIREBASE_STORAGE_BUCKET}}
6 | REACT_APP_FIREBASE_MESSAGING_SENDER_ID={{REACT_APP_FIREBASE_MESSAGING_SENDER_ID}}
7 | REACT_APP_FIREBASE_APP_ID={{REACT_APP_FIREBASE_APP_ID}}
8 | REACT_APP_RECAPTCHA_SITE_KEY={{REACT_APP_RECAPTCHA_SITE_KEY}}
9 | REACT_APP_GOOGLE_ANALYTICS_ID={{REACT_APP_GOOGLE_ANALYTICS_ID}}
10 | REACT_APP_REMOTE_CONFIG_MINIMUM_FETCH_INTERVAL_MILLI={{REACT_APP_REMOTE_CONFIG_MINIMUM_FETCH_INTERVAL_MILLI}}
11 | REACT_APP_FEED_API_BASE_URL={{REACT_APP_FEED_API_BASE_URL}}
12 |
--------------------------------------------------------------------------------
/web-app/src/.env.test:
--------------------------------------------------------------------------------
1 | DISABLE_ESLINT_PLUGIN=true
2 | REACT_APP_FIREBASE_API_KEY="REACT_APP_FIREBASE_API_KEY"
3 | REACT_APP_FIREBASE_AUTH_DOMAIN={{REACT_APP_FIREBASE_AUTH_DOMAIN}}
4 | REACT_APP_FIREBASE_PROJECT_ID="REACT_APP_FIREBASE_PROJECT_ID"
5 | REACT_APP_FIREBASE_STORAGE_BUCKET="REACT_APP_FIREBASE_STORAGE_BUCKET"
6 | REACT_APP_FIREBASE_MESSAGING_SENDER_ID="REACT_APP_FIREBASE_MESSAGING_SENDER_ID"
7 | REACT_APP_FIREBASE_APP_ID="REACT_APP_FIREBASE_APP_ID"
8 | REACT_APP_RECAPTCHA_SITE_KEY="{{REACT_APP_RECAPTCHA_SITE_KEY}}"
9 | REACT_APP_REMOTE_CONFIG_MINIMUM_FETCH_INTERVAL_MILLI="300000"
10 |
--------------------------------------------------------------------------------
/web-app/src/app/App.css:
--------------------------------------------------------------------------------
1 | .App {
2 | text-align: center;
3 | }
4 |
5 | .App-logo {
6 | height: 40vmin;
7 | pointer-events: none;
8 | }
9 |
10 | @media (prefers-reduced-motion: no-preference) {
11 | .App-logo {
12 | animation: App-logo-spin infinite 20s linear;
13 | }
14 | }
15 |
16 | .App-header {
17 | background-color: #282c34;
18 | min-height: 100vh;
19 | display: flex;
20 | flex-direction: column;
21 | align-items: center;
22 | justify-content: center;
23 | font-size: calc(10px + 2vmin);
24 | color: white;
25 | }
26 |
27 | .App-link {
28 | color: #61dafb;
29 | }
30 |
31 | @keyframes App-logo-spin {
32 | from {
33 | transform: rotate(0deg);
34 | }
35 | to {
36 | transform: rotate(360deg);
37 | }
38 | }
39 |
40 | #app-main-container {
41 | position: relative;
42 | min-height: 100vh;
43 | padding-bottom: 230px; /* footer space */
44 | box-sizing: border-box;
45 | }
46 |
--------------------------------------------------------------------------------
/web-app/src/app/components/ThemeToggle.tsx:
--------------------------------------------------------------------------------
1 | import { IconButton } from '@mui/material';
2 | import Brightness4Icon from '@mui/icons-material/Brightness4';
3 | import Brightness7Icon from '@mui/icons-material/Brightness7';
4 | import { useTheme } from '../context/ThemeProvider';
5 |
6 | const ThemeToggle = (): JSX.Element => {
7 | const { toggleTheme } = useTheme();
8 |
9 |   return (
10 |     <IconButton onClick={toggleTheme}>
11 |       {localStorage.getItem('theme') === 'dark' ? (
12 |         <Brightness7Icon /> // assumed mapping: sun icon shown while dark mode is active
13 |       ) : (
14 |         <Brightness4Icon />
15 |       )}
16 |     </IconButton>
17 |   );
18 | };
19 |
20 | export default ThemeToggle;
21 |
--------------------------------------------------------------------------------
/web-app/src/app/components/WarningContentBox.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { Box, Typography, colors } from '@mui/material';
3 | import { ContentBox } from './ContentBox';
4 | import { WarningAmberOutlined } from '@mui/icons-material';
5 |
6 | export const WarningContentBox = (
7 | props: React.PropsWithChildren,
8 | ): JSX.Element => {
9 |   return (
10 |     // NOTE: the component props below are assumed; the original attribute
11 |     // values were lost when the markup was stripped.
12 |     <ContentBox
13 |       width={{ xs: '100%' }}
14 |       title={''}
15 |       outlineColor={colors.yellow[900]}
16 |     >
17 |       <WarningAmberOutlined />
18 |       <Typography sx={{ display: 'flex', alignItems: 'center' }}>
19 |         {props.children}
20 |       </Typography>
21 |     </ContentBox>
22 |   );
23 | };
24 |
--------------------------------------------------------------------------------
/web-app/src/app/constants/Navigation.spec.ts:
--------------------------------------------------------------------------------
1 | import {
2 | defaultRemoteConfigValues,
3 | type RemoteConfigValues,
4 | } from '../interface/RemoteConfig';
5 | import { buildNavigationItems } from './Navigation';
6 |
7 | jest.mock('firebase/compat/app', () => ({
8 | initializeApp: jest.fn(),
9 | remoteConfig: jest.fn(() => ({
10 | settings: { minimumFetchIntervalMillis: 3600000 },
11 | })),
12 | }));
13 |
14 | describe('Navigation Elements', () => {
15 | it('should return feed nav item if feature flag enabled', () => {
16 | const featureFlags: RemoteConfigValues = {
17 | ...defaultRemoteConfigValues,
18 | enableFeedsPage: true,
19 | };
20 | const navigationItems = buildNavigationItems(featureFlags);
21 | const feedsNavigation = navigationItems.find(
22 | (item) => item.title === 'Feeds',
23 | );
24 | expect(feedsNavigation).toBeDefined();
25 | });
26 | });
27 |
--------------------------------------------------------------------------------
/web-app/src/app/constants/Validation.tsx:
--------------------------------------------------------------------------------
1 | export const passwordValidationRegex =
2 | /^(?=.*[a-z])(?=.*[A-Z])(?=.*[0-9])(?=.*[$*.[\]{}()?"!@#%&/\\,><':;|_~`-])(?=.{12,})/;
3 |
4 | export const passwordValidationError = (
5 |   <span> {/* outer tag assumed; the original markup was stripped */}
6 |     Password must
7 |     <ul>
8 |       <li>Contain at least one uppercase letter</li>
9 |       <li>Contain at least one lowercase letter</li>
10 |       <li>Contain at least one digit</li>
11 |       <li>
12 |         Contain at least one special char
13 |         {'(^ $ * . [ ] { } ( ) ? " ! @ # % & / \\ , > < \' : ; | _ ~ `)'}
14 |       </li>
15 |       <li>Be at least 12 chars long</li>
16 |     </ul>
17 |   </span>
18 | );
19 |
--------------------------------------------------------------------------------
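The regex above chains lookaheads, one per requirement, against the start of the string. A few illustrative checks (sample passwords are made up):

    import { passwordValidationRegex } from './Validation';

    passwordValidationRegex.test('Short1!');          // false: fewer than 12 characters
    passwordValidationRegex.test('nouppercase12!');   // false: no uppercase letter
    passwordValidationRegex.test('ValidPassword12!'); // true: upper, lower, digit, special, 12+ chars
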
/web-app/src/app/hooks/index.ts:
--------------------------------------------------------------------------------
1 | import {
2 | type TypedUseSelectorHook,
3 | useDispatch,
4 | useSelector,
5 | } from 'react-redux';
6 | import { type RootState, type AppDispatch } from '../store/store';
7 |
8 | // Use throughout your app instead of plain `useDispatch` and `useSelector`
9 | export const useAppDispatch = (): AppDispatch => useDispatch();
10 | export const useAppSelector: TypedUseSelectorHook<RootState> = useSelector;
11 |
--------------------------------------------------------------------------------
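These wrappers exist so call sites get RootState/AppDispatch typing for free. A hypothetical component using them (import paths illustrative):

    import { useAppSelector } from '../hooks';
    import { selectFeedsStatus } from '../store/feeds-selectors';

    const FeedsStatusLabel = (): JSX.Element => {
      // `status` is inferred as FeedStatus; no casting at the call site
      const status = useAppSelector(selectFeedsStatus);
      return <span>{status}</span>;
    };
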
/web-app/src/app/interface/ContextProviderProps.ts:
--------------------------------------------------------------------------------
1 | import { type ReactNode } from 'react';
2 |
3 | interface ContextProviderProps {
4 | children: ReactNode;
5 | }
6 |
7 | export default ContextProviderProps;
8 |
--------------------------------------------------------------------------------
/web-app/src/app/interface/Navigation.ts:
--------------------------------------------------------------------------------
1 | export default interface NavigationItem {
2 | title: string;
3 | color: string;
4 | target: string;
5 | external?: boolean;
6 | }
7 |
--------------------------------------------------------------------------------
/web-app/src/app/screens/Analytics/analytics.css:
--------------------------------------------------------------------------------
1 | .navigable-list-item:hover {
2 | text-decoration: underline;
3 | color: #000;
4 | }
5 |
6 | .navigable-list-item {
7 |   cursor: pointer;
8 | }
9 |
10 | .notice-severity-label {
11 |   border-radius: 5px;
12 |   padding: 5px;
13 |   margin-left: 5px;
14 |   margin-bottom: 2px;
15 |   width: fit-content;
16 | }
17 |
18 | .notice-severity-label.notice-severity-error {
19 |   background-color: #d54402;
20 |   color: white;
21 | }
22 |
23 | .notice-severity-label.notice-severity-warning {
24 |   background-color: #f3c280;
25 |   color: black;
26 | }
27 |
28 | .notice-severity-label.notice-severity-info {
29 |   background-color: #badfb7;
30 |   color: black;
31 | }
32 |
--------------------------------------------------------------------------------
/web-app/src/app/screens/FeedSubmission/Form/components/FormLabelDescription.tsx:
--------------------------------------------------------------------------------
1 | import { Typography } from '@mui/material';
2 | import { type ReactNode } from 'react';
3 |
4 | interface FormLabelDescriptionProps {
5 | children: ReactNode;
6 | }
7 |
8 | const FormLabelDescription: React.FC<FormLabelDescriptionProps> = ({
9 |   children,
10 | }) => {
11 |   return (
12 |     <Typography>
13 |       {children}
14 |     </Typography>
15 |   );
16 | };
17 |
18 | export default FormLabelDescription;
19 |
--------------------------------------------------------------------------------
/web-app/src/app/services/feeds/add-feed-form-service.ts:
--------------------------------------------------------------------------------
1 | import { getFunctions, httpsCallable } from 'firebase/functions';
2 | import { app } from '../../../firebase';
3 | import { type FeedSubmissionFormFormInput } from '../../screens/FeedSubmission/Form';
4 |
5 | export const submitNewFeedForm = async (
6 | formData: FeedSubmissionFormFormInput,
7 | ): Promise<void> => {
8 | const functions = getFunctions(app, 'northamerica-northeast1');
9 | const writeToSheet = httpsCallable(functions, 'writeToSheet');
10 | await writeToSheet(formData);
11 | };
12 |
--------------------------------------------------------------------------------
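`httpsCallable` returns a promise that rejects when the callable function errors, so callers are expected to wrap the submission. A hypothetical call site:

    import { submitNewFeedForm } from './add-feed-form-service';
    import { type FeedSubmissionFormFormInput } from '../../screens/FeedSubmission/Form';

    const handleSubmit = async (
      formData: FeedSubmissionFormFormInput,
    ): Promise<boolean> => {
      try {
        await submitNewFeedForm(formData);
        return true;
      } catch {
        return false; // let the form surface the failure
      }
    };
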
/web-app/src/app/services/index.ts:
--------------------------------------------------------------------------------
1 | export * from './profile-service';
2 |
--------------------------------------------------------------------------------
/web-app/src/app/store/feeds-selectors.ts:
--------------------------------------------------------------------------------
1 | import { type AllFeedsType } from '../services/feeds/utils';
2 | import { type FeedStatus } from '../types';
3 | import { type RootState } from './store';
4 |
5 | export const selectFeedsData = (state: RootState): AllFeedsType | undefined => {
6 | return state.feeds.data;
7 | };
8 |
9 | export const selectFeedsStatus = (state: RootState): FeedStatus => {
10 | return state.feeds.status;
11 | };
12 |
--------------------------------------------------------------------------------
/web-app/src/app/store/gbfs-analytics-selector.ts:
--------------------------------------------------------------------------------
1 | import { createSelector } from '@reduxjs/toolkit';
2 | import { type RootState } from './store';
3 | import { type GBFSFeedMetrics } from '../screens/Analytics/types';
4 |
5 | // Selector to get the GBFS feed metrics
6 | export const selectGBFSFeedMetrics = (state: RootState): GBFSFeedMetrics[] =>
7 | state.gbfsAnalytics.feedMetrics;
8 |
9 | // Selector to get the status of the GBFS analytics
10 | export const selectGBFSAnalyticsStatus = (
11 | state: RootState,
12 | ): 'loading' | 'loaded' | 'failed' => state.gbfsAnalytics.status;
13 |
14 | // Selector to get any error messages from GBFS analytics
15 | export const selectGBFSAnalyticsError = (
16 | state: RootState,
17 | ): string | undefined => state.gbfsAnalytics.error;
18 |
19 | // Selector to get the list of available analytics files
20 | export const selectAvailableGBFSFiles = createSelector(
21 | (state: RootState) => state.gbfsAnalytics.availableFiles,
22 | (availableFiles) => availableFiles,
23 | );
24 |
25 | // Selector to get the currently selected file
26 | export const selectSelectedGBFSFile = createSelector(
27 | (state: RootState) => state.gbfsAnalytics.selectedFile,
28 | (selectedFile) => selectedFile,
29 | );
30 |
--------------------------------------------------------------------------------
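The two `createSelector` calls above wrap identity projections, so they add little over the plain selectors; memoization pays off once the result function derives something. A hypothetical derived selector in the same style:

    import { createSelector } from '@reduxjs/toolkit';
    import { type RootState } from './store';

    // Recomputed only when feedMetrics changes, not on every store update.
    export const selectGBFSFeedMetricsCount = createSelector(
      (state: RootState) => state.gbfsAnalytics.feedMetrics,
      (feedMetrics) => feedMetrics.length,
    );
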
/web-app/src/app/store/gtfs-analytics-selector.ts:
--------------------------------------------------------------------------------
1 | import { type RootState } from './store';
2 | import { type GTFSFeedMetrics } from '../screens/Analytics/types';
3 |
4 | export const selectGTFSFeedMetrics = (state: RootState): GTFSFeedMetrics[] =>
5 | state.gtfsAnalytics.feedMetrics;
6 | export const selectGTFSAnalyticsStatus = (state: RootState): string =>
7 | state.gtfsAnalytics.status;
8 | export const selectGTFSAnalyticsError = (
9 | state: RootState,
10 | ): string | undefined => state.gtfsAnalytics.error;
11 |
--------------------------------------------------------------------------------
/web-app/src/app/store/reducers.ts:
--------------------------------------------------------------------------------
1 | import { combineReducers } from 'redux';
2 | import profileReducer from './profile-reducer';
3 | import feedReducer from './feed-reducer';
4 | import datasetReducer from './dataset-reducer';
5 | import feedsReducer from './feeds-reducer';
6 | import GTFSAnalyticsReducer from './gtfs-analytics-reducer';
7 | import GBFSAnalyticsReducer from './gbfs-analytics-reducer';
8 |
9 | const rootReducer = combineReducers({
10 | userProfile: profileReducer,
11 | feedProfile: feedReducer,
12 | dataset: datasetReducer,
13 | feeds: feedsReducer,
14 | gtfsAnalytics: GTFSAnalyticsReducer,
15 | gbfsAnalytics: GBFSAnalyticsReducer,
16 | });
17 |
18 | export default rootReducer;
19 |
--------------------------------------------------------------------------------
/web-app/src/app/store/saga/root-saga.ts:
--------------------------------------------------------------------------------
1 | import { all } from 'redux-saga/effects';
2 | import { watchAuth } from './auth-saga';
3 | import { watchProfile } from './profile-saga';
4 | import { watchFeed } from './feed-saga';
5 | import { watchDataset } from './dataset-saga';
6 | import { watchFeeds } from './feeds-saga';
7 | import { watchGTFSFetchFeedMetrics } from './gtfs-analytics-saga';
8 | import { watchGBFSFetchFeedMetrics } from './gbfs-analytics-saga';
9 |
10 | const rootSaga = function* (): Generator {
11 | yield all([
12 | watchAuth(),
13 | watchProfile(),
14 | watchFeed(),
15 | watchDataset(),
16 | watchFeeds(),
17 | watchGTFSFetchFeedMetrics(),
18 | watchGBFSFetchFeedMetrics(),
19 | ]);
20 | };
21 |
22 | export default rootSaga;
23 |
--------------------------------------------------------------------------------
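For context, a root saga like this is typically attached when the store is created. A sketch of the assumed wiring (import paths illustrative; the repo's actual store.ts may differ):

    import { configureStore } from '@reduxjs/toolkit';
    import createSagaMiddleware from 'redux-saga';
    import rootReducer from './reducers';
    import rootSaga from './saga/root-saga';

    const sagaMiddleware = createSagaMiddleware();

    export const store = configureStore({
      reducer: rootReducer,
      middleware: (getDefaultMiddleware) =>
        getDefaultMiddleware({ thunk: false }).concat(sagaMiddleware),
    });

    // run() must be called after the middleware is attached to the store
    sagaMiddleware.run(rootSaga);
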
/web-app/src/app/store/selectors.ts:
--------------------------------------------------------------------------------
1 | import { type RootState } from './store';
2 |
3 | export * from './profile-selectors';
4 | export * from './feed-selectors';
5 | export * from './dataset-selectors';
6 |
7 | export const selectLoadingApp = (state: RootState): boolean => {
8 | return (
9 | state.userProfile.status === 'login_in' ||
10 | state.userProfile.status === 'registering' ||
11 | state.userProfile.status === 'login_out' ||
12 | state.userProfile.status === 'sign_up' ||
13 | state.userProfile.isAppRefreshing
14 | );
15 | };
16 |
--------------------------------------------------------------------------------
/web-app/src/app/styles/Account.css:
--------------------------------------------------------------------------------
1 | #code-block {
2 | font-family: monospace;
3 | background-color: #121c2d;
4 | color: #d7deea;
5 | padding: 20px;
6 | border-radius: 5px;
7 | margin-top: 20px;
8 | word-wrap: break-word;
9 | }
10 |
11 | #code-block-content {
12 | font-family: monospace;
13 | word-wrap: break-word;
14 | width: 60vw;
15 | }
16 |
17 | .token-display-element {
18 |   margin: 5px;
19 |   padding: 0 15px;
20 |   border-radius: 2px;
21 |   display: flex;
22 |   justify-content: space-between;
23 |   align-items: center;
24 |   word-wrap: break-word;
25 |   width: 100%;
26 |   max-width: 610px;
27 | }
28 |
29 | .token-action-buttons {
30 |   align-self: baseline;
31 | }
32 |
--------------------------------------------------------------------------------
/web-app/src/app/styles/Footer.css:
--------------------------------------------------------------------------------
1 | .footer {
2 | width: 100%;
3 | text-align: center;
4 | padding: 40px 8px;
5 | font-size: 13px;
6 | box-sizing: border-box;
7 | min-height: 210px;
8 | position: absolute;
9 | bottom: 0;
10 | }
11 |
12 | .link-button:hover {
13 | color: white !important;
14 | background-color: #3959fa !important;
15 | }
16 |
17 | .link-button:hover svg path {
18 | fill: white;
19 | }
--------------------------------------------------------------------------------
/web-app/src/app/styles/PageHeader.style.ts:
--------------------------------------------------------------------------------
1 | import { styled, Typography, type TypographyProps } from '@mui/material';
2 |
3 | export const MainPageHeader = styled(Typography)({
4 | fontWeight: 700,
5 | });
6 |
7 | MainPageHeader.defaultProps = {
8 | variant: 'h4',
9 | color: 'primary',
10 | component: 'h1',
11 | };
12 |
--------------------------------------------------------------------------------
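Because the variant, color, and component are set as defaults, call sites stay terse. A hypothetical usage:

    import { MainPageHeader } from './PageHeader.style';

    export const FeedsHeader = (): JSX.Element => (
      // renders an <h1> element styled with the h4 variant in the primary color
      <MainPageHeader>Feeds</MainPageHeader>
    );
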
/web-app/src/app/styles/PageLayout.style.ts:
--------------------------------------------------------------------------------
1 | import { Container, styled } from '@mui/material';
2 |
3 | export const ColoredContainer = styled(Container)(({ theme }) => ({
4 | background: theme.palette.background.paper,
5 | borderRadius: '6px',
6 | paddingTop: theme.spacing(3),
7 | paddingBottom: theme.spacing(3),
8 | }));
9 |
--------------------------------------------------------------------------------
/web-app/src/app/styles/VerificationBadge.styles.ts:
--------------------------------------------------------------------------------
1 | import { type Theme } from '@mui/material';
2 | import { type SystemStyleObject } from '@mui/system';
3 |
4 | export const verificationBadgeStyle = (
5 | theme: Theme,
6 | ): SystemStyleObject<Theme> => ({
7 | background: `linear-gradient(25deg, ${theme.palette.primary.light}, ${theme.palette.primary.dark})`,
8 | color: 'white',
9 | });
10 |
--------------------------------------------------------------------------------
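MUI's `sx` prop accepts a `(theme) => SystemStyleObject` callback, which is why the style is written as a factory. A hypothetical usage:

    import { Chip } from '@mui/material';
    import { verificationBadgeStyle } from './VerificationBadge.styles';

    export const VerifiedBadge = (): JSX.Element => (
      <Chip label="Verified" sx={(theme) => verificationBadgeStyle(theme)} />
    );
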
/web-app/src/app/utils/config.spec.ts:
--------------------------------------------------------------------------------
1 | import { getEnvConfig } from './config';
2 |
3 | describe('getEnvConfig', () => {
4 | describe('valid env variable', () => {
5 | const originalEnv = process.env;
6 | beforeEach(() => {
7 | jest.resetModules();
8 | process.env = {
9 | ...originalEnv,
10 | REACT_APP_GOOGLE_ANALYTICS_ID: ' This is the value ',
11 | };
12 | });
13 |
14 |     it('should return the trimmed environment variable value when it is set', () => {
15 | expect(getEnvConfig('REACT_APP_GOOGLE_ANALYTICS_ID')).toEqual(
16 | 'This is the value',
17 | );
18 | });
19 | });
20 |
21 | describe('placeholder env variable', () => {
22 | const originalEnv = process.env;
23 | beforeEach(() => {
24 | jest.resetModules();
25 | process.env = {
26 | ...originalEnv,
27 | REACT_APP_GOOGLE_ANALYTICS_ID: '{{REACT_APP_GOOGLE_ANALYTICS_ID}}',
28 | };
29 | });
30 |
31 | it('should return an empty string if the value is a placeholder', () => {
32 | expect(getEnvConfig('REACT_APP_GOOGLE_ANALYTICS_ID')).toEqual('');
33 | });
34 | });
35 | });
36 |
--------------------------------------------------------------------------------
/web-app/src/app/utils/config.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Get environment config.
3 |  * If the value is a placeholder, it was never set properly; return an empty string.
4 | * @param key variable key
5 | * @returns variable value or empty string
6 | */
7 | export const getEnvConfig = (key: string): string => {
8 | const value = process.env[key] ?? '';
9 | return value === '{{' + key + '}}' ? '' : value.trim();
10 | };
11 |
--------------------------------------------------------------------------------
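Because unset values come back as '' rather than the raw '{{KEY}}' placeholder, call sites only need a length check. For example (key taken from .env.rename_me above):

    import { getEnvConfig } from './config';

    const feedApiBaseUrl = getEnvConfig('REACT_APP_FEED_API_BASE_URL');
    if (feedApiBaseUrl.length > 0) {
      // safe to configure the API client with a real URL here
    }
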
/web-app/src/app/utils/error.ts:
--------------------------------------------------------------------------------
1 | import { type FeedError, type ProfileError } from '../types';
2 | import { FirebaseError } from '@firebase/util';
3 |
4 | export const getAppError = (error: unknown): ProfileError | FeedError => {
5 | const appError: ProfileError | FeedError = {
6 | code: 'unknown',
7 | message: 'Unknown error',
8 | };
9 | if (error instanceof FirebaseError) {
10 | appError.code = error.code;
11 | let message = error.message;
12 | if (error.message.startsWith('Firebase: ')) {
13 | message = error.message.substring('Firebase: '.length);
14 | }
15 | appError.message = message;
16 | } else {
17 | appError.message = `${error as string}`;
18 | }
19 | return appError;
20 | };
21 |
--------------------------------------------------------------------------------
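The helper guarantees a `{ code, message }` shape whatever was thrown. A sketch of a typical call site (the failing call is a placeholder, not a real SDK function):

    import { getAppError } from './error';

    // placeholder standing in for any Firebase SDK call that may reject
    const somePlaceholderCall = async (): Promise<void> => {
      throw new Error('network unavailable');
    };

    const run = async (): Promise<void> => {
      try {
        await somePlaceholderCall();
      } catch (error) {
        const appError = getAppError(error); // always { code, message }
        console.error(appError.code, appError.message);
      }
    };
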
/web-app/src/firebase.ts:
--------------------------------------------------------------------------------
1 | import firebase from 'firebase/compat/app';
2 | import 'firebase/compat/remote-config';
3 | import 'firebase/compat/auth';
4 |
5 | const firebaseConfig = {
6 | apiKey: process.env.REACT_APP_FIREBASE_API_KEY,
7 | authDomain: process.env.REACT_APP_FIREBASE_AUTH_DOMAIN,
8 | projectId: process.env.REACT_APP_FIREBASE_PROJECT_ID,
9 | storageBucket: process.env.REACT_APP_FIREBASE_STORAGE_BUCKET,
10 | messagingSenderId: process.env.REACT_APP_FIREBASE_MESSAGING_SENDER_ID,
11 | appId: process.env.REACT_APP_FIREBASE_APP_ID,
12 | };
13 |
14 | export const app = firebase.initializeApp(firebaseConfig);
15 | export const remoteConfig = firebase.remoteConfig();
16 | remoteConfig.settings.minimumFetchIntervalMillis = Number(
17 |   process.env.REACT_APP_REMOTE_CONFIG_MINIMUM_FETCH_INTERVAL_MILLI ?? 3600000, // default to 1 hour
18 | );
19 |
20 | if (window.Cypress) {
21 | app.auth().useEmulator('http://localhost:9099/');
22 | }
23 |
--------------------------------------------------------------------------------
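`window.Cypress` is injected by the Cypress runner into the application under test, which is how the app knows to point Firebase Auth at the local emulator. For the strict compiler settings in this repo to accept that property access, a global augmentation along these lines is assumed somewhere in the project (sketch, not the repo's actual declaration):

    // e.g. in a cypress.d.ts picked up by tsconfig's "include"
    declare global {
      interface Window {
        Cypress?: unknown;
      }
    }
    export {};
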
/web-app/src/index.css:
--------------------------------------------------------------------------------
1 | body {
2 | margin: 0;
3 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen',
4 | 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue',
5 | sans-serif;
6 | -webkit-font-smoothing: antialiased;
7 | -moz-osx-font-smoothing: grayscale;
8 | }
9 |
10 | code {
11 | font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New',
12 | monospace;
13 | }
14 |
--------------------------------------------------------------------------------
/web-app/src/index.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom/client';
3 | import './index.css';
4 | import App from './app/App';
5 | import ReactGA from 'react-ga4';
6 | import { getEnvConfig } from './app/utils/config';
7 | import ContextProviders from './app/components/Context';
8 | import { CssBaseline } from '@mui/material';
9 | import { ThemeProvider } from './app/context/ThemeProvider';
10 |
11 | const gaId = getEnvConfig('REACT_APP_GOOGLE_ANALYTICS_ID');
12 | if (gaId.length > 0) {
13 | ReactGA.initialize(gaId);
14 | ReactGA.send('pageview');
15 | }
16 |
17 | const root = ReactDOM.createRoot(
18 | document.getElementById('root') as HTMLElement,
19 | );
20 | root.render(
21 |   <React.StrictMode>
22 |     <ThemeProvider> {/* provider nesting reconstructed; order assumed */}
23 |       <CssBaseline />
24 |       <ContextProviders>
25 |         <App />
26 |       </ContextProviders>
27 |     </ThemeProvider>
28 |   </React.StrictMode>,
29 | );
30 |
--------------------------------------------------------------------------------
/web-app/src/react-app-env.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="react-scripts" />
2 |
--------------------------------------------------------------------------------
/web-app/src/setupTests.ts:
--------------------------------------------------------------------------------
1 | // jest-dom adds custom jest matchers for asserting on DOM nodes.
2 | // allows you to do things like:
3 | // expect(element).toHaveTextContent(/react/i)
4 | // learn more: https://github.com/testing-library/jest-dom
5 | import '@testing-library/jest-dom';
6 |
7 | jest.mock('leaflet/dist/leaflet.css', () => ({}));
8 | jest.mock('react-leaflet', () => ({}));
9 |
10 | jest.mock('react-i18next', () => ({
11 | // this mock makes sure any components using the translate hook can use it without a warning being shown
12 | useTranslation: () => {
13 | return {
14 | t: (str: string) => str,
15 | i18n: {
16 | changeLanguage: () => new Promise(() => {}),
17 | },
18 | };
19 | },
20 | initReactI18next: {
21 | type: '3rdParty',
22 | init: () => {},
23 | },
24 | }));
25 |
--------------------------------------------------------------------------------
/web-app/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": [
5 | "dom",
6 | "dom.iterable",
7 | "esnext"
8 | ],
9 | "allowJs": true,
10 | "skipLibCheck": true,
11 | "esModuleInterop": true,
12 | "allowSyntheticDefaultImports": true,
13 | "strict": true,
14 | "forceConsistentCasingInFileNames": true,
15 | "noFallthroughCasesInSwitch": true,
16 | "module": "esnext",
17 | "moduleResolution": "node",
18 | "resolveJsonModule": true,
19 | "isolatedModules": true,
20 | "noEmit": true,
21 | "jsx": "react-jsx",
22 | "types": ["node", "jest", "cypress"],
23 | "paths": {
24 | "react": [ "./node_modules/@types/react" ]
25 | }
26 | },
27 | "include": [
28 | "src",
29 | "cypress/**/*.ts",
30 | "jest-global-setup.ts"
31 |   ]
32 | }
33 |
--------------------------------------------------------------------------------