├── .circleci └── config.yml ├── .github ├── ISSUE_TEMPLATE │ ├── api-access-python-questions---concerns.md │ ├── bug_report.md │ ├── config.yml │ └── feature_request.md └── workflows │ └── python-publish.yml ├── .gitignore ├── LICENSE ├── README.md ├── conftest.py ├── doc └── images │ ├── 1.1.1.png │ ├── 1.1.2.png │ ├── 1.1.3.png │ ├── 1.1.4.png │ ├── 1.4.1.png │ ├── 1.4.2.png │ ├── 1.5.1.png │ ├── 1.5.2.png │ ├── 2.2.1.png │ ├── 2.2.2.png │ ├── 2.2.3.png │ ├── 2.3.1.png │ ├── 3.1.1.png │ ├── 3.1.2.png │ ├── 3.1.3.png │ ├── 3.2.1.png │ ├── 4.4.1.png │ ├── 4.4.2.png │ ├── 5.2.1.png │ ├── 5.2.2.png │ ├── 6.1.2.png │ ├── mac_2.1.png │ ├── mac_2.2.png │ └── mac_2.3.png ├── extra_test_requires.txt ├── firststreet ├── __init__.py ├── __main__.py ├── api │ ├── __init__.py │ ├── adaptation.py │ ├── api.py │ ├── csv_format.py │ ├── economic.py │ ├── environmental.py │ ├── fema.py │ ├── historic.py │ ├── location.py │ ├── probability.py │ └── tile.py ├── errors.py ├── http_util.py ├── models │ ├── __init__.py │ ├── adaptation.py │ ├── api.py │ ├── economic.py │ ├── environmental.py │ ├── fema.py │ ├── geometry.py │ ├── historic.py │ ├── location.py │ ├── probability.py │ └── tile.py └── util.py ├── pytest.ini ├── requirements.txt ├── setup.py └── tests ├── api ├── test_adaptation.py ├── test_common.py ├── test_economic.py ├── test_environmental.py ├── test_fema.py ├── test_historic.py ├── test_location.py ├── test_probability.py └── test_tiles.py ├── test_api.py ├── test_command_line.bat ├── test_full.py └── test_http.py /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | workflows: 4 | version: 2 5 | test: 6 | jobs: 7 | - test-3.7 8 | - test-3.8 9 | - test-3.9 10 | 11 | jobs: 12 | test-3.7: &test-template 13 | docker: 14 | - image: circleci/python:3.7 15 | 16 | working_directory: ~/fsf-api-access-python 17 | 18 | steps: 19 | - checkout 20 | 21 | - restore_cache: 22 | keys: 23 | - v1-dependencies-{{ 
checksum "requirements.txt" }} 24 | 25 | - run: 26 | name: install dependencies 27 | command: | 28 | python3 -m venv venv 29 | . venv/bin/activate 30 | pip install -e .[testing] 31 | 32 | - save_cache: 33 | paths: 34 | - ./venv 35 | key: v1-dependencies-{{ checksum "requirements.txt" }} 36 | 37 | - run: 38 | name: run tests 39 | command: | 40 | . venv/bin/activate 41 | pytest tests 42 | 43 | test-3.8: 44 | <<: *test-template 45 | docker: 46 | - image: circleci/python:3.8 47 | 48 | test-3.9: 49 | <<: *test-template 50 | docker: 51 | - image: circleci/python:3.9 52 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/api-access-python-questions---concerns.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: API Access Python Questions / Concerns 3 | about: If you have any general questions for this GitHub Project, create a report 4 | for us to take a look into 5 | title: '' 6 | labels: question 7 | assignees: '' 8 | 9 | --- 10 | 11 | 12 | 13 | **Question:** 14 | Questions regarding the API Access Library 15 | 16 | **Additional Information:** 17 | Python Version: 3.X 18 | 19 | OS: Windows/Mac/Linux/Other 20 | 21 | 22 | Fsf-api-access-python Version: 23 | 24 | **Screenshots:** 25 | If applicable, add screenshots to help explain your problem. Drag and drop images to the box below. 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: If you found a bug, create an issue for us to take a look into 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | **Expected behaviour:** 13 | A clear and concise description of what you expected to happen. 14 | 15 | **Current behaviour:** 16 | A clear and concise description of what you currently experience. 
17 | 18 | **Python snippet or command-line used:** 19 | 20 | A snippet of the python code calling the library, or the command called in the terminal 21 | 22 | **Additional Information:** 23 | Python Version: 3.X 24 | 25 | 26 | Fsf-api-access-python Version: 27 | 28 | **Screenshots:** 29 | If applicable, add screenshots to help explain your problem. Drag and drop images to the box below. 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | **Is your feature request related to a problem? Please describe:** 13 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 14 | 15 | **Describe the solution you'd like:** 16 | A clear and concise description of what you want to happen. 17 | 18 | **Describe alternatives you've considered:** 19 | A clear and concise description of any alternative solutions or features you've considered. 20 | 21 | **Additional context:** 22 | Add any other context or screenshots about the feature request here. 23 | 24 | **Screenshots:** 25 | If applicable, add screenshots to help explain your problem. Drag and drop images to the box below. 
26 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package On Release 5 | 6 | on: 7 | release: 8 | types: [created] 9 | 10 | jobs: 11 | deploy: 12 | 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@main 17 | - name: Set up Python 3.6 18 | uses: actions/setup-python@v1 19 | with: 20 | python-version: '3.6' 21 | - name: Install dependencies 22 | run: | 23 | python -m pip install --upgrade pip 24 | pip install setuptools wheel twine 25 | - name: Build and publish 26 | env: 27 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 28 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 29 | run: | 30 | python setup.py sdist bdist_wheel 31 | twine upload --repository-url https://test.pypi.org/legacy/ dist/* 32 | twine upload dist/* 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # general things to ignore 2 | build/ 3 | dist/ 4 | docs/_sources/ 5 | docs/.doctrees 6 | .eggs/ 7 | *.egg-info/ 8 | *.egg 9 | *.py[cod] 10 | *.__pycache__/ 11 | *.so 12 | *~ 13 | .idea 14 | 15 | # virtualenv 16 | env/ 17 | venv/ 18 | 19 | # codecov / coverage 20 | .coverage 21 | cov_* 22 | coverage.xml 23 | 24 | # due to using and pytest 25 | .cache 26 | .pytest_cache/ 27 | .python-version 28 | pip 29 | .mypy_cache/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 
2020 First Street Foundation 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # First Street Foundation API Access Documentation (Python 3.7+) 2 | [![CircleCI](https://img.shields.io/circleci/build/gh/FirstStreet/fsf_api_access_python)](https://circleci.com/gh/FirstStreet/fsf_api_access_python) 3 | ![GitHub](https://img.shields.io/github/license/firststreet/fsf_api_access_python) 4 | 5 | The First Street Foundation API Access (Python) is a wrapper used to bulk extract flood data from the First Street Foundation API 6 | 7 | **Notice:** This API wrapper is subject to change. 8 | 9 | Please see the [**wiki pages here**](https://github.com/FirstStreet/fsf_api_access_python/wiki) for the most up to date information. 
10 | 11 | If there are any problems with either the wrapper or API, please post an [**issue here**](https://github.com/FirstStreet/fsf_api_access_python/issues). 12 | 13 | ### License 14 | ``` 15 | MIT License 16 | 17 | Copyright (c) 2020 First Street Foundation 18 | 19 | Permission is hereby granted, free of charge, to any person obtaining a copy 20 | of this software and associated documentation files (the "Software"), to deal 21 | in the Software without restriction, including without limitation the rights 22 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 23 | copies of the Software, and to permit persons to whom the Software is 24 | furnished to do so, subject to the following conditions: 25 | 26 | The above copyright notice and this permission notice shall be included in all 27 | copies or substantial portions of the Software. 28 | 29 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 30 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 31 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 32 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 33 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 34 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 35 | SOFTWARE. 
36 | ``` 37 | 38 | 39 | [git]: 40 | -------------------------------------------------------------------------------- /conftest.py: --------------------------------------------------------------------------------
# Author: Kelvin Lai
# Copyright: This module is owned by First Street Foundation

# External Imports
import pytest

# Internal Imports
from firststreet import Http

# Enable aiohttp's pytest plugin so async tests get an event-loop fixture.
pytest_plugins = 'aiohttp.pytest_plugin'


def pytest_addoption(parser):
    """Sets up command line arguments for pytest to skip the stress test.

    Registers the ``--runstress`` flag (defaults to False); see
    ``pytest_collection_modifyitems`` below for how it is consumed.
    """
    parser.addoption("--runstress", action="store_true", default=False, help="runs the stress test")


def pytest_collection_modifyitems(config, items):
    """Skips the stress test if the --runstress argument is not set.

    Every collected test carrying the ``stress`` keyword/marker gets a skip
    marker unless ``--runstress`` was passed on the command line.
    """
    if config.getoption("--runstress"):
        return
    skip_stress = pytest.mark.skip(reason="need --runstress option to run")
    for item in items:
        if "stress" in item.keywords:
            item.add_marker(skip_stress)


@pytest.fixture(scope='session', autouse=True)
def setup_connection(request):
    """Session-wide Http client shared by the test suite.

    Built with an empty API key, a connection limit of 100, and a rate limit
    of 4950 requests per 60-second period.
    NOTE(review): 4950 here vs the 4990 default used elsewhere in the package
    — presumably deliberate headroom for tests; confirm.
    """
    return Http("", 100, 4950, 60)

-------------------------------------------------------------------------------- /doc/images/1.1.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.1.1.png -------------------------------------------------------------------------------- /doc/images/1.1.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.1.2.png -------------------------------------------------------------------------------- /doc/images/1.1.3.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.1.3.png -------------------------------------------------------------------------------- /doc/images/1.1.4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.1.4.png -------------------------------------------------------------------------------- /doc/images/1.4.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.4.1.png -------------------------------------------------------------------------------- /doc/images/1.4.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.4.2.png -------------------------------------------------------------------------------- /doc/images/1.5.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.5.1.png -------------------------------------------------------------------------------- /doc/images/1.5.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/1.5.2.png -------------------------------------------------------------------------------- /doc/images/2.2.1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/2.2.1.png -------------------------------------------------------------------------------- /doc/images/2.2.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/2.2.2.png -------------------------------------------------------------------------------- /doc/images/2.2.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/2.2.3.png -------------------------------------------------------------------------------- /doc/images/2.3.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/2.3.1.png -------------------------------------------------------------------------------- /doc/images/3.1.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/3.1.1.png -------------------------------------------------------------------------------- /doc/images/3.1.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/3.1.2.png -------------------------------------------------------------------------------- /doc/images/3.1.3.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/3.1.3.png -------------------------------------------------------------------------------- /doc/images/3.2.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/3.2.1.png -------------------------------------------------------------------------------- /doc/images/4.4.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/4.4.1.png -------------------------------------------------------------------------------- /doc/images/4.4.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/4.4.2.png -------------------------------------------------------------------------------- /doc/images/5.2.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/5.2.1.png -------------------------------------------------------------------------------- /doc/images/5.2.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/5.2.2.png -------------------------------------------------------------------------------- /doc/images/6.1.2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/6.1.2.png -------------------------------------------------------------------------------- /doc/images/mac_2.1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/mac_2.1.png -------------------------------------------------------------------------------- /doc/images/mac_2.2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/mac_2.2.png -------------------------------------------------------------------------------- /doc/images/mac_2.3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FirstStreet/fsf_api_access_python/7ded0b933bcb54cda1ee9c4f05e7b691516e3a66/doc/images/mac_2.3.png -------------------------------------------------------------------------------- /extra_test_requires.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | -------------------------------------------------------------------------------- /firststreet/__init__.py: -------------------------------------------------------------------------------- 1 | """A Python module for interacting with the First Street Foundation API""" 2 | # Author: Kelvin Lai 3 | # Copyright: This module is owned by First Street Foundation 4 | 5 | # Standard Imports 6 | import logging 7 | 8 | # Internal Imports 9 | from firststreet.api.adaptation import Adaptation 10 | from firststreet.api.environmental import Environmental 11 | from firststreet.api.fema import Fema 12 | from firststreet.api.historic import Historic 13 | from firststreet.api.location import Location 14 | from 
firststreet.api.probability import Probability
from firststreet.api.tile import Tile
from firststreet.api.economic import AAL, AVM, Economic
from firststreet.errors import MissingAPIKeyError
from firststreet.http_util import Http


class FirstStreet:
    """A FirstStreet allows communication with the First Street Foundation API. This handles constructing and sending
    HTTP requests to the First Street Foundation API, and parses any response received into the appropriate object.

    Attributes:
        api_key (str): A string specifying the API key.
        connection_limit (int): max number of connections to make
        rate_limit (int): max number of requests during the period
        rate_period (int): period of time (in seconds) for the rate limit
        version (str): The version to call the API with
        log (bool): To log the outputs on info level
    Example:
        ```python
        import os
        import firststreet

        fs = firststreet.FirstStreet(os.environ['FIRSTSTREET_API_KEY'])
        location_detail = fs.location.get_detail(["450350223646"], "property")
        ```
    Raises:
        MissingAPIKeyError: If the API key is not provided
    """

    def __init__(self, api_key=None, connection_limit=100, rate_limit=4990, rate_period=60, version=None, log=True):

        # Fail fast: every API call requires a key, so refuse to construct without one.
        if not api_key:
            raise MissingAPIKeyError('Missing API Key.')

        # Opt-in INFO-level logging with a timestamped format for all loggers.
        if log:
            logging.basicConfig(level=logging.INFO,
                                format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')

        # Single shared HTTP layer; every product client below reuses it.
        self.http = Http(api_key, connection_limit, rate_limit, rate_period, version)
        self.location = Location(self.http)
        self.probability = Probability(self.http)
        self.historic = Historic(self.http)
        self.adaptation = Adaptation(self.http)
        self.environmental = Environmental(self.http)
        self.fema = Fema(self.http)
        self.tile = Tile(self.http)
        self.aal = AAL(self.http)
        self.avm = AVM(self.http)
        self.economic = Economic(self.http)
-------------------------------------------------------------------------------- /firststreet/__main__.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned by First Street Foundation 3 | 4 | # Standard Imports 5 | import argparse 6 | import ast 7 | import os 8 | import logging 9 | from distutils.util import strtobool 10 | import sys 11 | 12 | # Internal Imports 13 | import firststreet 14 | from firststreet.errors import InvalidArgument 15 | from firststreet.util import read_search_items_from_file 16 | 17 | 18 | if __name__ == "__main__": 19 | 20 | repeat = True 21 | 22 | while repeat: 23 | parser = argparse.ArgumentParser(description="Description for my parser") 24 | parser.add_argument("-p", "--product", help="Example: adaptation_detail", required=False) 25 | parser.add_argument("-api_key", "--api_key", required=False) 26 | parser.add_argument("-v", "--version", required=False) 27 | parser.add_argument("-log", "--log", help="Example: False", required=False, default="True") 28 | parser.add_argument("-connection_limit", "--connection_limit", help="Example: 100", 29 | required=False, default="100") 30 | parser.add_argument("-rate_limit", "--rate_limit", help="Example: 4990", required=False, default="4990") 31 | parser.add_argument("-rate_period", "--rate_period", help="Example: 60", required=False, default="60") 32 | parser.add_argument("-o", "--output_dir", help="Example: /output", required=False) 33 | parser.add_argument("-s", "--search_items", help="Example: 28,29", required=False,) 34 | parser.add_argument("-l", "--location_type", help="Example: property", required=False) 35 | parser.add_argument("-y", "--year", required=False) 36 | parser.add_argument("-rp", "--return_period", required=False) 37 | parser.add_argument("-eid", "--event_id", required=False) 38 | parser.add_argument("-e", "--extra_param", required=False) 39 | # deprecated file parameter. 
Will be removed in a later version 40 | parser.add_argument("-f", "--file", help="Example: ./sample.txt", required=False) 41 | 42 | argument = parser.parse_args() 43 | 44 | # Select product if not provided 45 | if not argument.product: 46 | argument.product = input("Input product (Ex: location.get_detail). Arguments and brackets are not needed: " 47 | ).lower() 48 | 49 | if argument.product not in ['adaptation.get_detail', 50 | 'adaptation.get_summary', 51 | 'adaptation.get_detail_by_location', 52 | 'probability.get_depth', 53 | 'probability.get_chance', 54 | 'probability.get_count_summary', 55 | 'probability.get_cumulative', 56 | 'probability.get_count', 57 | 'historic.get_event', 58 | 'historic.get_summary', 59 | 'historic.get_events_by_location', 60 | 'location.get_detail', 61 | 'location.get_summary', 62 | 'fema.get_nfip', 63 | 'environmental.get_precipitation', 64 | 'tile.get_probability_depth', 65 | 'tile.get_historic_event', 66 | 'aal.get_summary', 67 | 'avm.get_avm', 68 | 'avm.get_provider', 69 | 'economic.get_property_nfip']: 70 | logging.error("Product not found. 
Please check that the argument" 71 | " provided is correct: {}".format(argument.product)) 72 | input("Press Enter to continue...") 73 | sys.exit() 74 | 75 | if not argument.location_type and (argument.product == 'adaptation.get_summary' 76 | or argument.product == 'adaptation.get_detail_by_location' 77 | or argument.product == 'probability.get_count' 78 | or argument.product == 'historic.get_summary' 79 | or argument.product == 'historic.get_events_by_location' 80 | or argument.product == 'location.get_detail' 81 | or argument.product == 'location.get_summary' 82 | or argument.product == 'fema.get_nfip' 83 | or argument.product == 'aal.get_summary'): 84 | 85 | argument.location_type = input("Input location type (Ex: property): ").lower() 86 | if argument.location_type not in ['property', 'neighborhood', 'city', 'zcta', 87 | 'tract', 'county', 'cd', 'state']: 88 | logging.error("Location type not found. Please check that the argument" 89 | " provided is correct: {}".format(argument.location_type)) 90 | input("Press Enter to continue...") 91 | sys.exit() 92 | 93 | if not argument.year and (argument.product == 'tile.get_probability_depth'): 94 | argument.year = input("Input probability depth tile year: ") 95 | 96 | if not argument.return_period and (argument.product == 'tile.get_probability_depth'): 97 | argument.return_period = input("Input probability depth tile return period: ") 98 | 99 | if not argument.event_id and (argument.product == 'tile.get_historic_event'): 100 | argument.event_id = input("Input historic event tile event id: ") 101 | 102 | if not argument.extra_param and (argument.product == 'aal.get_summary'): 103 | aal_variable = input("Adjust calculator variables (Y/N)? 
") 104 | if aal_variable.lower() == "y": 105 | input_params = [] 106 | 107 | calculator_avm = input("Input custom AVM value (Ex: 234212) or leave blank for default: ") 108 | if calculator_avm != '': 109 | input_params += ["avm:{}".format(calculator_avm)] 110 | calculator_depths = input("Input depths list (Ex: [11,12,30]) or leave blank for default: ") 111 | if calculator_depths != '': 112 | input_params += ["depths:{}".format(calculator_depths)] 113 | calculator_basement = input("Input basement boolean (Ex: True/False) or leave blank for default: ") 114 | if calculator_basement != '': 115 | input_params += ["basement:{}".format(calculator_basement)] 116 | calculator_elevation = input("Input floor elevation value: (Ex: 22) or leave blank for default: ") 117 | if calculator_elevation != '': 118 | input_params += ["floorElevation:{}".format(calculator_elevation)] 119 | calculator_units = input("Input number of units: (Ex: 2) or leave blank for default: ") 120 | if calculator_units != '': 121 | input_params += ["units:{}".format(calculator_units)] 122 | calculator_stories = input("Input number of stories: (Ex: 1) or leave blank for default: ") 123 | if calculator_stories != '': 124 | input_params += ["stories:{}".format(calculator_stories)] 125 | 126 | argument.extra_param = ";".join(input_params) 127 | 128 | if not argument.extra_param and (argument.product == 'avm.get_avm'): 129 | avm_provider = input("Adjust provider id (Y/N)? 
") 130 | if avm_provider.lower() == "y": 131 | input_params = input("Input provider id (Ex: 2): ") 132 | if input_params != '': 133 | argument.extra_param = "providerid:{}".format(input_params) 134 | 135 | # Try to get the API key either from env var or the parameter 136 | if not argument.api_key: 137 | env_var_name = 'FSF_API_KEY' 138 | try: 139 | api_key = os.environ[env_var_name] 140 | except KeyError: 141 | api_key = input("Input API key: ") 142 | else: 143 | api_key = argument.api_key 144 | 145 | # Input the search item file 146 | if not argument.search_items: 147 | argument.search_items = input("Input file name of search items. Either relative or absolute (Ex: " 148 | "'FSIDs_for_NY.csv', or 'C:\\Users\\test_user\\Documents\\prop_geo_sc.csv'): " 149 | ) 150 | 151 | if not os.path.isfile(argument.search_items): 152 | logging.error("Input file not found, please check if the file is at the path: {}".format( 153 | os.path.abspath(argument.search_items))) 154 | input("Press Enter to continue...") 155 | sys.exit() 156 | 157 | # Adjust the connection variables 158 | if not argument.connection_limit and not argument.rate_limit and not argument.rate_period: 159 | connection_adjust = input("Adjust connection parameters (Y/N)? 
Defaults to 100 connections, with a rate " 160 | "limit of 4990 calls per 60 seconds: ") 161 | if connection_adjust.lower() == "y": 162 | input_params = input("Input new connection limit (default 100): ") 163 | if input_params != '': 164 | argument.connection_limit = "providerid:{}".format(input_params) 165 | input_params = input("Input new rate limit (default 5000): ") 166 | if input_params != '': 167 | argument.rate_limit = "providerid:{}".format(input_params) 168 | input_params = input("Input new rate period in seconds (default 60): ") 169 | if input_params != '': 170 | argument.rate_period = "providerid:{}".format(input_params) 171 | else: 172 | argument.connection_limit = 100 173 | argument.rate_limit = 4990 174 | argument.rate_period = 60 175 | 176 | # Adjust the output directory 177 | if not argument.output_dir: 178 | connection_adjust = input("Change output directory (Y/N)? Defaults to output_data folder in the " 179 | "current directory: ") 180 | if connection_adjust.lower() == "y": 181 | argument.output_dir = input("Input new output directory location: ") 182 | 183 | # Reads a file or converts search items into a list 184 | search_items = [] 185 | if argument.search_items: 186 | 187 | # If file, read addresses from file 188 | if os.path.isfile(argument.search_items): 189 | search_items = read_search_items_from_file(argument.search_items) 190 | else: 191 | items = argument.search_items.strip().split(";") 192 | if len(items) == 1: 193 | logging.warning("Could not find the file '{}'. Treating the input as a search_item instead. " 194 | "If this is unexpected, check the spelling or path of the input" 195 | .format(argument.search_items)) 196 | for search_item in items: 197 | try: 198 | search_items.append(ast.literal_eval(search_item)) 199 | except (SyntaxError, ValueError): 200 | search_items.append(search_item) 201 | 202 | if argument.file: 203 | logging.warning("'file' argument deprecated and will be removed. Use `-s path_to_file` instead. 
" 204 | "Ex: `-s testing/sample.txt`") 205 | search_items += read_search_items_from_file(argument.file) 206 | 207 | # Ensure there is at least a product and search item 208 | if search_items: 209 | 210 | limit = int(argument.connection_limit) 211 | rate_limit = int(argument.rate_limit) 212 | rate_period = int(argument.rate_period) 213 | 214 | formatted_params = {} 215 | 216 | if argument.extra_param: 217 | for element in argument.extra_param.split(";"): 218 | key, value = element.split(":") 219 | formatted_params[key] = ast.literal_eval(value) 220 | 221 | fs = firststreet.FirstStreet(api_key, 222 | version=argument.version, 223 | connection_limit=limit, 224 | rate_limit=rate_limit, 225 | rate_period=rate_period, 226 | log=bool(strtobool(argument.log))) 227 | 228 | # Set to lower for case insensitive 229 | argument.product = argument.product.lower() 230 | 231 | try: 232 | if argument.product == 'adaptation.get_detail': 233 | fs.adaptation.get_detail(search_items, 234 | csv=True, 235 | output_dir=argument.output_dir, 236 | extra_param=formatted_params) 237 | 238 | elif argument.product == 'adaptation.get_summary': 239 | fs.adaptation.get_summary(search_items, 240 | argument.location_type, 241 | csv=True, 242 | output_dir=argument.output_dir, 243 | extra_param=formatted_params) 244 | 245 | elif argument.product == 'adaptation.get_detail_by_location': 246 | fs.adaptation.get_detail_by_location(search_items, 247 | argument.location_type, 248 | csv=True, 249 | output_dir=argument.output_dir, 250 | extra_param=formatted_params) 251 | 252 | elif argument.product == 'probability.get_depth': 253 | fs.probability.get_depth(search_items, 254 | csv=True, 255 | output_dir=argument.output_dir, 256 | extra_param=formatted_params) 257 | 258 | elif argument.product == 'probability.get_chance': 259 | fs.probability.get_chance(search_items, 260 | csv=True, 261 | output_dir=argument.output_dir, 262 | extra_param=formatted_params) 263 | 264 | elif argument.product == 
'probability.get_count_summary': 265 | fs.probability.get_count_summary(search_items, 266 | csv=True, 267 | output_dir=argument.output_dir, 268 | extra_param=formatted_params) 269 | 270 | elif argument.product == 'probability.get_cumulative': 271 | fs.probability.get_cumulative(search_items, 272 | csv=True, 273 | output_dir=argument.output_dir, 274 | extra_param=formatted_params) 275 | 276 | elif argument.product == 'probability.get_count': 277 | fs.probability.get_count(search_items, 278 | argument.location_type, 279 | csv=True, 280 | output_dir=argument.output_dir, 281 | extra_param=formatted_params) 282 | 283 | elif argument.product == 'historic.get_event': 284 | fs.historic.get_event(search_items, 285 | csv=True, 286 | output_dir=argument.output_dir, 287 | extra_param=formatted_params) 288 | 289 | elif argument.product == 'historic.get_summary': 290 | fs.historic.get_summary(search_items, 291 | argument.location_type, 292 | csv=True, 293 | output_dir=argument.output_dir, 294 | extra_param=formatted_params) 295 | 296 | elif argument.product == 'historic.get_events_by_location': 297 | fs.historic.get_events_by_location(search_items, 298 | argument.location_type, 299 | csv=True, 300 | output_dir=argument.output_dir, 301 | extra_param=formatted_params) 302 | 303 | elif argument.product == 'location.get_detail': 304 | fs.location.get_detail(search_items, 305 | argument.location_type, 306 | csv=True, 307 | output_dir=argument.output_dir, 308 | extra_param=formatted_params) 309 | 310 | elif argument.product == 'location.get_summary': 311 | fs.location.get_summary(search_items, 312 | argument.location_type, 313 | csv=True, 314 | output_dir=argument.output_dir, 315 | extra_param=formatted_params) 316 | 317 | elif argument.product == 'fema.get_nfip': 318 | fs.fema.get_nfip(search_items, 319 | argument.location_type, 320 | csv=True, 321 | output_dir=argument.output_dir, 322 | extra_param=formatted_params) 323 | 324 | elif argument.product == 
'environmental.get_precipitation': 325 | fs.environmental.get_precipitation(search_items, 326 | csv=True, 327 | output_dir=argument.output_dir, 328 | extra_param=formatted_params) 329 | 330 | elif argument.product == 'tile.get_probability_depth': 331 | if not argument.year: 332 | logging.error("get_probability_depth is missing the year argument") 333 | input("Press Enter to continue...") 334 | sys.exit() 335 | 336 | try: 337 | int(argument.year) 338 | except ValueError: 339 | logging.error("The year argument could not be converted to an int. " 340 | "Provided argument: {}".format(argument.year)) 341 | input("Press Enter to continue...") 342 | sys.exit() 343 | 344 | if not argument.return_period: 345 | logging.error("get_probability_depth is missing the return_period argument") 346 | input("Press Enter to continue...") 347 | sys.exit() 348 | 349 | try: 350 | int(argument.return_period) 351 | except ValueError: 352 | logging.error("The return_period argument could not be converted to an int. " 353 | "Provided argument: {}".format(argument.return_period)) 354 | input("Press Enter to continue...") 355 | sys.exit() 356 | 357 | fs.tile.get_probability_depth(year=int(argument.year), 358 | return_period=int(argument.return_period), 359 | search_items=search_items, 360 | output_dir=argument.output_dir, 361 | image=True) 362 | 363 | elif argument.product == 'tile.get_historic_event': 364 | 365 | if not argument.event_id: 366 | logging.error("get_probability_depth is missing the event_id argument") 367 | input("Press Enter to continue...") 368 | sys.exit() 369 | 370 | try: 371 | int(argument.event_id) 372 | except ValueError: 373 | logging.error("The event_id argument could not be converted to an int. 
" 374 | "Provided argument: {}".format(argument.event_id)) 375 | input("Press Enter to continue...") 376 | sys.exit() 377 | 378 | fs.tile.get_historic_event(event_id=int(argument.event_id), 379 | search_items=search_items, 380 | output_dir=argument.output_dir, 381 | image=True) 382 | 383 | elif argument.product == 'aal.get_summary': 384 | fs.aal.get_summary(search_items, 385 | argument.location_type, 386 | csv=True, 387 | output_dir=argument.output_dir, 388 | extra_param=formatted_params) 389 | 390 | elif argument.product == 'avm.get_avm': 391 | fs.avm.get_avm(search_items, 392 | csv=True, 393 | output_dir=argument.output_dir, 394 | extra_param=formatted_params) 395 | 396 | elif argument.product == 'avm.get_provider': 397 | fs.avm.get_provider(search_items, 398 | csv=True, 399 | output_dir=argument.output_dir, 400 | extra_param=formatted_params) 401 | 402 | elif argument.product == 'economic.get_property_nfip': 403 | fs.economic.get_property_nfip(search_items, 404 | csv=True, 405 | output_dir=argument.output_dir, 406 | extra_param=formatted_params) 407 | 408 | else: 409 | logging.error("Product not found. Please check that the argument" 410 | " provided is correct: {}".format(argument.product)) 411 | 412 | finally: 413 | input("Press Enter to continue...") 414 | 415 | else: 416 | raise InvalidArgument("No search items were provided from either a search item list or a file. 
class Adaptation(Api):
    """Builds adaptation products from First Street Foundation API requests.

    Methods:
        get_detail: Retrieves a list of Adaptation Details for the given list of IDs
        get_summary: Retrieves a list of Adaptation Summaries for the given list of IDs
        get_detail_by_location: Retrieves summaries plus their referenced details for locations
    """

    def get_detail(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves adaptation detail product data from the First Street Foundation API
        for the given search items.

        Args:
            search_items (list/file): First Street Foundation IDs, lat/lng pairs, addresses,
                or a file of First Street Foundation IDs
            csv (bool): Whether to also write the extracted data to a csv
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameters to be added to the url

        Returns:
            A list of AdaptationDetail objects
        """
        responses = self.call_api(search_items, "adaptation", "detail", None, extra_param=extra_param)
        details = [AdaptationDetail(response) for response in responses]

        if csv:
            csv_format.to_csv(details, "adaptation", "detail", output_dir=output_dir)

        logging.info("Adaptation Detail Data Ready.")
        return details

    def get_detail_by_location(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):
        """Retrieves adaptation summaries for the given locations, then the detail records
        for every adaptation project those summaries reference.

        Args:
            search_items (list/file): First Street Foundation IDs, lat/lng pairs, addresses,
                or a file of First Street Foundation IDs
            location_type (str): The location lookup type
            csv (bool): Whether to also write the extracted data to a csv
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameters to be added to the url

        Returns:
            A two-element list: [list of AdaptationSummary, list of AdaptationDetail]
        Raises:
            InvalidArgument: The location provided is empty
            TypeError: The location provided is not a string
        """
        if not location_type:
            raise InvalidArgument(location_type)
        if not isinstance(location_type, str):
            raise TypeError("location is not a string")

        # First pass: summaries for the requested locations.
        summary_responses = self.call_api(search_items, "adaptation", "summary", location_type,
                                          extra_param=extra_param)
        summaries = [AdaptationSummary(response) for response in summary_responses]

        # Distinct adaptation ids referenced by any of the summaries.
        adaptation_ids = list({adaptation_id
                               for summary in summaries if summary.adaptation
                               for adaptation_id in summary.adaptation})

        if adaptation_ids:
            detail_responses = self.call_api(adaptation_ids, "adaptation", "detail", None,
                                             extra_param=extra_param)
        else:
            # Placeholder record marking that no valid adaptation ids were found.
            detail_responses = [{"adaptationId": None, "valid_id": False}]
        details = [AdaptationDetail(response) for response in detail_responses]

        if csv:
            csv_format.to_csv([summaries, details], "adaptation", "summary_detail", location_type,
                              output_dir=output_dir)

        logging.info("Adaptation Summary Detail Data Ready.")
        return [summaries, details]

    def get_summary(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):
        """Retrieves adaptation summary product data from the First Street Foundation API
        for the given search items.

        Args:
            search_items (list/file): First Street Foundation IDs, lat/lng pairs, addresses,
                or a file of First Street Foundation IDs
            location_type (str): The location lookup type
            csv (bool): Whether to also write the extracted data to a csv
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameters to be added to the url

        Returns:
            A list of AdaptationSummary objects
        Raises:
            InvalidArgument: The location provided is empty
            TypeError: The location provided is not a string
        """
        if not location_type:
            raise InvalidArgument(location_type)
        if not isinstance(location_type, str):
            raise TypeError("location is not a string")

        responses = self.call_api(search_items, "adaptation", "summary", location_type,
                                  extra_param=extra_param)
        summaries = [AdaptationSummary(response) for response in responses]

        if csv:
            csv_format.to_csv(summaries, "adaptation", "summary", location_type, output_dir=output_dir)

        logging.info("Adaptation Summary Data Ready.")
        return summaries
class Api:
    """This class handles the calls to the API through the http class

    Attributes:
        http (Http): A http class to connect to the First Street Foundation API
    Methods:
        call_api: Creates an endpoint
    """

    def __init__(self, http):
        """Stores the http client used for all subsequent API calls.

        Args:
            http (Http): A http class to connect to the First Street Foundation API
        """
        self._http = http

    def call_api(self, search_item, product, product_subtype, location=None, tile_product=None, year=None,
                 return_period=None, event_id=None, extra_param=None):
        """Receives an item, a product, a product subtype, and a location to create and call an endpoint to the First
        Street Foundation API.

        Args:
            search_item (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            product (str): The overall product to call
            product_subtype (str): The product subtype (if suitable)
            location (str/None): The location type (if suitable)
            tile_product (str/None): The tile product (if suitable)
            year (int/None): The year for probability depth tiles (if suitable)
            return_period (int/None): The return period for probability depth tiles (if suitable)
            event_id (int/None): The event_id for historic tiles (if suitable)
            extra_param (dict): Extra parameter to be added to the url
        Returns:
            A list of JSON responses. For the "economic/aal" product, a zip of
            (response, search_item) pairs instead (single-use iterator).
        Raises:
            InvalidArgument: search_item is neither a list nor a readable file, or is empty
            TypeError: a search item fails the per-product validation below
        """

        # A non-list input is expected to be a path to a file of search items
        if not isinstance(search_item, list):

            # Check if it's a file
            if isinstance(search_item, str) and os.path.isfile(search_item):

                # Get search items from file
                search_item = read_search_items_from_file(search_item)

            else:
                raise InvalidArgument("File provided is not a list or a valid file. "
                                      "Please check the file name and path. '{}'".format(str(search_item)))

        else:

            # Tile products take (z, x, y) coordinate tuples; validate shape, type and zoom
            if tile_product:
                if not all(isinstance(t, tuple) for t in search_item):
                    raise TypeError("Input must be a list of coordinates in a tuple of (z, x, y). "
                                    "Provided Arg: {}".format(search_item))

                if not all(isinstance(coord, int) for t in search_item for coord in t):
                    raise TypeError("Each coordinate in the tuple must be an integer. Provided Arg: {}"
                                    .format(search_item))

                # t[0] is the zoom level of the (z, x, y) tuple
                if not all(0 < t[0] <= 18 for t in search_item):
                    raise TypeError("Max zoom is 18. Provided Arg: {}".format(search_item))

            # Ensure for historic and adaptation the search items are EventIDs or AdaptationIDs
            # NOTE(review): this check also runs for tile_product inputs (the commented-out
            # `else:` above suggests it may have been intended only for non-tile lists) —
            # harmless today because tile items are tuples only when valid, but confirm intent.
            if ((product == "adaptation" and product_subtype == "detail") or
                (product == "historic" and product_subtype == "event") or
                (product == "economic/avm" and product_subtype == "provider")) and \
                    not all(isinstance(t, int) for t in search_item):
                raise TypeError("Input must be an integer for this product. "
                                "Provided Arg: {}".format(search_item))

        # No items found
        if not search_item:
            raise InvalidArgument(search_item)

        base_url = self._http.options.get('url')
        version = self._http.version

        # Build one endpoint tuple per search item
        endpoints = []
        for item in search_item:
            if location:
                endpoint = "/".join([base_url, version, product, product_subtype, location])
            elif tile_product:
                # Historic tiles key on event_id; probability tiles on year + return period
                if event_id:
                    endpoint = "/".join([base_url, version, product, product_subtype, tile_product,
                                         str(event_id), "/".join(map(str, item))])
                else:
                    endpoint = "/".join([base_url, version, product, product_subtype, tile_product,
                                         str(year), str(return_period), "/".join(map(str, item))])
            else:
                endpoint = "/".join([base_url, version, product, product_subtype])

            # Non-tile lookups append the item as path segment or query string
            if not tile_product:

                if not extra_param:
                    formatted_params = ""
                else:
                    formatted_params = urllib.parse.urlencode(extra_param)

                # fsid
                if isinstance(item, int):
                    endpoint = endpoint + "/{}".format(item) + "?{}".format(formatted_params)

                # lat/lng
                elif isinstance(item, tuple):
                    endpoint = endpoint + "?lat={}&lng={}&{}".format(item[0], item[1], formatted_params)

                # address
                # NOTE(review): the address is interpolated without url-encoding — presumably
                # handled downstream; verify against the http layer.
                elif isinstance(item, str):
                    endpoint = endpoint + "?address={}&{}".format(item, formatted_params)

            endpoints.append((endpoint, item, product, product_subtype))

        # Asynchronously call the API for each endpoint
        loop = asyncio.get_event_loop()
        response = loop.run_until_complete(self._http.endpoint_execute(endpoints))

        # AAL callers need the originating search item alongside each response
        if product == "economic/aal":
            return zip(response, search_item)

        return response
class AAL(Api):
    """This class receives a list of search_items and handles the creation of an aal product from the request.

    Methods:
        get_summary: Retrieves a list of AAL Summary for the given list of IDs
    """

    def get_summary(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):
        """Retrieves AAL summary product data from the First Street Foundation API given a list of search_items and
        returns a list of AAL Summary objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            location_type (str): The location lookup type
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url. A "depths" entry may be a
                list; it is converted to a comma-separated string before the request.

        Returns:
            A list of AAL Summary
        Raises:
            InvalidArgument: The location provided is empty
            TypeError: The location provided is not a string
        """

        if not location_type:
            raise InvalidArgument(location_type)
        elif not isinstance(location_type, str):
            raise TypeError("location is not a string")

        # Flatten a list-valued "depths" param into the comma-separated form the API expects.
        # NOTE(review): this mutates the caller's extra_param dict in place — confirm acceptable.
        if extra_param and "depths" in extra_param:
            extra_param["depths"] = ','.join(map(str, extra_param["depths"]))

        api_datas = self.call_api(search_items, "economic/aal", "summary", location_type, extra_param=extra_param)

        product = []
        # For "economic/aal", call_api yields (response, search_item) pairs; attach the id
        # to each response before model construction.
        for api_data, fsid in api_datas:
            api_data["fsid"] = fsid

            # Property-level responses have a different schema than the aggregated levels
            if location_type == "property":
                product.append(AALSummaryProperty(api_data))
            else:
                product.append(AALSummaryOther(api_data))

        if csv:
            csv_format.to_csv(product, "economic_aal", "summary", location_type, output_dir=output_dir)

        logging.info("AAL Summary Data Ready.")

        return product


class AVM(Api):
    """This class receives a list of search_items and handles the creation of an AVM product from the request.

    Methods:
        get_avm: Retrieves a list of AVM for the given list of IDs
        get_provider: Retrieves a list of AVM providers for the given list of IDs
    """

    def get_avm(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves AVM product data from the First Street Foundation API given a list of search_items and
        returns a list of AVM objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of AVM
        """
        # AVMs are only available at the property level, so "property" is passed as the location
        api_datas = self.call_api(search_items, "economic", "avm", "property", extra_param=extra_param)

        product = [AVMProperty(api_data) for api_data in api_datas]

        if csv:
            csv_format.to_csv(product, "economic_avm", "avm", "property", output_dir=output_dir)

        logging.info("AVM Data Ready.")

        return product

    def get_provider(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves AVM provider product data from the First Street Foundation API given a list of search_items and
        returns a list of AVM provider objects.

        Args:
            search_items (list/file): A list of provider IDs (integers), or a file of provider IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of AVM Provider
        """
        # Provider lookups take no location; call_api enforces that the ids are integers
        api_datas = self.call_api(search_items, "economic/avm", "provider", extra_param=extra_param)

        product = [AVMProvider(api_data) for api_data in api_datas]

        if csv:
            csv_format.to_csv(product, "economic_avm", "provider", output_dir=output_dir)

        logging.info("AVM Provider Data Ready.")

        return product


class Economic(Api):
    """This class receives a list of search_items and handles the creation of an economic product from the request.

    Methods:
        get_property_nfip: Retrieves a list of property nfip premiums for the given list of IDs
    """

    def get_property_nfip(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves property NFIP premium data from the First Street Foundation API given a list of
        search_items and returns a list of NFIP Premium objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of property NFIP premiums
        """
        # NFIP premiums are only available at the property level
        api_datas = self.call_api(search_items, "economic", "nfip", "property", extra_param=extra_param)

        product = [NFIPPremium(api_data) for api_data in api_datas]

        if csv:
            csv_format.to_csv(product, "economic", "nfip", "property", output_dir=output_dir)

        logging.info("NFIP Premium Data Ready.")

        return product
class Environmental(Api):
    """Builds environmental products from First Street Foundation API requests.

    Methods:
        get_precipitation: Retrieves a list of Environmental Precipitation for the given list of IDs
    """

    def get_precipitation(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves environmental precipitation product data from the First Street Foundation
        API for the given search items.

        Args:
            search_items (list/file): First Street Foundation IDs, lat/lng pairs, addresses,
                or a file of First Street Foundation IDs
            csv (bool): Whether to also write the extracted data to a csv
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameters to be added to the url

        Returns:
            A list of EnvironmentalPrecipitation objects
        """
        # Precipitation data is served at the county level only.
        responses = self.call_api(search_items, "environmental", "precipitation", "county",
                                  extra_param=extra_param)
        precipitation = [EnvironmentalPrecipitation(response) for response in responses]

        if csv:
            csv_format.to_csv(precipitation, "environmental", "precipitation", "county",
                              output_dir=output_dir)

        logging.info("Environmental Precipitation Data Ready.")
        return precipitation
class Fema(Api):
    """Builds fema products from First Street Foundation API requests.

    Methods:
        get_nfip: Retrieves a list of Fema Nfip for the given list of IDs
    """

    def get_nfip(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):
        """Retrieves fema nfip product data from the First Street Foundation API for the
        given search items.

        Args:
            search_items (list/file): First Street Foundation IDs, lat/lng pairs, addresses,
                or a file of First Street Foundation IDs
            location_type (str): The location lookup type
            csv (bool): Whether to also write the extracted data to a csv
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameters to be added to the url

        Returns:
            A list of FemaNfip objects
        Raises:
            InvalidArgument: The location provided is empty
            TypeError: The location provided is not a string
        """
        if not location_type:
            raise InvalidArgument(location_type)
        if not isinstance(location_type, str):
            raise TypeError("location is not a string")

        responses = self.call_api(search_items, "fema", "nfip", location_type, extra_param=extra_param)
        nfip_records = [FemaNfip(response) for response in responses]

        if csv:
            csv_format.to_csv(nfip_records, "fema", "nfip", location_type, output_dir=output_dir)

        logging.info("Fema Nfip Data Ready.")
        return nfip_records
class Historic(Api):
    """This class receives a list of search_items and handles the creation of a historic product from the request.

    Methods:
        get_event: Retrieves a list of Historic Event for the given list of IDs
        get_summary: Retrieves a list of Historic Summary for the given list of IDs
        get_events_by_location: Retrieves summaries plus their referenced events for locations
    """

    def get_event(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves historic event product data from the First Street Foundation API given a list of search_items and
        returns a list of Historic Event objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Historic Event
        """

        # Get data from api and create objects
        api_datas = self.call_api(search_items, "historic", "event", None, extra_param=extra_param)
        product = [HistoricEvent(api_data) for api_data in api_datas]

        if csv:
            csv_format.to_csv(product, "historic", "event", output_dir=output_dir)

        logging.info("Historic Event Data Ready.")

        return product

    def get_events_by_location(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):
        """Retrieves historic summary product data from the First Street Foundation API given a list of location
        search_items, then the event records referenced by those summaries, and returns both.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            location_type (str): The location lookup type
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A two-element list: [list of Historic Summary, list of Historic Event]
        Raises:
            InvalidArgument: The location provided is empty
            TypeError: The location provided is not a string
        """

        if not location_type:
            raise InvalidArgument(location_type)
        elif not isinstance(location_type, str):
            raise TypeError("location is not a string")

        # Get data from api and create objects.
        # Fix: extra_param was previously dropped from this summary call, silently ignoring
        # the caller's parameters — now forwarded, consistent with every other endpoint call
        # in this module and with Adaptation.get_detail_by_location.
        api_datas = self.call_api(search_items, "historic", "summary", location_type, extra_param=extra_param)
        summary = [HistoricSummary(api_data) for api_data in api_datas]

        # Distinct event ids referenced by any of the summaries
        search_item = list(set([event.get("eventId") for sum_hist in summary if sum_hist.historic for
                                event in sum_hist.historic]))

        if search_item:
            api_datas_event = self.call_api(search_item, "historic", "event", None, extra_param=extra_param)

        else:
            # Placeholder record marking that no valid event ids were found
            api_datas_event = [{"eventId": None, "valid_id": False}]

        event = [HistoricEvent(api_data) for api_data in api_datas_event]

        if csv:
            csv_format.to_csv([summary, event], "historic", "summary_event", location_type, output_dir=output_dir)

        logging.info("Historic Summary Event Data Ready.")

        return [summary, event]

    def get_summary(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):
        """Retrieves historic summary product data from the First Street Foundation API given a list of search_items and
        returns a list of Historic Summary objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            location_type (str): The location lookup type
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Historic Summary
        Raises:
            InvalidArgument: The location provided is empty
            TypeError: The location provided is not a string
        """

        if not location_type:
            raise InvalidArgument(location_type)
        elif not isinstance(location_type, str):
            raise TypeError("location is not a string")

        # Get data from api and create objects
        api_datas = self.call_api(search_items, "historic", "summary", location_type, extra_param=extra_param)
        product = [HistoricSummary(api_data) for api_data in api_datas]

        if csv:
            csv_format.to_csv(product, "historic", "summary", location_type, output_dir=output_dir)

        logging.info("Historic Summary Data Ready.")

        return product
| """This class receives a list of search_items and handles the creation of a location product from the request. 18 | 19 | Methods: 20 | get_detail: Retrieves a list of Location Details for the given list of IDs 21 | get_summary: Retrieves a list of Location Summary for the given list of IDs 22 | """ 23 | 24 | def get_detail(self, search_items, location_type, csv=False, output_dir=None, extra_param=None): 25 | """Retrieves location detail product data from the First Street Foundation API given a list of search_items and 26 | returns a list of Location Detail objects. 27 | 28 | Args: 29 | search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a 30 | file of First Street Foundation IDs 31 | location_type (str): The location lookup type 32 | csv (bool): To output extracted data to a csv or not 33 | output_dir (str): The output directory to save the generated csvs 34 | extra_param (dict): Extra parameter to be added to the url 35 | 36 | Returns: 37 | A list of Location Detail 38 | Raises: 39 | InvalidArgument: The location provided is empty 40 | TypeError: The location provided is not a string 41 | """ 42 | 43 | if not location_type: 44 | raise InvalidArgument("No location type provided: {}".format(location_type)) 45 | elif not isinstance(location_type, str): 46 | raise TypeError("location is not a string") 47 | 48 | # Get data from api and create objects 49 | api_datas = self.call_api(search_items, "location", "detail", location_type, extra_param=extra_param) 50 | 51 | if location_type == 'property': 52 | product = [LocationDetailProperty(api_data) for api_data in api_datas] 53 | 54 | elif location_type == 'neighborhood': 55 | product = [LocationDetailNeighborhood(api_data) for api_data in api_datas] 56 | 57 | elif location_type == 'city': 58 | product = [LocationDetailCity(api_data) for api_data in api_datas] 59 | 60 | elif location_type == 'zcta': 61 | product = [LocationDetailZcta(api_data) for api_data in api_datas] 62 | 63 | elif 
location_type == 'tract': 64 | product = [LocationDetailTract(api_data) for api_data in api_datas] 65 | 66 | elif location_type == 'county': 67 | product = [LocationDetailCounty(api_data) for api_data in api_datas] 68 | 69 | elif location_type == 'cd': 70 | product = [LocationDetailCd(api_data) for api_data in api_datas] 71 | 72 | elif location_type == 'state': 73 | product = [LocationDetailState(api_data) for api_data in api_datas] 74 | 75 | else: 76 | raise NotImplementedError 77 | 78 | if csv: 79 | csv_format.to_csv(product, "location", "detail", location_type, output_dir=output_dir) 80 | 81 | logging.info("Location Detail Data Ready.") 82 | 83 | return product 84 | 85 | def get_summary(self, search_items, location_type, csv=False, output_dir=None, extra_param=None): 86 | """Retrieves location summary product data from the First Street Foundation API given a list of search_items and 87 | returns a list of Location Summary objects. 88 | 89 | Args: 90 | search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a 91 | file of First Street Foundation IDs 92 | location_type (str): The location lookup type 93 | csv (bool): To output extracted data to a csv or not 94 | output_dir (str): The output directory to save the generated csvs 95 | extra_param (dict): Extra parameter to be added to the url 96 | 97 | Returns: 98 | A list of Location Summary 99 | Raises: 100 | InvalidArgument: The location provided is empty 101 | TypeError: The location provided is not a string 102 | """ 103 | 104 | if not location_type: 105 | raise InvalidArgument(location_type) 106 | elif not isinstance(location_type, str): 107 | raise TypeError("location is not a string") 108 | 109 | # Get data from api and create objects 110 | api_datas = self.call_api(search_items, "location", "summary", location_type, extra_param=extra_param) 111 | 112 | if location_type == "property": 113 | product = [LocationSummaryProperty(api_data) for api_data in api_datas] 114 | 115 | else: 116 
| product = [LocationSummaryOther(api_data) for api_data in api_datas] 117 | 118 | if csv: 119 | csv_format.to_csv(product, "location", "summary", location_type, output_dir=output_dir) 120 | 121 | logging.info("Location Summary Data Ready.") 122 | 123 | return product 124 | -------------------------------------------------------------------------------- /firststreet/api/probability.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned by First Street Foundation 3 | 4 | # Standard Imports 5 | import logging 6 | 7 | # Internal Imports 8 | from firststreet.api import csv_format 9 | from firststreet.api.api import Api 10 | from firststreet.errors import InvalidArgument 11 | from firststreet.models.probability import ProbabilityChance, ProbabilityCount, ProbabilityCountSummary, \ 12 | ProbabilityCumulative, ProbabilityDepth 13 | 14 | 15 | class Probability(Api): 16 | """This class receives a list of search_items and handles the creation of a probability product from the request. 17 | 18 | Methods: 19 | get_depth: Retrieves a list of Probability Depth for the given list of IDs 20 | get_chance: Retrieves a list of Probability Depth for the given list of IDs 21 | get_count: Retrieves a list of Probability Depth for the given list of IDs 22 | get_count_summary: Retrieves a list of Probability Depth for the given list of IDs 23 | get_cumulative: Retrieves a list of Probability Depth for the given list of IDs 24 | """ 25 | 26 | def get_chance(self, search_items, csv=False, output_dir=None, extra_param=None): 27 | """Retrieves probability chance product data from the First Street Foundation API given a list of search_items 28 | and returns a list of Probability Chance objects. 
class Probability(Api):
    """This class receives a list of search_items and handles the creation of a probability product from the request.

    Methods:
        get_depth: Retrieves a list of Probability Depth for the given list of IDs
        get_chance: Retrieves a list of Probability Chance for the given list of IDs
        get_count: Retrieves a list of Probability Count for the given list of IDs
        get_count_summary: Retrieves a list of Probability Count-Summary for the given list of IDs
        get_cumulative: Retrieves a list of Probability Cumulative for the given list of IDs
    """

    def _get_property_product(self, search_items, product_subtype, model, label, csv, output_dir, extra_param):
        """Shared fetch/build/export path for the property-level probability products
        (chance, count-summary, cumulative, depth), which differ only in endpoint and model."""

        # Get data from api and create objects
        api_datas = self.call_api(search_items, "probability", product_subtype, "property", extra_param=extra_param)
        product = [model(api_data) for api_data in api_datas]

        if csv:
            csv_format.to_csv(product, "probability", product_subtype, output_dir=output_dir)

        logging.info("Probability {} Data Ready.".format(label))

        return product

    def get_chance(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves probability chance product data from the First Street Foundation API given a list of search_items
        and returns a list of Probability Chance objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Probability Chance
        """
        return self._get_property_product(search_items, "chance", ProbabilityChance, "Chance",
                                          csv, output_dir, extra_param)

    def get_count(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):
        """Retrieves probability count product data from the First Street Foundation API given a list of search_items
        and returns a list of Probability Count objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            location_type (str): The location lookup type
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Probability Count
        Raises:
            InvalidArgument: The location provided is empty
            TypeError: The location provided is not a string
        """

        if not location_type:
            # Descriptive message, consistent with Location.get_detail (was a bare InvalidArgument(location_type))
            raise InvalidArgument("No location type provided: {}".format(location_type))
        elif not isinstance(location_type, str):
            raise TypeError("location is not a string")

        # Get data from api and create objects
        api_datas = self.call_api(search_items, "probability", "count", location_type, extra_param=extra_param)
        product = [ProbabilityCount(api_data) for api_data in api_datas]

        if csv:
            csv_format.to_csv(product, "probability", "count", location_type, output_dir=output_dir)

        logging.info("Probability Count Data Ready.")

        return product

    def get_count_summary(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves probability Count-Summary product data from the First Street Foundation API given a list of
        search_items and returns a list of Probability Count-Summary objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Probability Count-Summary
        """
        return self._get_property_product(search_items, "count-summary", ProbabilityCountSummary, "Count-Summary",
                                          csv, output_dir, extra_param)

    def get_cumulative(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves probability cumulative product data from the First Street Foundation API given a list of
        search_items and returns a list of Probability Cumulative objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Probability Cumulative
        """
        return self._get_property_product(search_items, "cumulative", ProbabilityCumulative, "Cumulative",
                                          csv, output_dir, extra_param)

    def get_depth(self, search_items, csv=False, output_dir=None, extra_param=None):
        """Retrieves probability depth product data from the First Street Foundation API given a list of search_items
        and returns a list of Probability Depth objects.

        Args:
            search_items (list/file): A First Street Foundation IDs, lat/lng pair, address, or a
                file of First Street Foundation IDs
            csv (bool): To output extracted data to a csv or not
            output_dir (str): The output directory to save the generated csvs
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Probability Depth
        """
        return self._get_property_product(search_items, "depth", ProbabilityDepth, "Depth",
                                          csv, output_dir, extra_param)
class Tile(Api):
    """This class receives a list of coordinates and parameters, and handles the return of a tile from the request.

    Methods:
        get_probability_depth: Retrieves a list of Probability Depth tiles for the given coordinates
        get_historic_event: Retrieves a list of Historic Event tiles for the given coordinates
    """

    @staticmethod
    def _write_tile_images(api_datas, name_parts, output_dir):
        """Writes each returned tile image to a timestamped png in output_dir.

        Args:
            api_datas (list): Tile responses, each a dict with "coordinate" and "image" keys
            name_parts (list of str): Product-specific name components for the file name
            output_dir (str): Resolved output directory (never None here)
        """
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        for data in api_datas:
            # BUG FIX: failed tile fetches come back as {"image": None, "valid_id": False} —
            # a truthy dict — so writing data["image"] unconditionally crashed with a
            # TypeError (writing None). Skip any tile without image bytes.
            if not data or data.get("image") is None:
                continue

            date = datetime.datetime.today().strftime('%Y_%m_%d_%H_%M_%S')

            # Set file name to the current date, time, and product
            file_name = "_".join([date] + name_parts + [str(data.get("coordinate"))]) + ".png"

            with open(os.path.join(output_dir, file_name), "wb") as f:
                f.write(data.get("image"))

    def get_probability_depth(self, search_items, year, return_period, image=False, output_dir=None, extra_param=None):
        """Retrieves probability depth tile data from the First Street Foundation API given a list of search_items
        and returns a list of Probability Depth Tile objects.

        Args:
            search_items (list of tuple): A list of coordinates in the form of [(x_1, y_1, z_1), (x_2, y_2, z_2), ...]
            year (int): The year to get the tile
            return_period (int): The return period to get the tile
            image (bool): To output extracted image to a png or not
            output_dir (str): The output directory to save the generated tile
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Probability Depth tiles
        Raises:
            InvalidArgument: The year or return period is empty or unsupported
            TypeError: The year or return period is not an int
        """

        if not year:
            raise InvalidArgument(year)
        elif not isinstance(year, int):
            raise TypeError("year is not an int")
        elif year not in [2020, 2035, 2050]:
            logging.error("Year provided is not one of: 2020, 2035, 2050")
            raise InvalidArgument(year)

        if not return_period:
            raise InvalidArgument(return_period)
        elif not isinstance(return_period, int):
            raise TypeError("return period is not an int")
        elif return_period not in [500, 100, 20, 5, 2]:
            logging.error("Return period provided is not one of: 500, 100, 20, 5, 2. "
                          "(2 year return period is only available for coastal areas.)")
            raise InvalidArgument(return_period)

        # Get data from api and create objects
        api_datas = self.call_api(search_items, "tile", "probability", tile_product="depth", year=year,
                                  return_period=return_period, extra_param=extra_param)

        if image:
            if not output_dir:
                output_dir = os.getcwd() + "/output_data"

            self._write_tile_images(api_datas, ["probability_depth", str(year), str(return_period)], output_dir)

            logging.info("Image(s) generated to '{}'.".format(output_dir))

        product = [ProbabilityDepthTile(api_data, year, return_period) for api_data in api_datas]

        logging.info("Probability Depth Tile Ready.")

        return product

    def get_historic_event(self, search_items, event_id, image=False, output_dir=None, extra_param=None):
        """Retrieves historic event tile data from the First Street Foundation API given a list of search_items
        and returns a list of Historic Event Tile objects.

        Args:
            search_items (list of tuple): A list of coordinates in the form of [(x_1, y_1, z_1), (x_2, y_2, z_2), ...]
            event_id (int): A First Street Foundation eventId
            image (bool): To output extracted image to a png or not
            output_dir (str): The output directory to save the generated tile
            extra_param (dict): Extra parameter to be added to the url

        Returns:
            A list of Historic Event tiles
        Raises:
            InvalidArgument: The event id provided is empty
            TypeError: The event id provided is not an int
        """

        if not event_id:
            raise InvalidArgument(event_id)
        elif not isinstance(event_id, int):
            raise TypeError("event id is not an int")

        # Get data from api and create objects
        api_datas = self.call_api(search_items, "tile", "historic", tile_product="event", event_id=event_id,
                                  extra_param=extra_param)

        if image:
            if not output_dir:
                output_dir = os.getcwd() + "/output_data"

            self._write_tile_images(api_datas, ["historic_event", str(event_id)], output_dir)

            logging.info("Image(s) generated to '{}'.".format(output_dir))

        product = [HistoricEventTile(api_data, event_id) for api_data in api_datas]

        logging.info("Historic Event Tile Ready.")

        return product
# User-facing error message constants for the First Street Foundation API client.
RATE_LIMIT = 'Request rate limited. Free for non-commercial use for up to 10 requests per minute! To increase ' \
             'your rate limit, please contact api@firststreet.com'
UNAUTHORIZED = 'Unauthorized Access. invalid API key provided.'
UNKNOWN = 'Unknown error, please check your request and try again.'
INTERNAL = 'Internal Server Error.'
NO_BODY = 'No body returned from response.'
NOT_FOUND = 'The specified object could not be found.'
OFFLINE = 'API is currently offline, try again later.'
# BUG FIX: corrected typo "is't JSON" -> "isn't JSON" in the user-facing message
NOT_ACCEPTABLE = "You requested a format that isn't JSON."
NETWORK_ERROR = 'Network error, check host name.'
DEFAULT_ERROR = 'Unknown Client error.'
ENDPOINT_ERROR = 'HTTP Error: No endpoint provided for request.'
INVALID_ARGUMENT = 'Argument provided was invalid.'


class FirstStreetError(Exception):
    """Base exception for all First Street API client errors.

    Args:
        message (str): A human-readable description of the failure
        attachments (dict): Optional diagnostic context (request options, rate limit info)
    """

    def __init__(self, message=DEFAULT_ERROR, attachments=None):
        super().__init__(message)
        self.message = message
        self.attachments = attachments


class RateLimitError(FirstStreetError):
    """Raised when the API rate limit has been exceeded (HTTP 429)."""

    def __init__(self, message=RATE_LIMIT, attachments=None):
        super().__init__(message, attachments)


class UnauthorizedError(FirstStreetError):
    """Raised when the API key is missing or invalid (HTTP 401)."""

    def __init__(self, message=UNAUTHORIZED, attachments=None):
        super().__init__(message, attachments)


class UnknownError(FirstStreetError):
    """Raised for unrecognized error responses from the API."""

    def __init__(self, message=UNKNOWN, attachments=None):
        super().__init__(message, attachments)


class InternalError(FirstStreetError):
    """Raised on a server-side failure (HTTP 500)."""

    def __init__(self, message=INTERNAL, attachments=None):
        super().__init__(message, attachments)


class NoBodyError(FirstStreetError):
    """Raised when a response unexpectedly carries no body."""

    def __init__(self, message=NO_BODY, attachments=None):
        super().__init__(message, attachments)


class NotFoundError(FirstStreetError):
    """Raised when the requested object does not exist (HTTP 404)."""

    def __init__(self, message=NOT_FOUND, attachments=None):
        super().__init__(message, attachments)


class OfflineError(FirstStreetError):
    """Raised when the API is unavailable (HTTP 503)."""

    def __init__(self, message=OFFLINE, attachments=None):
        super().__init__(message, attachments)


class NotAcceptableError(FirstStreetError):
    """Raised when a non-JSON format was requested (HTTP 406)."""

    def __init__(self, message=NOT_ACCEPTABLE, attachments=None):
        super().__init__(message, attachments)


class NetworkError(FirstStreetError):
    """Raised on a transport-level failure (DNS, connection refused, etc.)."""

    def __init__(self, message=NETWORK_ERROR, attachments=None):
        super().__init__(message, attachments)


class EndpointError(FirstStreetError):
    """Raised when a request is attempted without an endpoint."""

    def __init__(self, message=ENDPOINT_ERROR, attachments=None):
        super().__init__(message, attachments)


# NOTE(review): InvalidArgument and MissingAPIKeyError deliberately do NOT inherit
# FirstStreetError; callers catch them as plain Exceptions. Changing the base class
# would silently widen existing `except FirstStreetError` handlers — left as-is.
class InvalidArgument(Exception):
    """Raised when a caller supplies an empty or malformed argument."""

    def __init__(self, message):
        super().__init__(message)
        self.message = INVALID_ARGUMENT + " Provided Arg: %s" % message


class MissingAPIKeyError(Exception):
    """Raised when no API key is configured for the client."""
    pass
    def __init__(self, api_key, connection_limit, rate_limit, rate_period, version=None):
        # Default the API version when the caller does not pin one
        if version is None:
            version = DEFAULT_SUMMARY_VERSION

        self.api_key = api_key
        # Base url and the fixed header set sent with every request; the API key
        # travels as a Bearer token in the Authorization header
        self.options = {'url': "https://api.firststreet.org",
                        'headers': {
                            'Content-Encoding': 'gzip',
                            'Content-Type': 'text/html',
                            'User-Agent': 'python/firststreet',
                            'Accept': 'application/vnd.api+json',
                            'Authorization': 'Bearer %s' % api_key
                        }}
        self.version = version
        self.connection_limit = connection_limit
        self.rate_limit = rate_limit
        self.rate_period = rate_period

    async def bound_fetch(self, sem, endpoint, session, throttler):
        """Runs one execute() call while holding the concurrency semaphore."""
        async with sem:
            return await self.execute(endpoint, session, throttler)

    async def endpoint_execute(self, endpoints):
        """Asynchronously calls each endpoint and returns the JSON responses
        Args:
            endpoints (list): List of endpoints to get
        Returns:
            The list of JSON responses corresponding to each endpoint (input order preserved)
        """

        throttler = Throttler(rate_limit=self.rate_limit, period=self.rate_period)
        ssl_ctx = ssl.create_default_context(cafile=certifi.where())

        # NOTE(review): this certifi-backed context is attached to the connector, but
        # execute() below passes ssl=False per request, which overrides it and disables
        # certificate verification — confirm which behavior is intended.
        connector = aiohttp.TCPConnector(limit_per_host=self.connection_limit, ssl=ssl_ctx)
        session = aiohttp.ClientSession(connector=connector)

        # Asnycio create tasks for each endpoint
        try:

            sem = asyncio.Semaphore(self.connection_limit)
            tasks = [asyncio.create_task(self.bound_fetch(sem, endpoint, session, throttler)) for endpoint in endpoints]

            # Drive the tasks to completion with a progress bar; results are then read
            # from `tasks` (not as_completed) so the output order matches `endpoints`
            [await f for f in tqdm.tqdm(asyncio.as_completed(tasks), total=len(endpoints))]
            ret = [t.result() for t in tasks]

        finally:
            # Always close the session, even if a task raised
            await session.close()

        return ret

    async def execute(self, endpoint, session, throttler):
        """Executes the endpoint for the given endpoint with the open session
        Args:
            endpoint (tuple): (url, search_item, product, product_subtype, ...) for the request
            session (ClientSession): The open session
            throttler (Throttler): The throttle limiter
        Returns:
            The JSON reponse, or a stub {'search_item': ...} dict after repeated failures
        Raises:
            _network_error: if an error occurs
        """

        headers = self.options.get('headers')

        # Retry loop: up to 5 attempts on timeout / malformed-JSON errors
        retry = 0
        while retry < 5:

            # Throttle
            async with throttler:
                try:
                    # NOTE(review): ssl=False here disables certificate verification and
                    # overrides the connector's certifi context — confirm intended
                    async with session.get(endpoint[0], headers=headers, ssl=False) as response:

                        # Read a tile response
                        if endpoint[2] == 'tile':
                            return await self.tile_response(response, endpoint)

                        # Read a json response
                        else:
                            return await self.product_response(response, endpoint)

                except (asyncio.TimeoutError, JSONDecodeError) as ex:
                    # Transient failure: log, back off one second, and retry
                    logging.info("{} error for item: {} at {}. Retry {}".format(ex.__class__, endpoint[1],
                                                                                endpoint[0], retry))
                    retry += 1
                    await asyncio.sleep(1)

                except aiohttp.ClientError as ex:
                    # Hard client/network failure: give up on this item immediately
                    logging.error("{} error getting item: {} from {}".format(ex.__class__, endpoint[1], endpoint[0]))
                    return {'search_item': endpoint[1]}

        logging.error("Timeout error after 5 retries for search_item: {} from {}".format(endpoint[1], endpoint[0]))
        return {'search_item': endpoint[1]}

    async def tile_response(self, response, endpoint):
        """Handles the response for a tile request; returns raw image bytes keyed by coordinate."""

        # Get rate limit from header
        rate_limit = self._parse_rate_limit(response.headers)

        if response.status != 200 and response.status != 500:
            # NOTE(review): status=response.reason (a string) and message=response.status
            # (the int code) look swapped relative to _network_error's parameters — as
            # written, the int-keyed lookup in _network_error can never match. Confirm.
            raise self._network_error(self.options, rate_limit,
                                      status=response.reason, endpoint=endpoint, message=response.status)

        elif response.status == 500:
            # Server-side tile failure: return a stub with image=None so callers can skip it
            logging.info(
                "Error retrieving tile from server. Check if the coordinates provided "
                "are correct: {}".format(endpoint[1]))
            return {"coordinate": endpoint[1], "image": None, 'valid_id': False}

        body = await response.read()

        return {"coordinate": endpoint[1], "image": body}

    async def product_response(self, response, endpoint):
        """Handles the response for all non-tile products; returns the parsed JSON body,
        or a product-specific invalid-id stub when the API reports an error."""

        # Get rate limit from header
        rate_limit = self._parse_rate_limit(response.headers)

        # content_type=None: accept the API's non-standard content type when parsing JSON
        body = await response.json(content_type=None)

        try:
            # 404/500 fall through so their error payload is mapped to a stub below
            if response.status != 200 and response.status != 404 and response.status != 500:
                raise self._network_error(self.options, rate_limit, endpoint, error=body.get('error'))

            error = body.get("error")
            if error:
                search_item = endpoint[1]
                product = endpoint[2]
                product_subtype = endpoint[3]

                # Each product keys its stub on the identifier its CSV/model layer expects
                if product == 'adaptation' and product_subtype == 'detail':
                    return {'adaptationId': search_item, 'valid_id': False, 'error': error['message']}

                elif product == 'historic' and product_subtype == 'event':
                    return {'eventId': search_item, 'valid_id': False, 'error': error['message']}

                elif product == 'economic/avm' and product_subtype == 'provider':
                    return {'providerID': search_item, 'valid_id': False, 'error': error['message']}

                elif product == 'economic/aal' and product_subtype == 'summary':
                    return {'fsid': search_item, 'valid_id': False, 'error': error['message']}

                else:
                    return {'fsid': search_item, 'valid_id': False, 'error': error['message']}

        except AttributeError:
            # body was not a dict (e.g. None): treat as an invalid id with no error message
            search_item = endpoint[1]
            product = endpoint[2]
            product_subtype = endpoint[3]

            if product == 'adaptation' and product_subtype == 'detail':
                return {'adaptationId': search_item, 'valid_id': False}

            elif product == 'historic' and product_subtype == 'event':
                return {'eventId': search_item, 'valid_id': False}

            else:
                return {'fsid': search_item, 'valid_id': False}

        # Success path: hand back the parsed JSON body unchanged
        return body

    @staticmethod
    def _parse_rate_limit(headers):
        """Parses the rate limit form the header
        Args:
            headers (CIMultiDictProxy): The header returned from the response
        Returns:
            The rate limit information
        """
        return {'limit': headers.get('x-ratelimit-limit'), 'remaining': headers.get('x-ratelimit-remaining'),
                'reset': headers.get('x-ratelimit-reset'), 'requestId': headers.get('x-request-id')}

    @staticmethod
    def _network_error(options, rate_limit, endpoint, error=None, status=None, message=None):
        """Handles any network errors as a result of the First Street Foundation API
        Args:
            options (dict): The options used in the header of the response
            rate_limit (dict): The rate limit information
            endpoint (str): The failing endpoint
            error (dict): The body returned from the request call
            status (str): The status error from the response
            message (str): The message error from the response
        Returns:
            A First Street error class (an exception INSTANCE — callers raise it)
        """
        # If the API supplied a structured error, it wins over the passed status/message
        if error:
            status = int(error.get('code'))
            message = error.get('message')

        if not status == 429:
            formatted = "Network Error {}: {}. {}".format(status, message, endpoint)
        else:
            # Rate-limit errors also surface the current quota state
            formatted = "Network Error {}: {}. {}. Limit: {}. Remaining: {}. Reset: {}".format(status,
                                                                                              message,
                                                                                              endpoint,
                                                                                              rate_limit.get('limit'),
                                                                                              rate_limit.get('remaining'),
                                                                                              rate_limit.get('reset'))

        # NOTE(review): all five exception instances are constructed eagerly on every call;
        # harmless but wasteful. Lookup falls back to UnknownError for unmapped codes.
        return {
            401: e.UnauthorizedError(message=formatted,
                                     attachments={"options": options, "rate_limit": rate_limit}),
            406: e.NotAcceptableError(message=formatted,
                                      attachments={"options": options, "rate_limit": rate_limit}),
            429: e.RateLimitError(message=formatted, attachments={"options": options, "rate_limit": rate_limit}),
            500: e.InternalError(message=formatted, attachments={"options": options, "rate_limit": rate_limit}),
            503: e.OfflineError(message=formatted, attachments={"options": options, "rate_limit": rate_limit}),
        }.get(status,
              e.UnknownError(message=formatted, attachments={"options": options, "rate_limit": rate_limit}))
"""Creates an Adaptation Detail object given a response 11 | 12 | Args: 13 | response (JSON): A JSON response received from the API 14 | """ 15 | 16 | def __init__(self, response): 17 | super().__init__(response) 18 | self.adaptationId = str(response.get('adaptationId')) 19 | self.name = response.get('name') 20 | self.type = response.get('type') 21 | self.scenario = response.get('scenario') 22 | self.conveyance = response.get('conveyance') 23 | self.returnPeriod = response.get('returnPeriod') 24 | self.serving = response.get('serving') 25 | self.geometry = Geometry(response.get('geometry')) 26 | 27 | 28 | class AdaptationSummary(Api): 29 | """Creates an Adaptation Summary object given a response 30 | 31 | Args: 32 | response (JSON): A JSON response received from the API 33 | """ 34 | 35 | def __init__(self, response): 36 | super().__init__(response) 37 | self.fsid = str(response.get('fsid')) 38 | self.adaptation = response.get('adaptation') 39 | self.properties = response.get('properties') 40 | -------------------------------------------------------------------------------- /firststreet/models/api.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned by First Street Foundation 3 | 4 | 5 | class Api: 6 | """Creates an Api interface given a response 7 | 8 | Args: 9 | response (JSON): A JSON response received from the API 10 | """ 11 | 12 | def __init__(self, response): 13 | if response.get('valid_id') is not None: 14 | self.valid_id = response.get('valid_id') 15 | else: 16 | self.valid_id = True 17 | 18 | if response.get('error') is not None: 19 | self.error = response.get('error') 20 | else: 21 | self.error = None 22 | -------------------------------------------------------------------------------- /firststreet/models/economic.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned 
class AALSummary(Api):
    """Shared parent for Average Annual Loss (AAL) summary models.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.annual_loss = response.get('annualLoss')


class AALSummaryProperty(AALSummary):
    """AAL summary for the property location type (adds depth loss).

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.depth_loss = response.get('depthLoss')


class AALSummaryOther(AALSummary):
    """AAL summary for every non-property location type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)


class AVMProperty(Api):
    """Automated Valuation Model (AVM) record for a property.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.avm = response.get('avm')
        self.provider_id = response.get('providerID')


class AVMProvider(Api):
    """Metadata describing an AVM data provider.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.provider_id = response.get('providerID')
        self.provider_name = response.get('providerName')
        self.provider_logo = response.get('providerLogo')


class NFIPPremium(Api):
    """NFIP premium estimate for a property.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get("fsid"))
        self.data = response.get('data')


class EnvironmentalPrecipitation(Api):
    """Projected precipitation data for a location.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get("fsid"))
        self.projected = response.get("projected")


class FemaNfip(Api):
    """FEMA NFIP claim/policy statistics for a location.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        # Counts, paid amounts and coverage figures are copied through verbatim.
        for field in ('claimCount', 'policyCount', 'buildingPaid', 'contentPaid',
                      'buildingCoverage', 'contentCoverage', 'iccPaid'):
            setattr(self, field, response.get(field))
/firststreet/models/geometry.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned by First Street Foundation 3 | 4 | # External Imports 5 | from shapely.geometry import shape 6 | 7 | 8 | class Geometry: 9 | """Creates a Geometry object given a response 10 | 11 | Args: 12 | geometry (dict): A dict of geometry 13 | """ 14 | 15 | def __init__(self, geometry): 16 | 17 | if geometry: 18 | self.polygon = shape(geometry.get('polygon')) if geometry.get('polygon') else None 19 | self.center = shape(geometry.get('center')) 20 | if geometry.get('bbox'): 21 | self.bbox = shape(geometry.get('bbox')) 22 | else: 23 | self.bbox = None 24 | 25 | def __eq__(self, other): 26 | if not isinstance(other, Geometry): 27 | return NotImplemented 28 | 29 | return self.polygon == other.polygon and self.center == other.center and self.bbox == other.bbox 30 | -------------------------------------------------------------------------------- /firststreet/models/historic.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned by First Street Foundation 3 | 4 | # Internal Imports 5 | from firststreet.models.api import Api 6 | from firststreet.models.geometry import Geometry 7 | 8 | 9 | class HistoricEvent(Api): 10 | """Creates a Historic Event object given a response 11 | 12 | Args: 13 | response (JSON): A JSON response received from the API 14 | """ 15 | 16 | def __init__(self, response): 17 | super().__init__(response) 18 | self.eventId = str(response.get('eventId')) 19 | self.name = response.get('name') 20 | self.month = response.get('month') 21 | self.year = response.get('year') 22 | self.returnPeriod = response.get('returnPeriod') 23 | self.type = response.get('type') 24 | self.properties = response.get('properties') 25 | self.geometry = Geometry(response.get('geometry')) 26 | 27 | 28 | class 
class HistoricSummary(Api):
    """Historic Summary model built from an API response.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.historic = response.get('historic')


class LocationDetail(Api):
    """Common parent of every Location Detail model.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        # Intentionally left un-stringified here; every subclass overwrites
        # it with the str() form.
        self.fsid = response.get('fsid')


class LocationDetailProperty(LocationDetail):
    """Location Detail for the property lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('streetNumber', 'route', 'city', 'zipCode', 'zcta',
                      'neighborhood', 'tract', 'county', 'cd', 'state',
                      'footprintId', 'elevation', 'fema', 'floorElevation',
                      'building', 'floodType', 'residential'):
            setattr(self, field, response.get(field))
        # Property responses keep only the center point of the geometry.
        geom = response.get('geometry')
        self.geometry = Geometry(geom).center if geom else None


class LocationDetailNeighborhood(LocationDetail):
    """Location Detail for the neighborhood lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('city', 'subtype', 'county', 'state'):
            setattr(self, field, response.get(field))
        # Neighborhood responses keep only the center point of the geometry.
        geom = response.get('geometry')
        self.geometry = Geometry(geom).center if geom else None
        self.name = response.get('name')


class LocationDetailCity(LocationDetail):
    """Location Detail for the city lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('lsad', 'zcta', 'neighborhood', 'county', 'state'):
            setattr(self, field, response.get(field))
        self.geometry = Geometry(response.get('geometry'))
        self.name = response.get('name')


class LocationDetailZcta(LocationDetail):
    """Location Detail for the zcta (zip code tabulation area) lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('city', 'county', 'state'):
            setattr(self, field, response.get(field))
        self.geometry = Geometry(response.get('geometry'))
        self.name = response.get('name')


class LocationDetailTract(LocationDetail):
    """Location Detail for the census tract lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('fips', 'county', 'state'):
            setattr(self, field, response.get(field))
        self.geometry = Geometry(response.get('geometry'))


class LocationDetailCounty(LocationDetail):
    """Location Detail for the county lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('name', 'city', 'zcta', 'fips'):
            setattr(self, field, response.get(field))
        # Normalize the coastal flag into a strict boolean.
        self.isCoastal = bool(response.get('isCoastal'))
        self.cd = response.get('cd')
        self.state = response.get('state')
        self.geometry = Geometry(response.get('geometry'))


class LocationDetailCd(LocationDetail):
    """Location Detail for the congressional district lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('county', 'congress', 'state'):
            setattr(self, field, response.get(field))
        self.geometry = Geometry(response.get('geometry'))
        self.district = response.get('district')


class LocationDetailState(LocationDetail):
    """Location Detail for the state lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.fips = response.get('fips')
        self.geometry = Geometry(response.get('geometry'))
        self.name = response.get('name')


class LocationSummary(Api):
    """Common parent of every Location Summary model.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        for field in ('riskDirection', 'environmentalRisk', 'historic', 'adaptation'):
            setattr(self, field, response.get(field))


class LocationSummaryProperty(LocationSummary):
    """Location Summary for the property lookup type (adds flood factor).

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.floodFactor = response.get('floodFactor')


class LocationSummaryOther(LocationSummary):
    """Location Summary for every non-property lookup type.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.properties = response.get('properties')
class ProbabilityChance(Api):
    """Probability Chance model built from an API response.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.chance = response.get('chance')


class ProbabilityCount(Api):
    """Probability Count model built from an API response.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.count = response.get('count')


class ProbabilityCountSummary(Api):
    """Probability Count Summary model built from an API response.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))

        def flatten_if_nested(value):
            # Collapse one level of nesting, but only when at least one
            # element is itself a list (mirrors the API's mixed payloads).
            if value and any(isinstance(element, list) for element in value):
                return [item for sublist in value for item in sublist]
            return value

        for field in ('state', 'city', 'zcta', 'neighborhood', 'tract', 'county', 'cd'):
            setattr(self, field, flatten_if_nested(response.get(field)))


class ProbabilityCumulative(Api):
    """Probability Cumulative model built from an API response.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.cumulative = response.get('cumulative')


class ProbabilityDepth(Api):
    """Probability Depth model built from an API response.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.fsid = str(response.get('fsid'))
        self.depth = response.get('depth')


class Tile(Api):
    """Base map tile model built from an API response.

    Args:
        response (JSON): A JSON response received from the API
    """

    def __init__(self, response):
        super().__init__(response)
        self.coordinate = response.get('coordinate')
        self.image = response.get('image')


class ProbabilityDepthTile(Tile):
    """Probability Depth tile, tagged with its year and return period.

    Args:
        response (JSON): A JSON response received from the API
        year (int): The year of the tile
        return_period (int): The return period of the tile
    """

    def __init__(self, response, year, return_period):
        super().__init__(response)
        self.year = year
        self.return_period = return_period


class HistoricEventTile(Tile):
    """Historic Event tile, tagged with its event id.

    Args:
        response (JSON): A JSON response received from the API
        event_id: The id of the historic event
    """

    def __init__(self, response, event_id):
        super().__init__(response)
        self.event_id = str(event_id)


def read_search_items_from_file(file_name):
    """Reads the given file and pulls a list of search_items from the file.

    Each line is parsed with ast.literal_eval so tuples/numbers round-trip;
    lines that are not literals are kept as raw strings, except that a
    ValueError on the first line (a presumed header) drops that line.

    Args:
        file_name (str): A file name
    Returns:
        A list of search_items
    """
    search_items = []

    with open(file_name) as fp:
        for line_number, line in enumerate(fp, start=1):
            stripped = line.rstrip('\n')
            try:
                search_items.append(ast.literal_eval(stripped))
            except SyntaxError:
                search_items.append(stripped)
            except ValueError:
                # First line is treated as a header and skipped on ValueError.
                if line_number != 1:
                    search_items.append(stripped)

    return search_items
-------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | with open("README.md", 'r') as f: 4 | readme = f.read() 5 | 6 | with open('requirements.txt') as f: 7 | requirements = [x.strip() for x in f.readlines()] 8 | 9 | with open('extra_test_requires.txt') as f: 10 | extra = {'testing': [x.strip() for x in f.readlines()]} 11 | 12 | setup( 13 | name='fsf-api-access_python', 14 | version='2.3.6', 15 | description='A Python API Access Client for the First Street Foundation API', 16 | url='https://github.com/FirstStreet/fsf_api_access_python', 17 | project_urls={ 18 | 'First Street Foundation Website': 'https://firststreet.org/', 19 | 'API Product Data Dictionary': 'https://docs.firststreet.dev/docs' 20 | }, 21 | long_description=readme, 22 | long_description_content_type="text/markdown", 23 | author="Kelvin", 24 | author_email="kelvin@firststreet.org", 25 | license="MIT", 26 | classifiers=[ 27 | "License :: OSI Approved :: MIT License", 28 | "Topic :: Scientific/Engineering", 29 | "Topic :: Scientific/Engineering :: Hydrology", 30 | "Programming Language :: Python :: 3", 31 | "Programming Language :: Python :: 3.7", 32 | "Programming Language :: Python :: 3.8", 33 | ], 34 | 35 | # Package info 36 | packages=find_packages(exclude=['contrib', 'docs', 'tests*']), 37 | py_modules=[], 38 | install_requires=requirements, 39 | python_requires='>=3.7', 40 | extras_require=extra 41 | ) 42 | -------------------------------------------------------------------------------- /tests/api/test_common.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned by First Street Foundation 3 | 4 | # Standard Imports 5 | import os 6 | 7 | # External Imports 8 | import pytest 9 | 10 | # Internal Imports 11 | import firststreet 12 | from 
firststreet.errors import InvalidArgument 13 | 14 | api_key = os.environ['FSF_API_KEY'] 15 | fs = firststreet.FirstStreet(api_key) 16 | 17 | 18 | class TestApiGeometry: 19 | 20 | def test_adaptation_geom(self): 21 | ada = fs.adaptation.get_detail([29], csv=True) 22 | assert ada[0].geometry is not None 23 | 24 | def test_historic_geom(self): 25 | his = fs.historic.get_event([2], csv=True) 26 | assert his[0].geometry is not None 27 | 28 | def test_location_geom(self): 29 | loc = fs.location.get_detail([39153531702], "tract", csv=True) 30 | assert loc[0].geometry is not None 31 | -------------------------------------------------------------------------------- /tests/api/test_economic.py: -------------------------------------------------------------------------------- 1 | # Author: Kelvin Lai 2 | # Copyright: This module is owned by First Street Foundation 3 | 4 | # Standard Imports 5 | import os 6 | 7 | # External Imports 8 | import pytest 9 | 10 | # Internal Imports 11 | import firststreet 12 | from firststreet.errors import InvalidArgument 13 | 14 | api_key = os.environ['FSF_API_KEY'] 15 | fs = firststreet.FirstStreet(api_key) 16 | 17 | 18 | # class TestAALProperty: 19 | # 20 | # def test_empty(self): 21 | # with pytest.raises(InvalidArgument): 22 | # fs.aal.get_summary([], "property") 23 | # 24 | # def test_wrong_fsid_type(self): 25 | # with pytest.raises(InvalidArgument): 26 | # fs.aal.get_summary(80000002, "property") 27 | # 28 | # def test_invalid_fsid(self): 29 | # fsid = [00000000] 30 | # aal = fs.aal.get_summary(fsid, "property") 31 | # assert len(aal) == 1 32 | # assert aal[0].fsid == str(fsid[0]) 33 | # assert aal[0].depth_loss is None 34 | # assert aal[0].annual_loss is None 35 | # 36 | # def test_not_property_fsid(self): 37 | # fsid = [12] 38 | # aal = fs.aal.get_summary(fsid, "property") 39 | # assert len(aal) == 1 40 | # assert aal[0].fsid == str(fsid[0]) 41 | # assert aal[0].depth_loss is None 42 | # assert aal[0].annual_loss is None 43 | # 44 | # def 
test_commercial_fsid(self): 45 | # fsid = [1200171414] 46 | # aal = fs.aal.get_summary(fsid, "property") 47 | # assert len(aal) == 1 48 | # assert aal[0].fsid == str(fsid[0]) 49 | # assert aal[0].depth_loss is None 50 | # assert aal[0].annual_loss is None 51 | # 52 | # def test_single(self): 53 | # fsid = [80000002] 54 | # aal = fs.aal.get_summary(fsid, "property") 55 | # assert len(aal) == 1 56 | # assert aal[0].fsid == str(fsid[0]) 57 | # assert aal[0].depth_loss is not None 58 | # assert aal[0].annual_loss is not None 59 | # 60 | # def test_single_avm_param(self): 61 | # fsid = [80000002] 62 | # aal = fs.aal.get_summary(fsid, "property", extra_param={"avm": 150000}) 63 | # assert len(aal) == 1 64 | # assert aal[0].fsid == str(fsid[0]) 65 | # assert aal[0].depth_loss is not None 66 | # assert aal[0].annual_loss is not None 67 | # 68 | # def test_single_depths_param(self): 69 | # fsid = [80000002] 70 | # aal = fs.aal.get_summary(fsid, "property", extra_param={"depths": [30]}) 71 | # assert len(aal) == 1 72 | # assert aal[0].fsid == str(fsid[0]) 73 | # assert aal[0].depth_loss is not None 74 | # assert aal[0].annual_loss is not None 75 | # 76 | # def test_single_basement_param(self): 77 | # fsid = [80000002] 78 | # aal = fs.aal.get_summary(fsid, "property", extra_param={"basement": True}) 79 | # assert len(aal) == 1 80 | # assert aal[0].fsid == str(fsid[0]) 81 | # assert aal[0].depth_loss is not None 82 | # assert aal[0].annual_loss is not None 83 | # 84 | # def test_single_floor_elevation_param(self): 85 | # fsid = [80000002] 86 | # aal = fs.aal.get_summary(fsid, "property", extra_param={"floorElevation": 22}) 87 | # assert len(aal) == 1 88 | # assert aal[0].fsid == str(fsid[0]) 89 | # assert aal[0].depth_loss is not None 90 | # assert aal[0].annual_loss is not None 91 | # 92 | # def test_single_units_param(self): 93 | # fsid = [80000002] 94 | # aal = fs.aal.get_summary(fsid, "property", extra_param={"units": 2}) 95 | # assert len(aal) == 1 96 | # assert 
aal[0].fsid == str(fsid[0]) 97 | # assert aal[0].depth_loss is not None 98 | # assert aal[0].annual_loss is not None 99 | # 100 | # def test_single_stories_param(self): 101 | # fsid = [80000002] 102 | # aal = fs.aal.get_summary(fsid, "property", extra_param={"stories": 1}) 103 | # assert len(aal) == 1 104 | # assert aal[0].fsid == str(fsid[0]) 105 | # assert aal[0].depth_loss is not None 106 | # assert aal[0].annual_loss is not None 107 | # 108 | # def test_multiple(self): 109 | # fsid = [10000115, 80000002] 110 | # aal = fs.aal.get_summary(fsid, "property") 111 | # assert len(aal) == 2 112 | # aal.sort(key=lambda x: x.fsid) 113 | # assert aal[0].fsid == str(fsid[0]) 114 | # assert aal[0].depth_loss is not None 115 | # assert aal[0].annual_loss is not None 116 | # assert aal[1].fsid == str(fsid[1]) 117 | # assert aal[1].depth_loss is not None 118 | # assert aal[1].annual_loss is not None 119 | # 120 | # def test_single_csv(self, tmpdir): 121 | # fsid = [80000002] 122 | # aal = fs.aal.get_summary(fsid, "property", csv=True, output_dir=tmpdir) 123 | # assert len(aal) == 1 124 | # assert aal[0].fsid == str(fsid[0]) 125 | # assert aal[0].depth_loss is not None 126 | # assert aal[0].annual_loss is not None 127 | # 128 | # def test_multiple_csv(self, tmpdir): 129 | # fsid = [10000115, 80000002] 130 | # aal = fs.aal.get_summary(fsid, "property", csv=True, output_dir=tmpdir) 131 | # assert len(aal) == 2 132 | # aal.sort(key=lambda x: x.fsid) 133 | # assert aal[0].fsid == str(fsid[0]) 134 | # assert aal[0].depth_loss is not None 135 | # assert aal[0].annual_loss is not None 136 | # assert aal[1].fsid == str(fsid[1]) 137 | # assert aal[1].depth_loss is not None 138 | # assert aal[1].annual_loss is not None 139 | # 140 | # def test_mixed_invalid(self): 141 | # fsid = [0000000000, 80000002] 142 | # aal = fs.aal.get_summary(fsid, "property") 143 | # assert len(aal) == 2 144 | # aal.sort(key=lambda x: x.fsid) 145 | # assert aal[0].fsid == str(fsid[0]) 146 | # assert 
aal[0].depth_loss is None 147 | # assert aal[0].annual_loss is None 148 | # assert aal[1].fsid == str(fsid[1]) 149 | # assert aal[1].depth_loss is not None 150 | # assert aal[1].annual_loss is not None 151 | # 152 | # def test_mixed_invalid_csv(self, tmpdir): 153 | # fsid = [0000000000, 80000002] 154 | # aal = fs.aal.get_summary(fsid, "property", csv=True, output_dir=tmpdir) 155 | # assert len(aal) == 2 156 | # aal.sort(key=lambda x: x.fsid) 157 | # assert aal[0].fsid == str(fsid[0]) 158 | # assert aal[0].depth_loss is None 159 | # assert aal[0].annual_loss is None 160 | # assert aal[1].fsid == str(fsid[1]) 161 | # assert aal[1].depth_loss is not None 162 | # assert aal[1].annual_loss is not None 163 | # 164 | # def test_one_of_each(self, tmpdir): 165 | # aal = fs.aal.get_summary([1200000342], "property", csv=True, output_dir=tmpdir) 166 | # assert len(aal) == 1 167 | # assert aal[0].fsid == "1200000342" 168 | # assert aal[0].depth_loss is not None 169 | # assert aal[0].annual_loss is not None 170 | # aal = fs.aal.get_summary([1206631], "neighborhood", csv=True, output_dir=tmpdir) 171 | # assert len(aal) == 1 172 | # assert aal[0].fsid == "1206631" 173 | # assert aal[0].annual_loss is not None 174 | # aal = fs.aal.get_summary([3915406], "city", csv=True, output_dir=tmpdir) 175 | # assert len(aal) == 1 176 | # assert aal[0].fsid == "3915406" 177 | # assert aal[0].annual_loss is not None 178 | # aal = fs.aal.get_summary([44654], "zcta", csv=True, output_dir=tmpdir) 179 | # assert len(aal) == 1 180 | # assert aal[0].fsid == "44654" 181 | # assert aal[0].annual_loss is not None 182 | # aal = fs.aal.get_summary([39151712602], "tract", csv=True, output_dir=tmpdir) 183 | # assert len(aal) == 1 184 | # assert aal[0].fsid == "39151712602" 185 | # assert aal[0].annual_loss is not None 186 | # aal = fs.aal.get_summary([39077], "county", csv=True, output_dir=tmpdir) 187 | # assert len(aal) == 1 188 | # assert aal[0].fsid == "39077" 189 | # assert aal[0].annual_loss is not None 
190 | # aal = fs.aal.get_summary([3904], "cd", csv=True, output_dir=tmpdir) 191 | # assert len(aal) == 1 192 | # assert aal[0].fsid == "3904" 193 | # assert aal[0].annual_loss is not None 194 | # aal = fs.aal.get_summary([39], "state", csv=True, output_dir=tmpdir) 195 | # assert len(aal) == 1 196 | # assert aal[0].fsid == "39" 197 | # assert aal[0].annual_loss is not None 198 | # 199 | # 200 | # class TestAVMProperty: 201 | # 202 | # def test_empty(self): 203 | # with pytest.raises(InvalidArgument): 204 | # fs.avm.get_avm([]) 205 | # 206 | # def test_wrong_fsid_type(self): 207 | # with pytest.raises(InvalidArgument): 208 | # fs.avm.get_avm(2739) 209 | # 210 | # def test_invalid(self): 211 | # fsid = [0000] 212 | # avm = fs.avm.get_avm(fsid) 213 | # assert len(avm) == 1 214 | # assert avm[0].fsid == str(fsid[0]) 215 | # assert avm[0].provider_id is None 216 | # assert avm[0].valid_id is False 217 | # assert avm[0].avm is None 218 | # 219 | # def test_single(self): 220 | # fsid = [1200171414] 221 | # avm = fs.avm.get_avm(fsid) 222 | # assert len(avm) == 1 223 | # assert avm[0].fsid == str(fsid[0]) 224 | # assert avm[0].avm['mid'] >= 0 225 | # assert avm[0].provider_id == 2 226 | # assert avm[0].valid_id is True 227 | # 228 | # def test_multiple(self): 229 | # fsid = [1200000342, 1200171414] 230 | # avm = fs.avm.get_avm(fsid) 231 | # assert len(avm) == 2 232 | # avm.sort(key=lambda x: x.fsid) 233 | # assert avm[0].fsid == str(fsid[0]) 234 | # assert avm[0].avm['mid'] >= 0 235 | # assert avm[0].provider_id == 2 236 | # assert avm[0].valid_id is True 237 | # assert avm[1].fsid == str(fsid[1]) 238 | # assert avm[1].avm['mid'] >= 0 239 | # assert avm[1].provider_id == 2 240 | # assert avm[1].valid_id is True 241 | # 242 | # def test_single_csv(self, tmpdir): 243 | # fsid = [1200000342] 244 | # avm = fs.avm.get_avm(fsid, csv=True, output_dir=tmpdir) 245 | # assert len(avm) == 1 246 | # assert avm[0].fsid == str(fsid[0]) 247 | # assert avm[0].avm['mid'] >= 0 248 | # assert 
avm[0].provider_id == 2 249 | # assert avm[0].valid_id is True 250 | # 251 | # def test_multiple_csv(self, tmpdir): 252 | # fsid = [1200000342, 1200171414] 253 | # avm = fs.avm.get_avm(fsid, csv=True, output_dir=tmpdir) 254 | # assert len(avm) == 2 255 | # avm.sort(key=lambda x: x.fsid) 256 | # assert avm[0].fsid == str(fsid[0]) 257 | # assert avm[0].avm['mid'] >= 0 258 | # assert avm[0].provider_id == 2 259 | # assert avm[0].valid_id is True 260 | # assert avm[1].fsid == str(fsid[1]) 261 | # assert avm[1].avm['mid'] >= 0 262 | # assert avm[1].provider_id == 2 263 | # assert avm[1].valid_id is True 264 | # 265 | # def test_mixed_invalid(self): 266 | # fsid = [0000000000, 1200000342] 267 | # avm_out = fs.avm.get_avm(fsid) 268 | # assert len(avm_out) == 2 269 | # avm_out.sort(key=lambda x: x.fsid) 270 | # assert avm_out[0].fsid == str(fsid[0]) 271 | # assert avm_out[0].provider_id is None 272 | # assert avm_out[0].valid_id is False 273 | # assert avm_out[0].avm is None 274 | # assert avm_out[1].fsid == str(fsid[1]) 275 | # assert avm_out[1].avm['mid'] >= 0 276 | # assert avm_out[1].provider_id == 2 277 | # assert avm_out[1].valid_id is True 278 | # 279 | # def test_mixed_invalid_csv(self, tmpdir): 280 | # fsid = [0000000000, 1200000342] 281 | # avm_out = fs.avm.get_avm(fsid, csv=True, output_dir=tmpdir) 282 | # assert len(avm_out) == 2 283 | # avm_out.sort(key=lambda x: x.fsid) 284 | # assert avm_out[0].fsid == str(fsid[0]) 285 | # assert avm_out[0].provider_id is None 286 | # assert avm_out[0].valid_id is False 287 | # assert avm_out[0].avm is None 288 | # assert avm_out[1].fsid == str(fsid[1]) 289 | # assert avm_out[1].avm['mid'] >= 0 290 | # assert avm_out[1].provider_id == 2 291 | # assert avm_out[1].valid_id is True 292 | # 293 | # def test_one_of_each(self, tmpdir): 294 | # avm = fs.avm.get_avm([1200000342], csv=True, output_dir=tmpdir) 295 | # assert len(avm) == 1 296 | # assert avm[0].fsid == "1200000342" 297 | # assert avm[0].avm['mid'] >= 0 298 | # assert 
avm[0].provider_id == 2 299 | # assert avm[0].valid_id is True 300 | # 301 | # 302 | # class TestAVMProvider: 303 | # 304 | # def test_empty(self): 305 | # with pytest.raises(InvalidArgument): 306 | # fs.avm.get_provider([]) 307 | # 308 | # def test_wrong_provider_id_type(self): 309 | # with pytest.raises(InvalidArgument): 310 | # fs.avm.get_provider(2) 311 | # 312 | # def test_invalid(self): 313 | # provider_id = [999] 314 | # avm = fs.avm.get_provider(provider_id) 315 | # assert len(avm) == 1 316 | # assert avm[0].provider_id == provider_id[0] 317 | # assert avm[0].valid_id is False 318 | # assert avm[0].provider_logo is None 319 | # assert avm[0].provider_name is None 320 | # 321 | # def test_single(self): 322 | # provider_id = [2] 323 | # avm = fs.avm.get_provider(provider_id) 324 | # assert len(avm) == 1 325 | # assert avm[0].provider_id == provider_id[0] 326 | # assert avm[0].valid_id is True 327 | # assert avm[0].provider_logo == "" 328 | # assert avm[0].provider_name == "First Street Foundation" 329 | # 330 | # def test_single_csv(self, tmpdir): 331 | # provider_id = [2] 332 | # avm = fs.avm.get_provider(provider_id, csv=True, output_dir=tmpdir) 333 | # assert len(avm) == 1 334 | # assert avm[0].provider_id == provider_id[0] 335 | # assert avm[0].valid_id is True 336 | # assert avm[0].provider_logo == "" 337 | # assert avm[0].provider_name == "First Street Foundation" 338 | # 339 | # def test_mixed_invalid(self): 340 | # provider_id = [2, 3] 341 | # avm = fs.avm.get_provider(provider_id) 342 | # assert len(avm) == 2 343 | # avm.sort(key=lambda x: x.provider_id) 344 | # assert avm[0].provider_id == provider_id[0] 345 | # assert avm[0].valid_id is True 346 | # assert avm[0].provider_logo == "" 347 | # assert avm[0].provider_name == "First Street Foundation" 348 | # assert avm[1].provider_id == provider_id[1] 349 | # assert avm[1].valid_id is False 350 | # assert avm[1].provider_logo is None 351 | # assert avm[1].provider_name is None 352 | # 353 | # def 
#     def test_mixed_invalid_csv(self, tmpdir):
#         provider_id = [2, 3]
#         avm = fs.avm.get_provider(provider_id, csv=True, output_dir=tmpdir)
#         assert len(avm) == 2
#         avm.sort(key=lambda x: x.provider_id)
#         assert avm[0].provider_id == provider_id[0]
#         assert avm[0].valid_id is True
#         assert avm[0].provider_logo == ""
#         assert avm[0].provider_name == "First Street Foundation"
#         assert avm[1].provider_id == provider_id[1]
#         assert avm[1].valid_id is False
#         assert avm[1].provider_logo is None
#         assert avm[1].provider_name is None
#
#     def test_one_of_each(self, tmpdir):
#         avm = fs.avm.get_provider([2], csv=True, output_dir=tmpdir)
#         assert len(avm) == 1
#         assert avm[0].provider_id == 2
#         assert avm[0].valid_id is True
#         assert avm[0].provider_logo == ""
#         assert avm[0].provider_name == "First Street Foundation"


class TestEconomicPropertyNFIP:
    """Tests for the economic property NFIP lookup endpoint.

    Valid lookups populate ``data``; unknown FSIDs, bad coordinates and
    unresolvable addresses come back with ``data is None`` and
    ``valid_id is False``.
    """

    def test_empty(self):
        with pytest.raises(InvalidArgument):
            fs.economic.get_property_nfip([], "")

    def test_wrong_fsid_type(self):
        # A bare int (not a list) must be rejected.
        with pytest.raises(InvalidArgument):
            fs.economic.get_property_nfip(18)

    def test_invalid(self):
        fsids = [0000000]
        results = fs.economic.get_property_nfip(fsids)
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].data is None
        assert results[0].valid_id is False

    def test_single(self):
        fsids = [190836953]
        results = fs.economic.get_property_nfip(fsids)
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].data is not None
        assert results[0].valid_id is True

    def test_multiple(self):
        fsids = [190836953, 193139123]
        results = fs.economic.get_property_nfip(fsids)
        assert len(results) == 2
        results.sort(key=lambda rec: rec.fsid)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].data is not None
        assert results[0].valid_id is True
        assert results[1].data is not None
        assert results[1].valid_id is True

    def test_single_csv(self, tmpdir):
        fsids = [190836953]
        results = fs.economic.get_property_nfip(fsids, csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].data is not None
        assert results[0].valid_id is True

    def test_multiple_csv(self, tmpdir):
        fsids = [190836953, 193139123]
        results = fs.economic.get_property_nfip(fsids, csv=True, output_dir=tmpdir)
        assert len(results) == 2
        results.sort(key=lambda rec: rec.fsid)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].data is not None
        assert results[0].valid_id is True
        assert results[1].data is not None
        assert results[1].valid_id is True

    def test_mixed_invalid(self):
        fsids = [190836953, 000000000]
        results = fs.economic.get_property_nfip(fsids)
        assert len(results) == 2
        # reverse sort keeps the valid FSID in slot 0
        results.sort(key=lambda rec: rec.fsid, reverse=True)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].data is not None
        assert results[0].valid_id is True
        assert results[1].data is None
        assert results[1].valid_id is False

    def test_mixed_invalid_csv(self, tmpdir):
        fsids = [190836953, 000000000]
        results = fs.economic.get_property_nfip(fsids, csv=True, output_dir=tmpdir)
        assert len(results) == 2
        results.sort(key=lambda rec: rec.fsid, reverse=True)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].data is not None
        assert results[0].valid_id is True
        assert results[1].data is None
        assert results[1].valid_id is False

    def test_coordinate_invalid(self, tmpdir):
        results = fs.economic.get_property_nfip([(82.487671, -62.374322)], csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].data is None
        assert results[0].valid_id is False

    def test_single_coordinate(self, tmpdir):
        results = fs.economic.get_property_nfip([(40.7079652311, -74.0021455387)], csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].data is not None
        assert results[0].valid_id is True

    def test_address_invalid_404(self, tmpdir):
        results = fs.economic.get_property_nfip(["Shimik, Nunavut, Canada"], csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].data is None
        assert results[0].valid_id is False

    def test_address_invalid_500(self, tmpdir):
        results = fs.economic.get_property_nfip(["Toronto, Ontario, Canada"], csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].data is None
        assert results[0].valid_id is False

    def test_single_address(self, tmpdir):
        results = fs.economic.get_property_nfip(["247 Water St, New York, New York"], csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].data is not None
        assert results[0].valid_id is True

    def test_one_of_each(self, tmpdir):
        results = fs.economic.get_property_nfip([390000257], csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].valid_id is True
        assert results[0].fsid == "390000257"
        assert results[0].data is not None

# ---- tests/api/test_environmental.py ----
# Author: Kelvin Lai
# Copyright: This module is owned by First Street Foundation

# Standard Imports
import os

# External Imports
import pytest

# Internal Imports
import firststreet
from firststreet.errors import InvalidArgument

api_key = os.environ['FSF_API_KEY']
fs = firststreet.FirstStreet(api_key)
class TestEnvironmentalEvent:
    """Tests for the environmental precipitation lookup endpoint.

    Valid county FSIDs populate ``projected``; invalid FSIDs, coordinates and
    addresses come back with ``projected is None`` and ``valid_id is False``.
    """

    def test_empty(self):
        with pytest.raises(InvalidArgument):
            fs.environmental.get_precipitation([], "")

    def test_wrong_fsid_type(self):
        # A bare int (not a list) must be rejected.
        with pytest.raises(InvalidArgument):
            fs.environmental.get_precipitation(19117)

    def test_invalid(self):
        fsids = [0000]
        precip = fs.environmental.get_precipitation(fsids)
        assert len(precip) == 1
        assert precip[0].fsid == str(fsids[0])
        assert precip[0].projected is None
        assert precip[0].valid_id is False

    def test_single(self):
        fsids = [19117]
        precip = fs.environmental.get_precipitation(fsids)
        assert len(precip) == 1
        assert precip[0].fsid == str(fsids[0])
        assert precip[0].projected is not None
        assert precip[0].valid_id is True

    def test_multiple(self):
        fsids = [19117, 19135]
        precip = fs.environmental.get_precipitation(fsids)
        assert len(precip) == 2
        precip.sort(key=lambda rec: rec.fsid)
        assert precip[0].fsid == str(fsids[0])
        assert precip[0].projected is not None
        assert precip[1].fsid == str(fsids[1])
        assert precip[1].projected is not None
        assert precip[0].valid_id is True
        assert precip[1].valid_id is True

    def test_single_csv(self, tmpdir):
        fsids = [19117]
        precip = fs.environmental.get_precipitation(fsids, csv=True, output_dir=tmpdir)
        assert len(precip) == 1
        assert precip[0].fsid == str(fsids[0])
        assert precip[0].projected is not None
        assert precip[0].valid_id is True

    def test_multiple_csv(self, tmpdir):
        fsids = [19117, 19135]
        precip = fs.environmental.get_precipitation(fsids, csv=True, output_dir=tmpdir)
        assert len(precip) == 2
        precip.sort(key=lambda rec: rec.fsid)
        assert precip[0].fsid == str(fsids[0])
        assert precip[0].projected is not None
        assert precip[1].fsid == str(fsids[1])
        assert precip[1].projected is not None
        assert precip[0].valid_id is True
        assert precip[1].valid_id is True

    def test_mixed_invalid(self):
        fsids = [19117, 00000]
        precip = fs.environmental.get_precipitation(fsids)
        assert len(precip) == 2
        # reverse sort keeps the valid FSID in slot 0
        precip.sort(key=lambda rec: rec.fsid, reverse=True)
        assert precip[0].fsid == str(fsids[0])
        assert precip[0].projected is not None
        assert precip[1].fsid == str(fsids[1])
        assert precip[1].projected is None
        assert precip[0].valid_id is True
        assert precip[1].valid_id is False

    def test_mixed_invalid_csv(self, tmpdir):
        fsids = [19117, 00000]
        precip = fs.environmental.get_precipitation(fsids, csv=True, output_dir=tmpdir)
        assert len(precip) == 2
        precip.sort(key=lambda rec: rec.fsid, reverse=True)
        assert precip[0].fsid == str(fsids[0])
        assert precip[0].projected is not None
        assert precip[1].fsid == str(fsids[1])
        assert precip[1].projected is None
        assert precip[0].valid_id is True
        assert precip[1].valid_id is False

    def test_coordinate_invalid(self, tmpdir):
        precip = fs.environmental.get_precipitation([(82.487671, -62.374322)], csv=True, output_dir=tmpdir)
        assert len(precip) == 1
        assert precip[0].projected is None
        assert precip[0].valid_id is False

    def test_single_coordinate(self, tmpdir):
        precip = fs.environmental.get_precipitation([(40.7079652311, -74.0021455387)],
                                                    csv=True, output_dir=tmpdir)
        assert len(precip) == 1
        assert precip[0].projected is not None
        assert precip[0].valid_id is True

    def test_address_invalid_404(self, tmpdir):
        precip = fs.environmental.get_precipitation(["NotACounty"], csv=True, output_dir=tmpdir)
        assert len(precip) == 1
        assert precip[0].projected is None
        assert precip[0].valid_id is False

    def test_address_invalid_500(self, tmpdir):
        precip = fs.environmental.get_precipitation(["Toronto, Ontario, Canada"], csv=True, output_dir=tmpdir)
        assert len(precip) == 1
        assert precip[0].projected is None
        assert precip[0].valid_id is False

    def test_single_address(self, tmpdir):
        precip = fs.environmental.get_precipitation(["247 Water St, New York, New York"],
                                                    csv=True, output_dir=tmpdir)
        assert len(precip) == 1
        assert precip[0].projected is not None
        assert precip[0].valid_id is True

    def test_one_of_each(self, tmpdir):
        precip = fs.environmental.get_precipitation([39057], csv=True, output_dir=tmpdir)
        assert len(precip) == 1
        assert precip[0].valid_id is True
        assert precip[0].fsid == "39057"
        assert precip[0].projected is not None
        # Each projection entry carries a year and a low/mid/high band.
        assert precip[0].projected[0].get("year") is not None
        assert precip[0].projected[0].get("data") is not None
        assert precip[0].projected[0].get("data").get("low") is not None
        assert precip[0].projected[0].get("data").get("mid") is not None
        assert precip[0].projected[0].get("data").get("high") is not None

# ---- tests/api/test_fema.py ----
# Author: Kelvin Lai
# Copyright: This module is owned by First Street Foundation

# Standard Imports
import os

# External Imports
import pytest

# Internal Imports
import firststreet
from firststreet.errors import InvalidArgument

api_key = os.environ['FSF_API_KEY']
fs = firststreet.FirstStreet(api_key)


class TestFemaNfip:
    """Tests for the FEMA NFIP claim/policy lookup endpoint.

    Valid (fsid, lookup-type) pairs populate the claim/policy fields; invalid
    FSIDs, mismatched lookup types and unresolvable addresses return records
    with ``claimCount is None`` and ``valid_id is False``.
    """

    def test_empty(self):
        with pytest.raises(InvalidArgument):
            fs.fema.get_nfip([], "")

    def test_empty_fsid(self):
        with pytest.raises(InvalidArgument):
            fs.fema.get_nfip([], "tract")

    def test_empty_type(self):
        with pytest.raises(InvalidArgument):
            fs.fema.get_nfip([19055950100], "")

    def test_wrong_fsid_type(self):
        # A bare string (not a list) must be rejected.
        with pytest.raises(InvalidArgument):
            fs.fema.get_nfip("19055950100", "tract")

    def test_wrong_fsid_number(self):
        fsids = [19027]
        results = fs.fema.get_nfip(fsids, "tract")
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is None
        assert results[0].valid_id is False

    def test_incorrect_lookup_type(self, tmpdir):
        # Tract FSID queried as a county: no match expected.
        fsids = [19055950100]
        results = fs.fema.get_nfip(fsids, "county", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is None
        assert results[0].valid_id is False

    def test_wrong_fema_type(self):
        with pytest.raises(TypeError):
            fs.fema.get_nfip([19055950100], 190)

    def test_single(self):
        fsids = [19055950100]
        results = fs.fema.get_nfip(fsids, "tract")
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is not None
        assert results[0].valid_id is True

    def test_multiple(self):
        fsids = [19055950100, 19153003200]
        results = fs.fema.get_nfip(fsids, "tract")
        assert len(results) == 2
        results.sort(key=lambda rec: rec.fsid)
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is not None
        assert results[1].fsid == str(fsids[1])
        assert results[1].claimCount is not None
        assert results[0].valid_id is True
        assert results[1].valid_id is True

    def test_single_csv(self, tmpdir):
        fsids = [19055950100]
        results = fs.fema.get_nfip(fsids, "tract", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is not None
        assert results[0].valid_id is True

    def test_multiple_csv(self, tmpdir):
        fsids = [19055950100, 19153003200]
        results = fs.fema.get_nfip(fsids, "tract", csv=True, output_dir=tmpdir)
        assert len(results) == 2
        results.sort(key=lambda rec: rec.fsid)
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is not None
        assert results[1].fsid == str(fsids[1])
        assert results[1].claimCount is not None
        assert results[0].valid_id is True
        assert results[1].valid_id is True

    def test_mixed_invalid(self):
        fsids = [19055950100, 00000000000]
        results = fs.fema.get_nfip(fsids, "tract")
        assert len(results) == 2
        # reverse sort keeps the valid FSID in slot 0
        results.sort(key=lambda rec: rec.fsid, reverse=True)
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is not None
        assert results[1].fsid == str(fsids[1])
        assert results[1].claimCount is None
        assert results[0].valid_id is True
        assert results[1].valid_id is False

    def test_mixed_invalid_csv(self, tmpdir):
        fsids = [19055950100, 00000000000]
        results = fs.fema.get_nfip(fsids, "tract", csv=True, output_dir=tmpdir)
        assert len(results) == 2
        results.sort(key=lambda rec: rec.fsid, reverse=True)
        assert results[0].fsid == str(fsids[0])
        assert results[0].claimCount is not None
        assert results[1].fsid == str(fsids[1])
        assert results[1].claimCount is None
        assert results[0].valid_id is True
        assert results[1].valid_id is False

    def test_coordinate_invalid(self, tmpdir):
        results = fs.fema.get_nfip([(82.487671, -62.374322)], "county", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].claimCount is None
        assert results[0].valid_id is False

    def test_single_coordinate(self, tmpdir):
        results = fs.fema.get_nfip([(40.7079652311, -74.0021455387)], "county", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].claimCount is not None
        assert results[0].valid_id is True

    def test_address_invalid_404(self, tmpdir):
        results = fs.fema.get_nfip(["NotATract"], "county", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].claimCount is None
        assert results[0].valid_id is False

    def test_address_invalid_500(self, tmpdir):
        results = fs.fema.get_nfip(["Toronto, Ontario, Canada"], "county", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].claimCount is None
        assert results[0].valid_id is False

    def test_single_address(self, tmpdir):
        results = fs.fema.get_nfip(["247 Water St, New York, New York"], "county", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].claimCount is not None
        assert results[0].valid_id is True

    def test_one_of_each(self, tmpdir):
        # One known-valid lookup for each supported location type; every
        # record must expose the full set of NFIP claim/policy fields.
        for fsid, lookup in [(44074, "zcta"), (39013012300, "tract"),
                             (39093, "county"), (39, "state")]:
            results = fs.fema.get_nfip([fsid], lookup, csv=True, output_dir=tmpdir)
            assert len(results) == 1
            assert results[0].valid_id is True
            assert results[0].fsid == str(fsid)
            assert results[0].claimCount is not None
            assert results[0].policyCount is not None
            assert results[0].buildingPaid is not None
            assert results[0].contentPaid is not None
            assert results[0].buildingCoverage is not None
            assert results[0].contentCoverage is not None
            assert results[0].iccPaid is not None

# ---- tests/api/test_location.py ----
# Author: Kelvin Lai
# Copyright: This module is owned by First Street Foundation

# Standard Imports
import os

# External Imports
import pytest

# Internal Imports
import firststreet
from firststreet.errors import InvalidArgument

api_key = os.environ['FSF_API_KEY']
fs = firststreet.FirstStreet(api_key)


class TestLocationDetail:
    """Tests for the location detail lookup endpoint."""

    def test_empty(self):
        with pytest.raises(InvalidArgument):
            fs.location.get_detail([], "")

    def test_empty_fsid(self):
        with pytest.raises(InvalidArgument):
            fs.location.get_detail([], "property")

    def test_empty_type(self):
        with pytest.raises(InvalidArgument):
            fs.location.get_detail([190836953], "")
pytest.raises(InvalidArgument): 30 | fs.location.get_detail([190836953], "") 31 | 32 | def test_wrong_fsid_type(self): 33 | with pytest.raises(InvalidArgument): 34 | fs.location.get_detail(190836953, "property") 35 | 36 | def test_wrong_fsid_number(self): 37 | fsid = [1867176] 38 | location = fs.location.get_detail(fsid, "property") 39 | assert len(location) == 1 40 | assert location[0].fsid == str(fsid[0]) 41 | assert location[0].state is None 42 | assert location[0].valid_id is False 43 | 44 | def test_incorrect_lookup_type(self, tmpdir): 45 | fsid = [190836953] 46 | location = fs.location.get_detail(fsid, "city", csv=True, output_dir=tmpdir) 47 | assert len(location) == 1 48 | assert location[0].fsid == str(fsid[0]) 49 | assert location[0].name is None 50 | assert location[0].valid_id is False 51 | 52 | def test_wrong_location_type(self): 53 | with pytest.raises(TypeError): 54 | fs.location.get_detail([190836953], 190) 55 | 56 | def test_single(self): 57 | fsid = [190836953] 58 | location = fs.location.get_detail(fsid, "property") 59 | assert len(location) == 1 60 | assert location[0].fsid == str(fsid[0]) 61 | assert location[0].state is not None 62 | assert location[0].valid_id is True 63 | 64 | def test_multiple(self): 65 | fsid = [190836953, 193139123] 66 | location = fs.location.get_detail(fsid, "property") 67 | assert len(location) == 2 68 | location.sort(key=lambda x: x.fsid) 69 | assert location[0].fsid == str(fsid[0]) 70 | assert location[1].fsid == str(fsid[1]) 71 | assert location[0].state is not None 72 | assert location[1].state is not None 73 | assert location[0].valid_id is True 74 | assert location[1].valid_id is True 75 | 76 | def test_single_csv(self, tmpdir): 77 | fsid = [190836953] 78 | location = fs.location.get_detail(fsid, "property", csv=True, output_dir=tmpdir) 79 | assert len(location) == 1 80 | assert location[0].fsid == str(fsid[0]) 81 | assert location[0].state is not None 82 | assert location[0].valid_id is True 83 | 84 | def 
test_multiple_csv(self, tmpdir): 85 | fsid = [190836953, 193139123] 86 | location = fs.location.get_detail(fsid, "property", csv=True, output_dir=tmpdir) 87 | assert len(location) == 2 88 | location.sort(key=lambda x: x.fsid) 89 | assert location[0].fsid == str(fsid[0]) 90 | assert location[1].fsid == str(fsid[1]) 91 | assert location[0].state is not None 92 | assert location[1].state is not None 93 | assert location[0].valid_id is True 94 | assert location[1].valid_id is True 95 | 96 | def test_mixed_invalid(self): 97 | fsid = [190836953, 000000000] 98 | location = fs.location.get_detail(fsid, "property") 99 | assert len(location) == 2 100 | location.sort(key=lambda x: x.fsid, reverse=True) 101 | assert location[0].fsid == str(fsid[0]) 102 | assert location[1].fsid == str(fsid[1]) 103 | assert location[0].state is not None 104 | assert location[1].state is None 105 | assert location[0].valid_id is True 106 | assert location[1].valid_id is False 107 | 108 | def test_mixed_invalid_csv(self, tmpdir): 109 | fsid = [190836953, 000000000] 110 | location = fs.location.get_detail(fsid, "property", csv=True, output_dir=tmpdir) 111 | assert len(location) == 2 112 | location.sort(key=lambda x: x.fsid, reverse=True) 113 | assert location[0].fsid == str(fsid[0]) 114 | assert location[1].fsid == str(fsid[1]) 115 | assert location[0].state is not None 116 | assert location[1].state is None 117 | assert location[0].valid_id is True 118 | assert location[1].valid_id is False 119 | 120 | def test_coordinate_invalid(self, tmpdir): 121 | location = fs.location.get_detail([(82.487671, -62.374322)], "property", csv=True, output_dir=tmpdir) 122 | assert len(location) == 1 123 | assert location[0].state is None 124 | assert location[0].valid_id is False 125 | 126 | def test_single_coordinate(self, tmpdir): 127 | location = fs.location.get_detail([(40.7079652311, -74.0021455387)], "property", csv=True, output_dir=tmpdir) 128 | assert len(location) == 1 129 | assert location[0].state is 
not None 130 | assert location[0].valid_id is True 131 | 132 | def test_address_invalid_404(self, tmpdir): 133 | location = fs.location.get_detail(["Shimik, Nunavut, Canada"], "property", csv=True, output_dir=tmpdir) 134 | assert len(location) == 1 135 | assert location[0].state is None 136 | assert location[0].valid_id is False 137 | 138 | def test_address_invalid_500(self, tmpdir): 139 | location = fs.location.get_detail(["Toronto, Ontario, Canada"], "property", csv=True, output_dir=tmpdir) 140 | assert len(location) == 1 141 | assert location[0].state is None 142 | assert location[0].valid_id is False 143 | 144 | def test_single_address(self, tmpdir): 145 | location = fs.location.get_detail(["247 Water St, New York, New York"], "property", csv=True, output_dir=tmpdir) 146 | assert len(location) == 1 147 | assert location[0].state is not None 148 | assert location[0].valid_id is True 149 | 150 | def test_one_of_each(self, tmpdir): 151 | location = fs.location.get_detail([395112095], "property", csv=True, output_dir=tmpdir) 152 | assert len(location) == 1 153 | assert location[0].valid_id is True 154 | assert location[0].fsid == "395112095" 155 | assert location[0].streetNumber is not None 156 | assert location[0].route is not None 157 | assert location[0].city is not None 158 | assert location[0].zipCode is not None 159 | assert location[0].zcta is not None 160 | assert location[0].neighborhood is not None 161 | assert location[0].tract is not None 162 | assert location[0].county is not None 163 | assert location[0].cd is not None 164 | assert location[0].state is not None 165 | assert location[0].footprintId is not None 166 | assert location[0].elevation is None 167 | assert location[0].fema is not None 168 | assert location[0].geometry is not None 169 | location = fs.location.get_detail([1206631], "neighborhood", csv=True, output_dir=tmpdir) 170 | assert len(location) == 1 171 | assert location[0].valid_id is True 172 | assert location[0].fsid == "1206631" 173 
| assert location[0].city is not None 174 | assert location[0].name is not None 175 | assert location[0].subtype is not None 176 | assert location[0].county is not None 177 | assert location[0].state is not None 178 | assert location[0].geometry is not None 179 | location = fs.location.get_detail([3915406], "city", csv=True, output_dir=tmpdir) 180 | assert len(location) == 1 181 | assert location[0].valid_id is True 182 | assert location[0].fsid == "3915406" 183 | assert location[0].name is not None 184 | assert location[0].lsad is not None 185 | assert location[0].zcta is not None 186 | assert location[0].neighborhood is not None 187 | assert location[0].county is not None 188 | assert location[0].state is not None 189 | assert location[0].geometry is not None 190 | location = fs.location.get_detail([44654], "zcta", csv=True, output_dir=tmpdir) 191 | assert len(location) == 1 192 | assert location[0].valid_id is True 193 | assert location[0].fsid == "44654" 194 | assert location[0].name is not None 195 | assert location[0].county is not None 196 | assert location[0].city is not None 197 | assert location[0].state is not None 198 | assert location[0].geometry is not None 199 | location = fs.location.get_detail([39151712602], "tract", csv=True, output_dir=tmpdir) 200 | assert len(location) == 1 201 | assert location[0].valid_id is True 202 | assert location[0].fips == "39151712602" 203 | assert location[0].county is not None 204 | assert location[0].state is not None 205 | assert location[0].geometry is not None 206 | location = fs.location.get_detail([39077], "county", csv=True, output_dir=tmpdir) 207 | assert len(location) == 1 208 | assert location[0].valid_id is True 209 | assert location[0].fsid == "39077" 210 | assert location[0].fips is not None 211 | assert location[0].name is not None 212 | assert location[0].isCoastal is not None 213 | assert location[0].city is not None 214 | assert location[0].zcta is not None 215 | assert location[0].cd is not None 216 
class TestLocationSummary:
    """Exercise ``fs.location.get_summary`` end to end.

    Covers argument validation, valid/invalid FSIDs, coordinate and address
    search items, CSV output, and every supported location type.  Requires a
    live API connection via the module-level ``fs`` client.
    """

    def test_empty(self):
        # Neither search items nor a location type: rejected up front.
        with pytest.raises(InvalidArgument):
            fs.location.get_summary([], "")

    def test_empty_fsid(self):
        with pytest.raises(InvalidArgument):
            fs.location.get_summary([], "property")

    def test_empty_type(self):
        with pytest.raises(InvalidArgument):
            fs.location.get_summary([190836953], "")

    def test_wrong_fsid_type(self):
        # A bare int is not an accepted search-items container; a list is required.
        with pytest.raises(InvalidArgument):
            fs.location.get_summary(190836953, "property")

    def test_wrong_fsid_number(self):
        # An FSID with no match still yields one result object, flagged invalid.
        fsids = [1867176]
        results = fs.location.get_summary(fsids, "property")
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].adaptation is None
        assert results[0].valid_id is False

    def test_incorrect_lookup_type(self, tmpdir):
        # A property FSID queried as a "city" cannot resolve.
        fsids = [190836953]
        results = fs.location.get_summary(fsids, "city", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].adaptation is None
        assert results[0].valid_id is False

    def test_wrong_location_type(self):
        with pytest.raises(TypeError):
            fs.location.get_summary([190836953], 190)

    def test_single(self):
        fsids = [190836953]
        results = fs.location.get_summary(fsids, "property")
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].adaptation is not None
        assert results[0].valid_id is True

    def test_multiple(self):
        fsids = [190836953, 193139123]
        results = fs.location.get_summary(fsids, "property")
        assert len(results) == 2
        # Responses may arrive in any order; sort before positional asserts.
        results.sort(key=lambda loc: loc.fsid)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].adaptation is not None
        assert results[1].adaptation is not None
        assert results[0].valid_id is True
        assert results[1].valid_id is True

    def test_single_csv(self, tmpdir):
        fsids = [190836953]
        results = fs.location.get_summary(fsids, "property", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].fsid == str(fsids[0])
        assert results[0].adaptation is not None
        assert results[0].valid_id is True

    def test_multiple_csv(self, tmpdir):
        fsids = [190836953, 193139123]
        results = fs.location.get_summary(fsids, "property", csv=True, output_dir=tmpdir)
        assert len(results) == 2
        results.sort(key=lambda loc: loc.fsid)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].adaptation is not None
        assert results[1].adaptation is not None
        assert results[0].valid_id is True
        assert results[1].valid_id is True

    def test_mixed_invalid(self):
        fsids = [190836953, 000000000]
        results = fs.location.get_summary(fsids, "property")
        assert len(results) == 2
        # Reverse sort puts the valid FSID first and the zero placeholder last.
        results.sort(key=lambda loc: loc.fsid, reverse=True)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].adaptation is not None
        assert results[1].adaptation is None
        assert results[0].valid_id is True
        assert results[1].valid_id is False

    def test_mixed_invalid_csv(self, tmpdir):
        fsids = [190836953, 000000000]
        results = fs.location.get_summary(fsids, "property", csv=True, output_dir=tmpdir)
        assert len(results) == 2
        results.sort(key=lambda loc: loc.fsid, reverse=True)
        assert results[0].fsid == str(fsids[0])
        assert results[1].fsid == str(fsids[1])
        assert results[0].adaptation is not None
        assert results[1].adaptation is None
        assert results[0].valid_id is True
        assert results[1].valid_id is False

    def test_coordinate_invalid(self, tmpdir):
        # Coordinate with no underlying property match.
        results = fs.location.get_summary([(82.487671, -62.374322)], "property", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].adaptation is None
        assert results[0].valid_id is False

    def test_single_coordinate(self, tmpdir):
        results = fs.location.get_summary([(40.7079652311, -74.0021455387)], "property", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].adaptation is not None
        assert results[0].valid_id is True

    def test_address_invalid_404(self, tmpdir):
        # Address outside API coverage; a not-found lookup still returns a stub.
        results = fs.location.get_summary(["Shimik, Nunavut, Canada"], "property", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].adaptation is None
        assert results[0].valid_id is False

    def test_address_invalid_500(self, tmpdir):
        results = fs.location.get_summary(["Toronto, Ontario, Canada"], "property", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].adaptation is None
        assert results[0].valid_id is False

    def test_single_address(self, tmpdir):
        results = fs.location.get_summary(["247 Water St, New York, New York"], "property",
                                          csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].adaptation is not None
        assert results[0].valid_id is True

    def test_one_of_each(self, tmpdir):
        # Property summaries carry a floodFactor; aggregated location types
        # instead expose a properties dict with total/atRisk counts.
        results = fs.location.get_summary([395112095], "property", csv=True, output_dir=tmpdir)
        assert len(results) == 1
        assert results[0].valid_id is True
        assert results[0].fsid == "395112095"
        assert results[0].floodFactor is not None
        assert results[0].riskDirection is not None
        assert results[0].historic is not None
        assert results[0].environmentalRisk is not None
        assert results[0].adaptation is not None

        # Every aggregated location type shares the same summary shape, so
        # drive the identical assertion set from a lookup table.
        aggregated_lookups = [
            (631054, "neighborhood"),
            (3958002, "city"),
            (43935, "zcta"),
            (39153531702, "tract"),
            (39027, "county"),
            (3903, "cd"),
            (39, "state"),
        ]
        for lookup_fsid, location_type in aggregated_lookups:
            results = fs.location.get_summary([lookup_fsid], location_type, csv=True, output_dir=tmpdir)
            assert len(results) == 1
            assert results[0].valid_id is True
            assert results[0].fsid == str(lookup_fsid)
            assert results[0].riskDirection is not None
            assert results[0].historic is not None
            assert results[0].environmentalRisk is not None
            assert results[0].adaptation is not None
            assert results[0].properties is not None
            assert results[0].properties.get("total") is not None
            assert results[0].properties.get("atRisk") is not None
class TestProbabilityTiles:
    """Tests for ``fs.tile.get_probability_depth`` (probability-depth map tiles).

    Tiles are addressed by ``(z, x, y)`` slippy-map coordinates; requires a
    live API connection via the module-level ``fs`` client.
    """

    def test_empty(self):
        # No tile coordinates supplied.
        with pytest.raises(InvalidArgument):
            fs.tile.get_probability_depth(year=2050, return_period=5, search_items=[])

    def test_wrong_coord_type(self):
        # A single (z, x, y) tuple must still be wrapped in a list.
        with pytest.raises(InvalidArgument):
            fs.tile.get_probability_depth(year=2050, return_period=5, search_items=(12, 942, 1715))

    def test_wrong_coord_tuple_type(self):
        # List elements must be coordinate tuples, not bare ints.
        with pytest.raises(TypeError):
            fs.tile.get_probability_depth(year=2050, return_period=500, search_items=[500])

    def test_invalid(self):
        # Well-formed but nonexistent tile: a stub result with no image.
        coord = [(1, 1, 1)]
        tile = fs.tile.get_probability_depth(year=2050, return_period=5, search_items=coord)
        assert len(tile) == 1
        assert tile[0].coordinate == coord[0]
        assert tile[0].image is None
        assert tile[0].valid_id is False

    def test_wrong_year_type(self):
        with pytest.raises(TypeError):
            fs.tile.get_probability_depth(year="year", return_period=5, search_items=[(12, 942, 1715)])

    def test_wrong_return_period_type(self):
        with pytest.raises(TypeError):
            fs.tile.get_probability_depth(year=2050, return_period="rp", search_items=[(12, 942, 1715)])

    def test_bad_year(self):
        with pytest.raises(InvalidArgument):
            fs.tile.get_probability_depth(year=1000, return_period=5, search_items=[(12, 942, 1715)])

    def test_bad_return_period(self):
        # FIX: this test was a verbatim copy of test_bad_year (invalid year,
        # VALID return period) and never exercised return-period validation.
        # Use a valid year with an unsupported return period instead.
        # NOTE(review): assumes 1000 is not an accepted return period — the
        # valid set (e.g. 5/500 used elsewhere in this class) should be
        # confirmed against the tile API docs.
        with pytest.raises(InvalidArgument):
            fs.tile.get_probability_depth(year=2050, return_period=1000, search_items=[(12, 942, 1715)])

    def test_single(self):
        coord = [(12, 942, 1715)]
        tile = fs.tile.get_probability_depth(year=2050, return_period=5, search_items=coord)
        assert len(tile) == 1
        assert tile[0].coordinate == coord[0]
        assert tile[0].image is not None
        assert tile[0].valid_id is True

    def test_multiple(self):
        coord = [(12, 942, 1715), (17, 30990, 54379)]
        tile = fs.tile.get_probability_depth(year=2050, return_period=5, search_items=coord)
        assert len(tile) == 2
        # Responses may arrive in any order; sort before positional asserts.
        tile.sort(key=lambda x: x.coordinate)
        assert tile[0].coordinate == coord[0]
        assert tile[1].coordinate == coord[1]
        assert tile[0].image is not None
        assert tile[0].valid_id is True
        assert tile[1].image is not None
        assert tile[1].valid_id is True

    def test_single_image(self):
        coord = [(12, 942, 1715)]
        tile = fs.tile.get_probability_depth(year=2050, return_period=5, search_items=coord, image=True)
        assert len(tile) == 1
        assert tile[0].coordinate == coord[0]
        assert tile[0].image is not None
        assert tile[0].valid_id is True

    def test_mixed_invalid(self):
        coord = [(12, 942, 1715), (1, 1, 1)]
        tile = fs.tile.get_probability_depth(year=2050, return_period=5, search_items=coord)
        assert len(tile) == 2
        # Reverse sort puts the valid tile first, the nonexistent one last.
        tile.sort(key=lambda x: x.coordinate, reverse=True)
        assert tile[0].coordinate == coord[0]
        assert tile[1].coordinate == coord[1]
        assert tile[0].image is not None
        assert tile[0].valid_id is True
        assert tile[1].image is None
        assert tile[1].valid_id is False

    def test_one_of_each(self):
        tile = fs.tile.get_probability_depth(year=2050, return_period=5, search_items=[(12, 942, 1715)])
        assert len(tile) == 1
        assert tile[0].valid_id is True
        assert tile[0].coordinate == (12, 942, 1715)
        assert tile[0].image is not None
        assert tile[0].return_period
        assert tile[0].year
class TestHistoricTiles:
    """Tests for ``fs.tile.get_historic_event`` (historic flood event map tiles).

    Tiles are addressed by ``(z, x, y)`` slippy-map coordinates; requires a
    live API connection via the module-level ``fs`` client.
    """

    def test_empty(self):
        # No tile coordinates supplied.
        with pytest.raises(InvalidArgument):
            fs.tile.get_historic_event(event_id=2, search_items=[])

    def test_wrong_coord_type(self):
        # A bare (z, x, y) tuple must be wrapped in a list.
        with pytest.raises(InvalidArgument):
            fs.tile.get_historic_event(event_id=2, search_items=(12, 942, 1715))

    def test_invalid(self):
        coords = [(12, 1, 1)]
        tiles = fs.tile.get_historic_event(event_id=2, search_items=coords)
        assert len(tiles) == 1
        assert tiles[0].coordinate == coords[0]
        # No way to test if image is bad

    def test_wrong_event_id_type(self):
        with pytest.raises(TypeError):
            fs.tile.get_historic_event(event_id="event_id", search_items=[(12, 942, 1715)])

    def test_bad_event(self):
        coords = [(12, 942, 1715)]
        tiles = fs.tile.get_historic_event(event_id=99999, search_items=coords)
        assert len(tiles) == 1
        assert tiles[0].coordinate == coords[0]
        # No way to test if image is bad

    def test_single(self):
        coords = [(12, 942, 1715)]
        tiles = fs.tile.get_historic_event(event_id=2, search_items=coords)
        assert len(tiles) == 1
        assert tiles[0].coordinate == coords[0]
        assert tiles[0].image is not None
        assert tiles[0].valid_id is True

    def test_multiple(self):
        coords = [(12, 942, 1715), (17, 30990, 54379)]
        tiles = fs.tile.get_historic_event(event_id=2, search_items=coords)
        assert len(tiles) == 2
        # Responses may arrive in any order; sort before positional asserts.
        tiles.sort(key=lambda t: t.coordinate)
        assert tiles[0].coordinate == coords[0]
        assert tiles[1].coordinate == coords[1]
        assert tiles[0].image is not None
        assert tiles[0].valid_id is True
        assert tiles[1].image is not None
        assert tiles[1].valid_id is True

    def test_single_image(self):
        coords = [(12, 942, 1715)]
        tiles = fs.tile.get_historic_event(event_id=2, search_items=coords, image=True)
        assert len(tiles) == 1
        assert tiles[0].coordinate == coords[0]
        assert tiles[0].image is not None
        assert tiles[0].valid_id is True

    def test_mixed_invalid(self):
        coords = [(12, 942, 1715), (2, 1, 1)]
        tiles = fs.tile.get_historic_event(event_id=2, search_items=coords)
        assert len(tiles) == 2
        # Reverse sort puts the valid tile first.
        tiles.sort(key=lambda t: t.coordinate, reverse=True)
        assert tiles[0].coordinate == coords[0]
        assert tiles[1].coordinate == coords[1]
        assert tiles[0].image is not None
        assert tiles[0].valid_id is True
        # No way to test if image is bad

    def test_one_of_each(self):
        tiles = fs.tile.get_historic_event(event_id=2, search_items=[(12, 942, 1715)])
        assert len(tiles) == 1
        assert tiles[0].valid_id is True
        assert tiles[0].coordinate == (12, 942, 1715)
        assert tiles[0].image is not None
        assert tiles[0].event_id
class TestApi:
    """Integration tests for client construction and basic end-to-end calls.

    Requires the ``FSF_API_KEY`` environment variable and live API access.
    """

    def test_invalid_key(self):
        # An empty key must be rejected at construction time.
        with pytest.raises(MissingAPIKeyError):
            firststreet.FirstStreet("")

    def test_valid_key(self):
        # FIX: renamed from the typo'd "test_vaid_key".
        api_key = os.environ['FSF_API_KEY']
        firststreet.FirstStreet(api_key)

    def test_valid_call(self):
        api_key = os.environ['FSF_API_KEY']
        fs = firststreet.FirstStreet(api_key)
        adap = fs.adaptation.get_detail([29], csv=False)
        assert len(adap) == 1
        assert adap[0].name == 'Riverfront Park'

    def test_invalid_call(self):
        # Missing location type is rejected before any request is made.
        api_key = os.environ['FSF_API_KEY']
        fs = firststreet.FirstStreet(api_key)
        with pytest.raises(InvalidArgument):
            fs.location.get_detail([392873515], "", csv=True)

    def test_file(self):
        # One search item per line: FSID, street address, then a coordinate.
        sample = os.path.join(os.getcwd(), "sample.txt")
        with open(sample, "w+") as file:
            file.write("395133768\n")
            file.write("10212 BUCKEYE RD, Cleveland, Ohio\n")
            file.write("(41.48195701269418, -81.6138601319609)\n")

        # FIX: clean up sample.txt even when an assertion fails (the original
        # skipped os.remove on failure and leaked the file into the CWD).
        try:
            api_key = os.environ['FSF_API_KEY']
            fs = firststreet.FirstStreet(api_key)
            loc = fs.location.get_detail("sample.txt", "property", csv=True)
            assert len(loc) == 3
            assert loc[0].route == 'BUCKEYE RD'
            assert loc[1].route == 'BUCKEYE RD'
            assert loc[2].route == 'BUCKEYE RD'
        finally:
            os.remove(sample)

    def test_invalid_file(self):
        # A single comma-joined line is consumed as one search item.
        sample = os.path.join(os.getcwd(), "sample.txt")
        with open(sample, "w+") as file:
            file.write("395133768, 10212 BUCKEYE RD, Cleveland, Ohio, (41.48195701269418, -81.6138601319609)")

        try:
            api_key = os.environ['FSF_API_KEY']
            fs = firststreet.FirstStreet(api_key)
            loc = fs.location.get_detail("sample.txt", "property", csv=True)
            assert len(loc) == 1
        finally:
            os.remove(sample)

    def test_multi_type(self):
        # FSID, address, and coordinate lookups may be mixed in one call;
        # all three resolve to the same property.
        api_key = os.environ['FSF_API_KEY']
        fs = firststreet.FirstStreet(api_key)
        loc = fs.location.get_detail([395133768,
                                      "10212 BUCKEYE RD, Cleveland, Ohio",
                                      (41.48195701269418, -81.6138601319609)],
                                     "property", csv=True)
        assert len(loc) == 3
        assert loc[0].route == 'BUCKEYE RD'
        assert loc[1].route == 'BUCKEYE RD'
        assert loc[2].route == 'BUCKEYE RD'
# Author: Kelvin Lai
# Copyright: This module is owned by First Street Foundation

# Standard Imports
import os

# Internal Imports
import firststreet


def test_full(tmpdir):
    """Smoke-test a live end-to-end run against the First Street API.

    Requires the ``FSF_API_KEY`` environment variable and network access.
    ``tmpdir`` is the pytest-provided directory for CSV output when a call
    passes ``output_dir``.

    NOTE(review): the original file carried ~70 commented-out calls covering
    every product/location-type combination, used as a manual toggle menu;
    they were removed as dead code — restore individual calls from version
    control when smoke-testing other endpoints.
    """
    api_key = os.environ['FSF_API_KEY']
    fs = firststreet.FirstStreet(api_key)
    fs.aal.get_summary([511447411], location_type="property", csv=True)
    fs.avm.get_avm([395112095], csv=True)
    fs.avm.get_provider([2], csv=True)

# FIX: removed the module-level `test_full(None)` call.  It executed this
# live-network test at import/collection time with tmpdir=None, outside
# pytest's control; pytest collects and runs test_full on its own.
class TestNetworkErrors:
    """Verify that HTTP error statuses are handled gracefully.

    Each endpoint points at httpstat.us, which replies with the requested
    status code; a failed request must still yield exactly one response
    entry echoing the original search item rather than raising.
    """

    def _assert_error_handled(self, connection, status):
        # Shared driver for every status-code case.  FIX: the original nine
        # test methods were byte-identical copy-paste except for the status
        # code; the behavior (one echoed search_item per failed endpoint) is
        # asserted here once.
        # NOTE(review): asyncio.get_event_loop() outside a running loop is
        # deprecated since Python 3.10; kept because the setup_connection
        # fixture may be bound to this loop — confirm before migrating to
        # asyncio.run().
        loop = asyncio.get_event_loop()
        endpoint = ("https://httpstat.us/{}".format(status), "test_item", "test_product", "test_subtype")
        response = loop.run_until_complete(connection.endpoint_execute([endpoint]))
        assert len(response) == 1
        assert response[0]['search_item'] == "test_item"

    def test_error_400(self, setup_connection):
        self._assert_error_handled(setup_connection, 400)

    def test_error_403(self, setup_connection):
        self._assert_error_handled(setup_connection, 403)

    def test_error_404(self, setup_connection):
        self._assert_error_handled(setup_connection, 404)

    def test_error_500(self, setup_connection):
        self._assert_error_handled(setup_connection, 500)

    def test_error_501(self, setup_connection):
        self._assert_error_handled(setup_connection, 501)

    def test_error_502(self, setup_connection):
        self._assert_error_handled(setup_connection, 502)

    def test_error_503(self, setup_connection):
        self._assert_error_handled(setup_connection, 503)

    def test_error_522(self, setup_connection):
        self._assert_error_handled(setup_connection, 522)

    def test_error_524(self, setup_connection):
        self._assert_error_handled(setup_connection, 524)