├── .coveragerc ├── .github ├── dependabot.yml └── workflows │ ├── rstcheck.yml │ ├── scorecard.yml │ ├── test_gdal_latest.yml │ └── tests.yml ├── .gitignore ├── .mailmap ├── .readthedocs.yaml ├── CHANGES.txt ├── CITATION.cff ├── CODE_OF_CONDUCT.md ├── CREDITS.txt ├── Dockerfile ├── FAQ.rst ├── ISSUE_TEMPLATE.md ├── LICENSE.txt ├── MANIFEST.in ├── Makefile ├── README.rst ├── SECURITY.md ├── appveyor.yml ├── appveyor ├── install.ps1 └── run_with_env.cmd ├── ci ├── gdal-compile.sh └── rstcheck │ ├── requirements.in │ └── requirements.txt ├── docs ├── Makefile ├── README.rst ├── cli.rst ├── conf.py ├── encoding.txt ├── fiona.fio.rst ├── fiona.rst ├── img │ ├── concave.png │ ├── convex.png │ ├── simplified-buffer.png │ └── zones.png ├── index.rst ├── install.rst ├── manual.rst └── modules.rst ├── environment.yml ├── examples ├── open.py ├── orient-ccw.py ├── with-descartes-functional.py ├── with-descartes.py ├── with-pyproj.py └── with-shapely.py ├── fiona ├── __init__.py ├── _cpl.pxd ├── _crs.pyx ├── _csl.pxd ├── _env.pxd ├── _env.pyx ├── _err.pxd ├── _err.pyx ├── _geometry.pxd ├── _geometry.pyx ├── _path.py ├── _show_versions.py ├── _transform.pyx ├── _vendor │ ├── munch │ │ ├── LICENSE.txt │ │ └── __init__.py │ └── snuggs.py ├── _vsiopener.pxd ├── _vsiopener.pyx ├── abc.py ├── collection.py ├── compat.py ├── crs.pxd ├── crs.pyx ├── drvsupport.py ├── enums.py ├── env.py ├── errors.py ├── features.py ├── fio │ ├── __init__.py │ ├── bounds.py │ ├── calc.py │ ├── cat.py │ ├── collect.py │ ├── distrib.py │ ├── dump.py │ ├── env.py │ ├── features.py │ ├── helpers.py │ ├── info.py │ ├── insp.py │ ├── load.py │ ├── ls.py │ ├── main.py │ ├── options.py │ └── rm.py ├── gdal.pxi ├── inspector.py ├── io.py ├── logutils.py ├── meta.py ├── model.py ├── ogrext.pyx ├── ogrext1.pxd ├── ogrext2.pxd ├── ogrext3.pxd ├── path.py ├── rfc3339.py ├── schema.pyx ├── session.py ├── transform.py └── vfs.py ├── pyproject.toml ├── pytest.ini ├── requirements-ci.txt ├── requirements-dev.txt ├── 
requirements.txt ├── scripts ├── check_deprecated.py └── check_urls.py ├── setup.py └── tests ├── __init__.py ├── conftest.py ├── data ├── !test.geojson ├── LICENSE.txt ├── collection-pp.txt ├── collection.txt ├── coutwildrnp.cpg ├── coutwildrnp.dbf ├── coutwildrnp.prj ├── coutwildrnp.shp ├── coutwildrnp.shx ├── coutwildrnp.zip ├── curves_line.csv ├── example.topojson ├── gre.cpg ├── gre.dbf ├── gre.prj ├── gre.shp ├── gre.shx ├── grenada.geojson ├── issue627.geojson ├── multicurve.gml ├── multicurve.xsd ├── rmnp.geojson ├── sequence-pp.txt ├── sequence.txt ├── test_gpx.gpx ├── test_tin.csv ├── test_tin.dbf ├── test_tin.shp ├── test_tin.shx ├── test_tz.geojson ├── testopenfilegdb.gdb.zip ├── trio.geojson └── trio.seq ├── test__env.py ├── test__path.py ├── test_bigint.py ├── test_binary_field.py ├── test_bounds.py ├── test_bytescollection.py ├── test_collection.py ├── test_collection_crs.py ├── test_collection_legacy.py ├── test_compound_crs.py ├── test_crs.py ├── test_cursor_interruptions.py ├── test_curve_geometries.py ├── test_data_paths.py ├── test_datetime.py ├── test_driver_options.py ├── test_drivers.py ├── test_drvsupport.py ├── test_encoding.py ├── test_env.py ├── test_feature.py ├── test_features.py ├── test_fio_bounds.py ├── test_fio_calc.py ├── test_fio_cat.py ├── test_fio_collect.py ├── test_fio_distrib.py ├── test_fio_dump.py ├── test_fio_features.py ├── test_fio_filter.py ├── test_fio_info.py ├── test_fio_load.py ├── test_fio_ls.py ├── test_fio_rm.py ├── test_geojson.py ├── test_geometry.py ├── test_geopackage.py ├── test_http_session.py ├── test_integration.py ├── test_layer.py ├── test_listing.py ├── test_logutils.py ├── test_memoryfile.py ├── test_meta.py ├── test_model.py ├── test_multiconxn.py ├── test_non_counting_layer.py ├── test_open.py ├── test_profile.py ├── test_props.py ├── test_pyopener.py ├── test_read_drivers.py ├── test_remove.py ├── test_revolvingdoor.py ├── test_rfc3339.py ├── test_rfc64_tin.py ├── test_schema.py ├── 
test_schema_geom.py ├── test_session.py ├── test_slice.py ├── test_snuggs.py ├── test_subtypes.py ├── test_topojson.py ├── test_transactions.py ├── test_transform.py ├── test_unicode.py ├── test_version.py ├── test_vfs.py └── test_write.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | plugins = Cython.Coverage 3 | omit = *pxd 4 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | # Check for updates to GitHub Actions every week 8 | interval: "weekly" 9 | groups: 10 | actions: 11 | patterns: 12 | - "*" 13 | -------------------------------------------------------------------------------- /.github/workflows/rstcheck.yml: -------------------------------------------------------------------------------- 1 | name: rstcheck 2 | 3 | # Run this workflow for commits to doc files 4 | on: 5 | push: 6 | paths: 7 | - ".github/workflows/rstcheck.yml" 8 | - "README.rst" 9 | - "docs/**" 10 | - "ci/rstcheck/*" 11 | pull_request: 12 | paths: 13 | - ".github/workflows/rstcheck.yml" 14 | - "README.rst" 15 | - "docs/**" 16 | - "ci/rstcheck/*" 17 | 18 | permissions: 19 | contents: read 20 | 21 | jobs: 22 | rstcheck: 23 | name: rstcheck 24 | runs-on: ubuntu-latest 25 | 26 | steps: 27 | - name: Checkout code 28 | uses: actions/checkout@v4.1.3 29 | 30 | - name: Set up Python 31 | uses: actions/setup-python@v5 32 | with: 33 | python-version: 3.11 34 | 35 | - name: Install Python dependencies 36 | run: | 37 | python -m pip install -r ci/rstcheck/requirements.txt 38 | 39 | - name: Run rstcheck 40 | run: | 41 | rstcheck -r --ignore-directives automodule --ignore-substitutions version,release,today . 
42 | -------------------------------------------------------------------------------- /.github/workflows/scorecard.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. They are provided 2 | # by a third-party and are governed by separate terms of service, privacy 3 | # policy, and support documentation. 4 | 5 | name: Scorecard supply-chain security 6 | on: 7 | # For Branch-Protection check. Only the default branch is supported. See 8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 9 | branch_protection_rule: 10 | # To guarantee Maintained check is occasionally updated. See 11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 12 | schedule: 13 | - cron: '41 21 * * 0' 14 | push: 15 | branches: [ "main" ] 16 | 17 | # Declare default permissions as read only. 18 | permissions: read-all 19 | 20 | jobs: 21 | analysis: 22 | name: Scorecard analysis 23 | runs-on: ubuntu-latest 24 | permissions: 25 | # Needed to upload the results to code-scanning dashboard. 26 | security-events: write 27 | # Needed to publish results and get a badge (see publish_results below). 28 | id-token: write 29 | # Uncomment the permissions below if installing in a private repository. 30 | # contents: read 31 | # actions: read 32 | 33 | steps: 34 | - name: "Checkout code" 35 | uses: actions/checkout@1d96c772d19495a3b5c517cd2bc0cb401ea0529f # v4.1.3 36 | with: 37 | persist-credentials: false 38 | 39 | - name: "Run analysis" 40 | uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 41 | with: 42 | results_file: results.sarif 43 | results_format: sarif 44 | # (Optional) "write" PAT token. 
Uncomment the `repo_token` line below if: 45 | # - you want to enable the Branch-Protection check on a *public* repository, or 46 | # - you are installing Scorecard on a *private* repository 47 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. 48 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 49 | 50 | # Public repositories: 51 | # - Publish results to OpenSSF REST API for easy access by consumers 52 | # - Allows the repository to include the Scorecard badge. 53 | # - See https://github.com/ossf/scorecard-action#publishing-results. 54 | # For private repositories: 55 | # - `publish_results` will always be set to `false`, regardless 56 | # of the value entered here. 57 | publish_results: true 58 | 59 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 60 | # format to the repository Actions tab. 61 | - name: "Upload artifact" 62 | uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 63 | with: 64 | name: SARIF file 65 | path: results.sarif 66 | retention-days: 5 67 | 68 | # Upload the results to GitHub's code scanning dashboard. 
69 | - name: "Upload to code-scanning" 70 | uses: github/codeql-action/upload-sarif@662472033e021d55d94146f66f6058822b0b39fd # v3.27.0 71 | with: 72 | sarif_file: results.sarif 73 | -------------------------------------------------------------------------------- /.github/workflows/test_gdal_latest.yml: -------------------------------------------------------------------------------- 1 | name: Test GDAL Latest 2 | 3 | on: 4 | push: 5 | branches: [ main, 'maint-*' ] 6 | schedule: 7 | - cron: '0 0 * * 0' 8 | pull_request: # also build on PRs touching this file 9 | paths: 10 | - ".github/workflows/test_gdal_latest.yml" 11 | - "ci/gdal-compile.sh" 12 | 13 | concurrency: 14 | group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} 15 | cancel-in-progress: true 16 | 17 | permissions: 18 | contents: read 19 | 20 | jobs: 21 | test_gdal_latest: 22 | name: GDAL Latest 23 | runs-on: ubuntu-latest 24 | container: osgeo/proj:9.2.0 25 | env: 26 | GDAL_DIR: ${{ github.workspace }}/gdal_install 27 | GDAL_DATA: ${{ github.workspace }}/gdal_install/share/gdal 28 | LD_LIBRARY_PATH: "${{ github.workspace }}/gdal_install/lib/:${LD_LIBRARY_PATH}" 29 | steps: 30 | - uses: actions/checkout@v4 31 | - name: Update 32 | run: | 33 | apt-get update 34 | apt-get -y install software-properties-common 35 | add-apt-repository -y ppa:deadsnakes/ppa 36 | apt-get update 37 | 38 | - name: Set up Python 39 | run: | 40 | apt-get install -y --no-install-recommends \ 41 | python3.10 \ 42 | python3.10-dev \ 43 | python3.10-venv \ 44 | python3-pip \ 45 | g++ 46 | 47 | - name: Install GDAL 48 | shell: bash 49 | run: | 50 | apt-get update 51 | apt-get install -qq \ 52 | libcurl4-gnutls-dev \ 53 | libgeos-dev \ 54 | libjpeg-dev \ 55 | libnetcdf-dev \ 56 | libhdf4-alt-dev \ 57 | libhdf5-serial-dev \ 58 | libssl-dev \ 59 | libsqlite3-dev \ 60 | libexpat-dev \ 61 | libxerces-c-dev \ 62 | libpng-dev \ 63 | libopenjp2-7-dev \ 64 | libzstd-dev \ 65 | libwebp-dev \ 66 | cmake \ 67 | curl \ 68 | git 69 | bash 
ci/gdal-compile.sh git 70 | 71 | - name: Install dependencies 72 | run: | 73 | export PATH="${GDAL_DIR}/bin/:${PATH}" 74 | python3.10 -m venv testenv 75 | . testenv/bin/activate 76 | python -m pip install --upgrade pip 77 | python -m pip wheel -r requirements-dev.txt 78 | python -m pip install -r requirements-dev.txt 79 | python setup.py clean 80 | python -m pip install --no-deps --force-reinstall -e .[test] 81 | 82 | - name: Test 83 | shell: bash 84 | run: | 85 | export PATH="${GDAL_DIR}/bin/:${PATH}" 86 | . testenv/bin/activate 87 | python -m pytest -v -m "not wheel or gdal" -rxXs --cov fiona --cov-report term-missing 88 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | .libs 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | 47 | # Translations 48 | *.mo 49 | *.pot 50 | 51 | # Django stuff: 52 | *.log 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | 60 | # IDE's etc. 
61 | .idea/ 62 | venv/ 63 | venv2/ 64 | 65 | # fiona 66 | VERSION.txt 67 | fiona/*.c 68 | fiona/*.cpp 69 | fiona/ograpi.pxd 70 | tests/data/coutwildrnp.json 71 | tests/data/coutwildrnp.tar 72 | tests/data/coutwildrnp.gpkg 73 | .DS_Store 74 | .ipynb_checkpoints 75 | .pytest_cache 76 | MANIFEST 77 | wheels/ 78 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | Alan D. Snow 2 | Hannes Gräuler 3 | Hannes Gräuler 4 | Hannes Gräuler 5 | Kevin Wurster 6 | Kevin Wurster 7 | Kevin Wurster 8 | Matthew Perry 9 | Micah Cochran 10 | Michael Weisman 11 | Patrick Young 12 | Patrick Young 13 | René Buffat 14 | René Buffat 15 | Sean Gillies 16 | Sean Gillies 17 | Sean Gillies 18 | Sean Gillies 19 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: "ubuntu-22.04" 5 | tools: 6 | python: "mambaforge-22.9" 7 | 8 | conda: 9 | environment: environment.yml 10 | 11 | python: 12 | install: 13 | - method: setuptools 14 | path: . 15 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "Please cite this software using these metadata." 3 | type: software 4 | title: Fiona 5 | version: "1.10.0" 6 | date-released: "2024-09-03" 7 | abstract: "Fiona streams simple feature data to and from GIS formats like GeoPackage and Shapefile." 
8 | keywords: 9 | - cartography 10 | - GIS 11 | - OGR 12 | repository-artifact: https://pypi.org/project/Fiona 13 | repository-code: https://github.com/Toblerity/Fiona 14 | license: "BSD-3-Clause" 15 | authors: 16 | - given-names: Sean 17 | family-names: Gillies 18 | alias: sgillies 19 | orcid: https://orcid.org/0000-0002-8401-9184 20 | - given-names: René 21 | family-names: Buffat 22 | alias: rbuffat 23 | orcid: https://orcid.org/0000-0002-9836-3314 24 | - given-names: Joshua 25 | family-names: Arnott 26 | alias: snorfalorpagus 27 | - given-names: Mike W. 28 | family-names: Taves 29 | alias: mwtoews 30 | orcid: https://orcid.org/0000-0003-3657-7963 31 | - given-names: Kevin 32 | family-names: Wurster 33 | alias: geowurster 34 | orcid: https://orcid.org/0000-0001-9044-0832 35 | - given-names: Alan D. 36 | family-names: Snow 37 | alias: snowman2 38 | orcid: https://orcid.org/0000-0002-7333-3100 39 | - given-names: Micah 40 | family-names: Cochran 41 | alias: micahcochran 42 | - given-names: Elliott 43 | family-names: Sales de Andrade 44 | alias: QuLogic 45 | orcid: https://orcid.org/0000-0001-7310-8942 46 | - given-names: Matthew 47 | family-names: Perry 48 | alias: perrygeo 49 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Contributor Code of Conduct 3 | 4 | As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. 
5 | 6 | We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality. 7 | 8 | Examples of unacceptable behavior by participants include: 9 | 10 | * The use of sexualized language or imagery 11 | * Personal attacks 12 | * Trolling or insulting/derogatory comments 13 | * Public or private harassment 14 | * Publishing other's private information, such as physical or electronic addresses, without explicit permission 15 | * Other unethical or unprofessional conduct 16 | 17 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. 18 | 19 | This code of conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 20 | 21 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers. 22 | 23 | This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org), version 1.2.0, available at https://www.contributor-covenant.org/version/1/2/0/code-of-conduct.html 24 | 25 | -------------------------------------------------------------------------------- /CREDITS.txt: -------------------------------------------------------------------------------- 1 | Credits 2 | ======= 3 | 4 | Fiona is written by: 5 | 6 | - Adam J. Stewart 7 | - Alan D. 
Snow 8 | - Alexandre Detiste 9 | - Ariel Nunez 10 | - Ariki 11 | - Bas Couwenberg 12 | - Brandon Liu 13 | - Brendan Ward 14 | - Chad Hawkins 15 | - Chris Mutel 16 | - Christoph Gohlke 17 | - Dan "Ducky" Little 18 | - daryl herzmann 19 | - Denis 20 | - Denis Rykov 21 | - dimlev 22 | - Efrén 23 | - Egor Fedorov 24 | - Elliott Sales de Andrade 25 | - Even Rouault 26 | - Ewout ter Hoeven 27 | - Filipe Fernandes 28 | - fredj 29 | - Gavin S 30 | - Géraud 31 | - Hannes Gräuler 32 | - Hao Lyu <20434183+IncubatorShokuhou@users.noreply.github.com> 33 | - Herz 34 | - Ian Rose 35 | - Jacob Wasserman 36 | - James McBride 37 | - James Wilshaw 38 | - Jelle van der Waa 39 | - Jesse Crocker 40 | - joehuanguf <51337028+joehuanguf@users.noreply.github.com> 41 | - Johan Van de Wauw 42 | - Joris Van den Bossche 43 | - Joshua Arnott 44 | - Juan Luis Cano Rodríguez 45 | - Keith Jenkins 46 | - Kelsey Jordahl 47 | - Kevin Wurster 48 | - lgolston <30876419+lgolston@users.noreply.github.com> 49 | - Loïc Dutrieux 50 | - Ludovic Delauné 51 | - Martijn Visser 52 | - Matthew Perry 53 | - Micah Cochran 54 | - Michael Weisman 55 | - Michele Citterio 56 | - Mike Taves 57 | - Miro Hrončok 58 | - Oliver Tonnhofer 59 | - Patrick Young 60 | - Phillip Cloud <417981+cpcloud@users.noreply.github.com> 61 | - pmav99 62 | - qinfeng 63 | - René Buffat 64 | - Reuben Fletcher-Costin 65 | - Ryan Grout 66 | - Ryan Munro 67 | - Sandro Mani 68 | - Sean Gillies 69 | - Sid Kapur 70 | - Simon Norris 71 | - Stefan Brand 72 | - Stefano Costa 73 | - Stephane Poss 74 | - Tim Tröndle 75 | - wilsaj 76 | - Yann-Sebastien Tremblay-Johnston 77 | 78 | The GeoPandas project (Joris Van den Bossche et al.) has been a major driver 79 | for new features in 1.8.0. 80 | 81 | Fiona would not be possible without the great work of Frank Warmerdam and other 82 | GDAL/OGR developers. 83 | 84 | Some portions of this work were supported by a grant (for Pleiades_) from the 85 | U.S. National Endowment for the Humanities (https://www.neh.gov). 
86 | 87 | .. _Pleiades: https://pleiades.stoa.org 88 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG GDAL=ubuntu-small-3.6.4 2 | FROM ghcr.io/osgeo/gdal:${GDAL} AS gdal 3 | ARG PYTHON_VERSION=3.10 4 | ENV LANG="C.UTF-8" LC_ALL="C.UTF-8" 5 | RUN apt-get update && apt-get install -y software-properties-common 6 | RUN add-apt-repository -y ppa:deadsnakes/ppa 7 | RUN apt-get update && \ 8 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ 9 | g++ \ 10 | gdb \ 11 | make \ 12 | python3-pip \ 13 | python${PYTHON_VERSION} \ 14 | python${PYTHON_VERSION}-dev \ 15 | python${PYTHON_VERSION}-venv \ 16 | && rm -rf /var/lib/apt/lists/* 17 | 18 | WORKDIR /app 19 | COPY requirements*.txt ./ 20 | RUN python${PYTHON_VERSION} -m venv /venv && \ 21 | /venv/bin/python -m pip install -U pip && \ 22 | /venv/bin/python -m pip install build && \ 23 | /venv/bin/python -m pip install -r requirements-dev.txt && \ 24 | /venv/bin/python -m pip list 25 | 26 | FROM gdal 27 | COPY . . 28 | RUN /venv/bin/python -m build -o wheels 29 | RUN /venv/bin/python -m pip install --no-index -f wheels fiona[test] 30 | ENTRYPOINT ["/venv/bin/fio"] 31 | CMD ["--help"] 32 | -------------------------------------------------------------------------------- /FAQ.rst: -------------------------------------------------------------------------------- 1 | Frequently asked questions and answers 2 | ====================================== 3 | 4 | Can you add X format support in the Fiona wheels? 5 | ------------------------------------------------- 6 | 7 | The short answer is no, unless the question is about a completely builtin format driver with no extra library dependencies. 8 | 9 | The wheels on PyPI are already painfully big at 17-24 MB. 
Adding Xerces (for GML) and libkml, for example, increases the size 10 | of wheels and encumbers everyone whether they use these formats or not. That's one reason why the answer is no. The other 11 | reason is the expense of maintaining and updating Fiona's wheel building infrastructure. Unlike conda-forge, which is a 12 | fiscally sponsored project of NumFOCUS, the Fiona project has no budget for building wheels. We're at the limit of what we 13 | can do for free on volunteer time. 14 | 15 | Can you publish Fiona wheels for new platform X? 16 | ------------------------------------------------ 17 | 18 | The short answer is not until there is free native CI for that platform. Even then, the project may be slow to add a new platform 19 | to the existing matrix. As explained above, the project has no funding for building wheels. 20 | 21 | What does "ValueError: Invalid field type " mean? 22 | ------------------------------------------------------------------------ 23 | 24 | Fiona maps the built-in Python types to `field types of the OGR API `__ (``float`` to ``OFTReal``, etc.). Users may need to convert instances of other classes (like ``cx_Oracle.LOB``) to strings or bytes when writing data to new GIS datasets using fiona. 25 | -------------------------------------------------------------------------------- /ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 35 | 36 | ## Expected behavior and actual behavior. 37 | 38 | For example: I expected to read 10 features from a file and an exception occurred 39 | on the 3rd. 40 | 41 | ## Steps to reproduce the problem. 42 | 43 | For example: a script with required data. 44 | 45 | ## Operating system 46 | 47 | For example: Mac OS X 10.12.3. 48 | 49 | ## Fiona and GDAL version and provenance 50 | 51 | For example: the 1.7.10.post1 manylinux1 wheel installed from PyPI using pip version 9.0.1. 
52 | 53 | For example: GDAL 2.1.0 installed via Homebrew 54 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | 2 | Copyright (c) 2007, Sean C. Gillies 3 | All rights reserved. 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in the 12 | documentation and/or other materials provided with the distribution. 13 | * Neither the name of Sean C. Gillies nor the names of 14 | its contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 18 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 20 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 21 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 22 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 23 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 24 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 25 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 26 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 27 | POSSIBILITY OF SUCH DAMAGE. 
28 | 29 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | global-exclude .DS_Store 2 | global-exclude *.pyc 3 | recursive-exclude docs/data * 4 | recursive-exclude docs/_build * 5 | recursive-exclude _build * 6 | recursive-exclude venv * 7 | exclude *.txt *.py 8 | recursive-include docs *.rst *.txt 9 | recursive-include tests *.py 10 | recursive-include tests/data * 11 | exclude tests/data/coutwildrnp.gpkg 12 | exclude tests/data/coutwildrnp.json 13 | exclude tests/data/coutwildrnp.tar 14 | recursive-include fiona *.pyx *.pxd *.pxi 15 | recursive-exclude fiona *.c *.cpp 16 | include CHANGES.txt CITATION.cff CREDITS.txt LICENSE.txt README.rst 17 | include pyproject.toml setup.py requirements.txt 18 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PYTHON_VERSION ?= 3.12 2 | GDAL ?= ubuntu-small-3.9.2 3 | all: deps clean install test 4 | 5 | .PHONY: docs 6 | 7 | install: 8 | python setup.py build_ext 9 | pip install -e .[all] 10 | 11 | deps: 12 | pip install -r requirements-dev.txt 13 | 14 | clean: 15 | pip uninstall -y fiona || echo "no need to uninstall" 16 | python setup.py clean --all 17 | find . -name '__pycache__' -delete -print -o -name '*.pyc' -delete -print 18 | touch fiona/*.pyx 19 | 20 | sdist: 21 | python setup.py sdist 22 | 23 | test: 24 | python -m pytest --maxfail 1 -v --cov fiona --cov-report html --pdb tests 25 | 26 | docs: 27 | cd docs && make apidocs && make html 28 | 29 | doctest: 30 | py.test --doctest-modules fiona --doctest-glob='*.rst' docs/*.rst 31 | 32 | dockertestimage: 33 | docker build --target gdal --build-arg GDAL=$(GDAL) --build-arg PYTHON_VERSION=$(PYTHON_VERSION) -t fiona:$(GDAL)-py$(PYTHON_VERSION) . 
34 | 35 | dockertest: dockertestimage 36 | docker run -it -v $(shell pwd):/app -v /tmp:/tmp --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --entrypoint=/bin/bash fiona:$(GDAL)-py$(PYTHON_VERSION) -c '/venv/bin/python -m pip install -vvv --editable .[all] --no-build-isolation && /venv/bin/python -B -m pytest -m "not wheel" --cov fiona --cov-report term-missing $(OPTS)' 37 | 38 | dockershell: dockertestimage 39 | docker run -it -v $(shell pwd):/app --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --entrypoint=/bin/bash fiona:$(GDAL)-py$(PYTHON_VERSION) -c '/venv/bin/python -m pip install --editable . --no-build-isolation && /bin/bash' 40 | 41 | dockersdist: dockertestimage 42 | docker run -it -v $(shell pwd):/app --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --entrypoint=/bin/bash fiona:$(GDAL)-py$(PYTHON_VERSION) -c '/venv/bin/python -m build --sdist' 43 | 44 | dockergdb: dockertestimage 45 | docker run -it -v $(shell pwd):/app --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --entrypoint=/bin/bash fiona:$(GDAL)-py$(PYTHON_VERSION) -c '/venv/bin/python -m pip install --editable . --no-build-isolation && gdb -ex=r --args /venv/bin/python -B -m pytest -m "not wheel" --cov fiona --cov-report term-missing $(OPTS)' 46 | 47 | dockerdocs: dockertestimage 48 | docker run -it -v $(shell pwd):/app --entrypoint=/bin/bash fiona:$(GDAL)-py$(PYTHON_VERSION) -c 'source /venv/bin/activate && cd docs && make clean && make html' 49 | 50 | dockertestimage-amd64: 51 | docker build --platform linux/amd64 --target gdal --build-arg GDAL=$(GDAL) --build-arg PYTHON_VERSION=$(PYTHON_VERSION) -t fiona-amd64:$(GDAL)-py$(PYTHON_VERSION) . 
52 | 53 | dockertest-amd64: dockertestimage-amd64 54 | docker run -it -v $(shell pwd):/app -v /tmp:/tmp --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY --entrypoint=/bin/bash fiona-amd64:$(GDAL)-py$(PYTHON_VERSION) -c '/venv/bin/python -m pip install tiledb && /venv/bin/python -m pip install --editable .[all] --no-build-isolation && /venv/bin/python -B -m pytest -m "not wheel" --cov fiona --cov-report term-missing $(OPTS)' 55 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | 1.9.x | :white_check_mark: | 8 | | < 1.9 | :x: | 9 | 10 | ## Reporting a Vulnerability 11 | 12 | Fiona includes C extension modules that link [GDAL](https://gdal.org/), which in turn links a number of other libraries such as libgeos, libproj, and libcurl. 13 | The exact list depends on the features included when GDAL is built and varies across distributions. 14 | 15 | The Fiona team publishes binary wheels to the Python Package Index for 4 different platforms. The wheels contain 27-35 libraries. 16 | The exact list depends on the platform and the versions of package managers and tooling used for each platform. Details can be found at https://github.com/sgillies/fiona-wheels. 17 | 18 | To report a vulnerability in fiona or in one of the libraries that is included in a binary wheel on PyPI, please use the GitHub Security Advisory "Report a Vulnerability" tab. 19 | In the case of a vulnerability in a dependency, please provide a link to a published CVE or other description of the issue. 20 | 21 | The Fiona team will send a response indicating the next steps in handling your report. 
After the initial reply to your report, the security team will keep you informed of the 22 | progress towards a fix and full announcement at https://github.com/Toblerity/Fiona/discussions, and may ask for additional information or guidance. 23 | 24 | -------------------------------------------------------------------------------- /appveyor/run_with_env.cmd: -------------------------------------------------------------------------------- 1 | :: To build extensions for 64 bit Python 3, we need to configure environment 2 | :: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: 3 | :: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1) 4 | :: 5 | :: To build extensions for 64 bit Python 2, we need to configure environment 6 | :: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of: 7 | :: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0) 8 | :: 9 | :: 32 bit builds, and 64-bit builds for 3.5 and beyond, do not require specific 10 | :: environment configurations. 11 | :: 12 | :: Note: this script needs to be run with the /E:ON and /V:ON flags for the 13 | :: cmd interpreter, at least for (SDK v7.0) 14 | :: 15 | :: More details at: 16 | :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows 17 | :: http://stackoverflow.com/a/13751649/163740 18 | :: 19 | :: Author: Olivier Grisel 20 | :: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ 21 | :: 22 | :: Notes about batch files for Python people: 23 | :: 24 | :: Quotes in values are literally part of the values: 25 | :: SET FOO="bar" 26 | :: FOO is now five characters long: " b a r " 27 | :: If you don't want quotes, don't include them on the right-hand side. 28 | :: 29 | :: The CALL lines at the end of this file look redundant, but if you move them 30 | :: outside of the IF clauses, they do not run properly in the SET_SDK_64==Y 31 | :: case, I don't know why. 
32 | @ECHO OFF 33 | 34 | SET COMMAND_TO_RUN=%* 35 | SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows 36 | SET WIN_WDK=c:\Program Files (x86)\Windows Kits\10\Include\wdf 37 | 38 | :: Extract the major and minor versions, and allow for the minor version to be 39 | :: more than 9. This requires the version number to have two dots in it. 40 | SET MAJOR_PYTHON_VERSION=%PYTHON_VERSION:~0,1% 41 | IF "%PYTHON_VERSION:~3,1%" == "." ( 42 | SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,1% 43 | ) ELSE ( 44 | SET MINOR_PYTHON_VERSION=%PYTHON_VERSION:~2,2% 45 | ) 46 | 47 | :: Based on the Python version, determine what SDK version to use, and whether 48 | :: to set the SDK for 64-bit. 49 | IF %MAJOR_PYTHON_VERSION% == 2 ( 50 | SET WINDOWS_SDK_VERSION="v7.0" 51 | SET SET_SDK_64=Y 52 | ) ELSE ( 53 | IF %MAJOR_PYTHON_VERSION% == 3 ( 54 | SET WINDOWS_SDK_VERSION="v7.1" 55 | IF %MINOR_PYTHON_VERSION% LEQ 4 ( 56 | SET SET_SDK_64=Y 57 | ) ELSE ( 58 | SET SET_SDK_64=N 59 | IF EXIST "%WIN_WDK%" ( 60 | :: See: https://connect.microsoft.com/VisualStudio/feedback/details/1610302/ 61 | REN "%WIN_WDK%" 0wdf 62 | ) 63 | ) 64 | ) ELSE ( 65 | ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%" 66 | EXIT 1 67 | ) 68 | ) 69 | 70 | IF %PYTHON_ARCH% == 64 ( 71 | IF %SET_SDK_64% == Y ( 72 | ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture 73 | SET DISTUTILS_USE_SDK=1 74 | SET MSSdk=1 75 | "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION% 76 | "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release 77 | ECHO Executing: %COMMAND_TO_RUN% 78 | call %COMMAND_TO_RUN% || EXIT 1 79 | ) ELSE ( 80 | ECHO Using default MSVC build environment for 64 bit architecture 81 | ECHO Executing: %COMMAND_TO_RUN% 82 | call %COMMAND_TO_RUN% || EXIT 1 83 | ) 84 | ) ELSE ( 85 | ECHO Using default MSVC build environment for 32 bit architecture 86 | ECHO Executing: %COMMAND_TO_RUN% 87 | call 
%COMMAND_TO_RUN% || EXIT 1 88 | ) 89 | -------------------------------------------------------------------------------- /ci/gdal-compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Example usage: 3 | # GDAL_DIR=$PWD/gdal bash gdal_compile.sh 3.6.0rc2 4 | set -e 5 | pushd . 6 | echo "Building GDAL ($1) from source..." 7 | BUILD_GDAL_DIR=gdal-${1:0:5} 8 | # Download PROJ 9 | if [[ $1 == "git" ]]; then 10 | git clone https://github.com/OSGeo/GDAL.git ${BUILD_GDAL_DIR} 11 | else 12 | curl https://download.osgeo.org/gdal/${1:0:5}/gdal-$1.tar.gz > ${BUILD_GDAL_DIR}.tar.gz 13 | tar zxf ${BUILD_GDAL_DIR}.tar.gz 14 | rm ${BUILD_GDAL_DIR}.tar.gz 15 | fi 16 | cd ${BUILD_GDAL_DIR} 17 | mkdir build 18 | cd build 19 | # build using cmake 20 | cmake .. \ 21 | -DCMAKE_INSTALL_PREFIX=$GDAL_DIR \ 22 | -DBUILD_SHARED_LIBS=ON \ 23 | -DCMAKE_BUILD_TYPE=Release \ 24 | -DGDAL_BUILD_OPTIONAL_DRIVERS=OFF \ 25 | -DGDAL_ENABLE_DRIVER_MBTILES=OFF \ 26 | -DOGR_BUILD_OPTIONAL_DRIVERS=OFF \ 27 | -DOGR_ENABLE_DRIVER_CSV=ON \ 28 | -DOGR_ENABLE_DRIVER_DGN=ON \ 29 | -DOGR_ENABLE_DRIVER_DXF=ON \ 30 | -DOGR_ENABLE_DRIVER_FLATGEOBUF=ON \ 31 | -DOGR_ENABLE_DRIVER_GEOJSON=ON \ 32 | -DOGR_ENABLE_DRIVER_GML=ON \ 33 | -DOGR_ENABLE_DRIVER_GMT=ON \ 34 | -DOGR_ENABLE_DRIVER_GPKG=ON \ 35 | -DOGR_ENABLE_DRIVER_GPX=ON \ 36 | -DOGR_ENABLE_DRIVER_OPENFILEGDB=ON \ 37 | -DGDAL_ENABLE_DRIVER_PCIDSK=ON \ 38 | -DOGR_ENABLE_DRIVER_S57=ON \ 39 | -DOGR_ENABLE_DRIVER_SHAPE=ON \ 40 | -DOGR_ENABLE_DRIVER_SQLITE=ON \ 41 | -DOGR_ENABLE_DRIVER_TAB=ON \ 42 | -DOGR_ENABLE_DRIVER_VRT=ON \ 43 | -DBUILD_CSHARP_BINDINGS=OFF \ 44 | -DBUILD_PYTHON_BINDINGS=OFF \ 45 | -DBUILD_JAVA_BINDINGS=OFF 46 | cmake --build . -j$(nproc) 47 | cmake --install . 48 | # cleanup 49 | cd ../.. 
50 | rm -rf ${BUILD_GDAL_DIR} 51 | popd 52 | -------------------------------------------------------------------------------- /ci/rstcheck/requirements.in: -------------------------------------------------------------------------------- 1 | rstcheck[sphinx]==6.1.2 2 | -------------------------------------------------------------------------------- /ci/rstcheck/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.10 3 | # by the following command: 4 | # 5 | # pip-compile --strip-extras requirements.in 6 | # 7 | alabaster==0.7.13 8 | # via sphinx 9 | babel==2.12.1 10 | # via sphinx 11 | certifi==2024.7.4 12 | # via requests 13 | charset-normalizer==3.2.0 14 | # via requests 15 | click==8.1.7 16 | # via typer 17 | colorama==0.4.6 18 | # via typer 19 | commonmark==0.9.1 20 | # via rich 21 | docutils==0.19 22 | # via 23 | # rstcheck-core 24 | # sphinx 25 | idna==3.7 26 | # via requests 27 | imagesize==1.4.1 28 | # via sphinx 29 | jinja2==3.1.4 30 | # via sphinx 31 | markupsafe==2.1.3 32 | # via jinja2 33 | packaging==23.1 34 | # via sphinx 35 | pydantic==1.10.13 36 | # via rstcheck-core 37 | pygments==2.16.1 38 | # via 39 | # rich 40 | # sphinx 41 | requests==2.32.0 42 | # via sphinx 43 | rich==12.6.0 44 | # via typer 45 | rstcheck==6.1.2 46 | # via -r requirements.in 47 | rstcheck-core==1.0.3 48 | # via rstcheck 49 | shellingham==1.5.3 50 | # via typer 51 | snowballstemmer==2.2.0 52 | # via sphinx 53 | sphinx==7.2.5 54 | # via 55 | # rstcheck 56 | # sphinxcontrib-applehelp 57 | # sphinxcontrib-devhelp 58 | # sphinxcontrib-htmlhelp 59 | # sphinxcontrib-qthelp 60 | # sphinxcontrib-serializinghtml 61 | sphinxcontrib-applehelp==1.0.7 62 | # via sphinx 63 | sphinxcontrib-devhelp==1.0.5 64 | # via sphinx 65 | sphinxcontrib-htmlhelp==2.0.4 66 | # via sphinx 67 | sphinxcontrib-jsmath==1.0.1 68 | # via sphinx 69 | sphinxcontrib-qthelp==1.0.6 70 | # via sphinx 71 | 
sphinxcontrib-serializinghtml==1.1.9 72 | # via sphinx 73 | typer==0.7.0 74 | # via rstcheck 75 | types-docutils==0.19.1.9 76 | # via rstcheck-core 77 | typing-extensions==4.7.1 78 | # via pydantic 79 | urllib3==2.2.2 80 | # via requests 81 | -------------------------------------------------------------------------------- /docs/README.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | 3 | .. include:: ../CHANGES.txt 4 | 5 | .. include:: ../CREDITS.txt 6 | -------------------------------------------------------------------------------- /docs/encoding.txt: -------------------------------------------------------------------------------- 1 | ========================= 2 | Fiona and String Encoding 3 | ========================= 4 | 5 | Reading 6 | ------- 7 | 8 | With Fiona, all 'str' type record attributes are unicode strings. The source 9 | data is encoded in some way. It might be a standard encoding (ISO-8859-1 or 10 | UTF-8) or it might be a format-specific encoding. How do we get from encoded 11 | strings to Python unicode? :: 12 | 13 | encoded File | (decode?) OGR (encode?) | (decode) Fiona 14 | 15 | E_f R E_i 16 | 17 | The internal encoding `E_i` is used by the ``FeatureBuilder`` class to create 18 | Fiona's record dicts. `E_f` is the encoding of the data file. `R` is ``True`` 19 | if OGR is recoding record attribute values to UTF-8 (a recent feature that 20 | isn't implemented for all format drivers, hence the question marks in the 21 | sketch above), else ``False``. 22 | 23 | The value of E_i is determined like this:: 24 | 25 | E_i = (R and 'utf-8') or E_f 26 | 27 | In the real world of sloppy data, we may not know the exact encoding of the 28 | data file. Fiona's best guess at it is this:: 29 | 30 | E_f = E_u or (R and E_o) or (S and 'iso-8859-1') or E_p 31 | 32 | `E_u`, here, is any encoding provided by the programmer (through the 33 | ``Collection`` constructor). 
`E_o` is an encoding detected by OGR (which 34 | doesn't provide an API to get the detected encoding). `S` is ``True`` if the 35 | file is a Shapefile (because that's the format default). `E_p` is 36 | locale.getpreferredencoding(). 37 | 38 | Bottom line: if you know that your data file has an encoding other than 39 | ISO-8859-1, specify it. If you don't know what the encoding is, you can let the 40 | format driver try to figure it out (Requires GDAL 1.9.1+). 41 | 42 | Writing 43 | ------- 44 | 45 | On the writing side:: 46 | 47 | Fiona (encode) | (decode?) OGR (encode?) | encoded File 48 | 49 | E_i R E_f 50 | 51 | We derive `E_i` from `R` and `E_f` again as above. `E_f` is:: 52 | 53 | E_f = E_u or (S and 'iso-8859-1') or E_p 54 | 55 | Appending 56 | --------- 57 | 58 | The diagram is the same as above, but `E_f` is as in the Reading section. 59 | 60 | -------------------------------------------------------------------------------- /docs/fiona.fio.rst: -------------------------------------------------------------------------------- 1 | fiona.fio package 2 | ================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | fiona.fio.bounds module 8 | ----------------------- 9 | 10 | .. automodule:: fiona.fio.bounds 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | fiona.fio.calc module 16 | --------------------- 17 | 18 | .. automodule:: fiona.fio.calc 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | fiona.fio.cat module 24 | -------------------- 25 | 26 | .. automodule:: fiona.fio.cat 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | fiona.fio.collect module 32 | ------------------------ 33 | 34 | .. automodule:: fiona.fio.collect 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | fiona.fio.distrib module 40 | ------------------------ 41 | 42 | .. 
automodule:: fiona.fio.distrib 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | fiona.fio.dump module 48 | --------------------- 49 | 50 | .. automodule:: fiona.fio.dump 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | fiona.fio.env module 56 | -------------------- 57 | 58 | .. automodule:: fiona.fio.env 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | fiona.fio.filter module 64 | ----------------------- 65 | 66 | .. automodule:: fiona.fio.filter 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | fiona.fio.helpers module 72 | ------------------------ 73 | 74 | .. automodule:: fiona.fio.helpers 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | fiona.fio.info module 80 | --------------------- 81 | 82 | .. automodule:: fiona.fio.info 83 | :members: 84 | :undoc-members: 85 | :show-inheritance: 86 | 87 | fiona.fio.insp module 88 | --------------------- 89 | 90 | .. automodule:: fiona.fio.insp 91 | :members: 92 | :undoc-members: 93 | :show-inheritance: 94 | 95 | fiona.fio.load module 96 | --------------------- 97 | 98 | .. automodule:: fiona.fio.load 99 | :members: 100 | :undoc-members: 101 | :show-inheritance: 102 | 103 | fiona.fio.ls module 104 | ------------------- 105 | 106 | .. automodule:: fiona.fio.ls 107 | :members: 108 | :undoc-members: 109 | :show-inheritance: 110 | 111 | fiona.fio.main module 112 | --------------------- 113 | 114 | .. automodule:: fiona.fio.main 115 | :members: 116 | :undoc-members: 117 | :show-inheritance: 118 | 119 | fiona.fio.options module 120 | ------------------------ 121 | 122 | .. automodule:: fiona.fio.options 123 | :members: 124 | :undoc-members: 125 | :show-inheritance: 126 | 127 | fiona.fio.rm module 128 | ------------------- 129 | 130 | .. automodule:: fiona.fio.rm 131 | :members: 132 | :undoc-members: 133 | :show-inheritance: 134 | 135 | 136 | Module contents 137 | --------------- 138 | 139 | .. 
automodule:: fiona.fio 140 | :members: 141 | :undoc-members: 142 | :show-inheritance: 143 | -------------------------------------------------------------------------------- /docs/fiona.rst: -------------------------------------------------------------------------------- 1 | fiona package 2 | ============= 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | 9 | fiona.fio 10 | 11 | Submodules 12 | ---------- 13 | 14 | fiona.collection module 15 | ----------------------- 16 | 17 | .. automodule:: fiona.collection 18 | :members: 19 | :undoc-members: 20 | :show-inheritance: 21 | 22 | fiona.compat module 23 | ------------------- 24 | 25 | .. automodule:: fiona.compat 26 | :members: 27 | :undoc-members: 28 | :show-inheritance: 29 | 30 | fiona.crs module 31 | ---------------- 32 | 33 | .. automodule:: fiona.crs 34 | :members: 35 | :undoc-members: 36 | :show-inheritance: 37 | 38 | fiona.drvsupport module 39 | ----------------------- 40 | 41 | .. automodule:: fiona.drvsupport 42 | :members: 43 | :undoc-members: 44 | :show-inheritance: 45 | 46 | fiona.env module 47 | ---------------- 48 | 49 | .. automodule:: fiona.env 50 | :members: 51 | :undoc-members: 52 | :show-inheritance: 53 | 54 | fiona.errors module 55 | ------------------- 56 | 57 | .. automodule:: fiona.errors 58 | :members: 59 | :undoc-members: 60 | :show-inheritance: 61 | 62 | fiona.inspector module 63 | ---------------------- 64 | 65 | .. automodule:: fiona.inspector 66 | :members: 67 | :undoc-members: 68 | :show-inheritance: 69 | 70 | fiona.io module 71 | --------------- 72 | 73 | .. automodule:: fiona.io 74 | :members: 75 | :undoc-members: 76 | :show-inheritance: 77 | 78 | fiona.logutils module 79 | --------------------- 80 | 81 | .. automodule:: fiona.logutils 82 | :members: 83 | :undoc-members: 84 | :show-inheritance: 85 | 86 | fiona.ogrext module 87 | ------------------- 88 | 89 | .. 
automodule:: fiona.ogrext 90 | :members: 91 | :undoc-members: 92 | :show-inheritance: 93 | 94 | fiona.path module 95 | ----------------- 96 | 97 | .. automodule:: fiona.path 98 | :members: 99 | :undoc-members: 100 | :show-inheritance: 101 | 102 | fiona.rfc3339 module 103 | -------------------- 104 | 105 | .. automodule:: fiona.rfc3339 106 | :members: 107 | :undoc-members: 108 | :show-inheritance: 109 | 110 | fiona.schema module 111 | ------------------- 112 | 113 | .. automodule:: fiona.schema 114 | :members: 115 | :undoc-members: 116 | :show-inheritance: 117 | 118 | fiona.session module 119 | -------------------- 120 | 121 | .. automodule:: fiona.session 122 | :members: 123 | :undoc-members: 124 | :show-inheritance: 125 | 126 | fiona.transform module 127 | ---------------------- 128 | 129 | .. automodule:: fiona.transform 130 | :members: 131 | :undoc-members: 132 | :show-inheritance: 133 | 134 | fiona.vfs module 135 | ---------------- 136 | 137 | .. automodule:: fiona.vfs 138 | :members: 139 | :undoc-members: 140 | :show-inheritance: 141 | 142 | 143 | fiona module 144 | ------------ 145 | 146 | .. 
automodule:: fiona 147 | :members: 148 | :undoc-members: 149 | :show-inheritance: 150 | -------------------------------------------------------------------------------- /docs/img/concave.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/docs/img/concave.png -------------------------------------------------------------------------------- /docs/img/convex.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/docs/img/convex.png -------------------------------------------------------------------------------- /docs/img/simplified-buffer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/docs/img/simplified-buffer.png -------------------------------------------------------------------------------- /docs/img/zones.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/docs/img/zones.png -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | =============================================== 2 | Fiona: access to simple geospatial feature data 3 | =============================================== 4 | 5 | Fiona streams simple feature data to and from GIS formats like GeoPackage and 6 | Shapefile. Simple features are record, or row-like, and have a single geometry 7 | attribute. Fiona can read and write real-world simple feature data using 8 | multi-layered GIS formats, zipped and in-memory virtual file systems, from 9 | files on your hard drive or in cloud storage. 
This project includes Python 10 | modules and a command line interface (CLI). 11 | 12 | Here's an example of streaming and filtering features from a zipped dataset on 13 | the web and saving them to a new layer in a new Geopackage file. 14 | 15 | .. code-block:: python 16 | 17 | import fiona 18 | 19 | with fiona.open( 20 | "zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip" 21 | ) as src: 22 | profile = src.profile 23 | profile["driver"] = "GPKG" 24 | 25 | with fiona.open("example.gpkg", "w", layer="selection", **profile) as dst: 26 | dst.writerecords(feat in src.filter(bbox=(-107.0, 37.0, -105.0, 39.0))) 27 | 28 | The same result can be achieved on the command line using a combination of 29 | fio-cat and fio-load. 30 | 31 | .. code-block:: console 32 | 33 | fio cat zip+https://github.com/Toblerity/Fiona/files/11151652/coutwildrnp.zip --bbox "-107.0,37.0,-105.0,39.0" \ 34 | | fio load -f GPKG --layer selection example.gpkg 35 | 36 | .. toctree:: 37 | :maxdepth: 2 38 | 39 | Project Information 40 | Installation 41 | User Manual 42 | API Documentation 43 | CLI Documentation 44 | 45 | 46 | Indices and tables 47 | ================== 48 | 49 | * :ref:`genindex` 50 | * :ref:`modindex` 51 | * :ref:`search` 52 | 53 | -------------------------------------------------------------------------------- /docs/install.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Installation of the Fiona package is complicated by its dependency on libgdal 6 | and other C libraries. There are easy installations paths and an advanced 7 | installation path. 8 | 9 | Easy installation 10 | ================= 11 | 12 | Fiona has several `extension modules 13 | `__ which link against 14 | libgdal. This complicates installation. Binary distributions (wheels) 15 | containing libgdal and its own dependencies are available from the Python 16 | Package Index and can be installed using pip. 
17 | 18 | .. code-block:: console 19 | 20 | pip install fiona 21 | 22 | These wheels are mainly intended to make installation easy for simple 23 | applications, not so much for production. They are not tested for compatibility 24 | with all other binary wheels, conda packages, or QGIS, and omit many of GDAL's 25 | optional format drivers. If you need, for example, GML support you will need to 26 | build and install Fiona from a source distribution. 27 | 28 | Many users find Anaconda and conda-forge a good way to install Fiona and get 29 | access to more optional format drivers (like GML). 30 | 31 | Fiona requires Python 3.7 or higher and GDAL 3.2 or higher. 32 | 33 | Fiona 2.0 (in development and unreleased) requires Python 3.8 or higher and GDAL 3.4 or higher. 34 | 35 | Advanced installation 36 | ===================== 37 | 38 | Once GDAL and its dependencies are installed on your computer (how to do this 39 | is documented at https://gdal.org) Fiona can be built and installed using 40 | setuptools or pip. If your GDAL installation provides the ``gdal-config`` 41 | program, the process is simpler. 42 | 43 | Without pip: 44 | 45 | .. code-block:: console 46 | 47 | GDAL_CONFIG=/path/to/gdal-config python setup.py install 48 | 49 | With pip (version >= 22.3 is required): 50 | 51 | .. code-block:: console 52 | 53 | python -m pip install --user -U pip 54 | GDAL_CONFIG=/path/to/gdal-config python -m pip install --user . 55 | 56 | These are pretty much equivalent. Pip will use setuptools as the build backend. 57 | If the gdal-config program is on your executable path, then you don't need to 58 | set the environment variable. 59 | 60 | Without gdal-config you will need to configure header and library locations for 61 | the build in another way. One way to do this is to create a setup.cfg file in 62 | the source directory with content like this: 63 | 64 | .. 
code-block:: ini 65 | 66 | [build_ext] 67 | include_dirs = C:/vcpkg/installed/x64-windows/include 68 | libraries = gdal 69 | library_dirs = C:/vcpkg/installed/x64-windows/lib 70 | 71 | This is the approach taken by Fiona's `wheel-building workflow 72 | `__. 73 | With this file in place you can run either ``python setup.py install`` or ``python 74 | -m pip install --user .``. 75 | 76 | You can also pass those three values on the command line following the 77 | `setuptools documentation 78 | `__. 79 | However, the setup.cfg approach is easier. 80 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | fiona 2 | ===== 3 | 4 | .. toctree:: 5 | 6 | fiona 7 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: _fiona 2 | channels: 3 | - conda-forge 4 | - defaults 5 | dependencies: 6 | - pip 7 | - python=3.9.* 8 | - libgdal=3.4.* 9 | - cython=3 10 | - sphinx-click 11 | - sphinx-rtd-theme 12 | - pip: 13 | - jinja2==3.0.3 14 | -------------------------------------------------------------------------------- /examples/open.py: -------------------------------------------------------------------------------- 1 | import fiona 2 | 3 | # This module contains examples of opening files to get feature collections in 4 | # different ways. 5 | # 6 | # It is meant to be run from the distribution root, the directory containing 7 | # setup.py. 8 | # 9 | # A ``path`` is always the ``open()`` function's first argument. It can be 10 | # absolute or relative to the working directory. It is the only positional 11 | # argument, though it is conventional to use the mode as a 2nd positional 12 | # argument. 13 | 14 | # 1. Opening a file with a single data layer (shapefiles, etc). 
15 | # 16 | # args: path, mode 17 | # kwds: none 18 | # 19 | # The relative path to a file on the filesystem is given and its single layer 20 | # is selected implicitly (a shapefile has a single layer). The file is opened 21 | # for reading (mode 'r'), but since this is the default, we'll omit it in 22 | # following examples. 23 | 24 | with fiona.open('docs/data/test_uk.shp', 'r') as c: 25 | assert len(c) == 48 26 | 27 | # 2. Opening a file with explicit layer selection (FileGDB, etc). 28 | # 29 | # args: path 30 | # kwds: layer 31 | # 32 | # Same as above but layer specified explicitly by name.. 33 | 34 | with fiona.open('docs/data/test_uk.shp', layer='test_uk') as c: 35 | assert len(c) == 48 36 | 37 | # 3. Opening a directory for access to a single file. 38 | # 39 | # args: path 40 | # kwds: layer 41 | # 42 | # Same as above but using the path to the directory containing the shapefile, 43 | # specified explicitly by name. 44 | 45 | with fiona.open('docs/data', layer='test_uk') as c: 46 | assert len(c) == 48 47 | 48 | # 4. Opening a single file within a zip archive. 49 | # 50 | # args: path 51 | # kwds: vfs 52 | # 53 | # Open a file given its absolute path within a virtual filesystem. The VFS 54 | # is given an Apache Commons VFS identifier. It may contain either an absolute 55 | # path or a path relative to the working directory. 56 | # 57 | # Example archive: 58 | # 59 | # $ unzip -l docs/data/test_uk.zip 60 | # Archive: docs/data/test_uk.zip 61 | # Length Date Time Name 62 | # -------- ---- ---- ---- 63 | # 10129 04-08-13 20:49 test_uk.dbf 64 | # 143 04-08-13 20:49 test_uk.prj 65 | # 65156 04-08-13 20:49 test_uk.shp 66 | # 484 04-08-13 20:49 test_uk.shx 67 | # -------- ------- 68 | # 75912 4 files 69 | 70 | with fiona.open('/test_uk.shp', vfs='zip://docs/data/test_uk.zip') as c: 71 | assert len(c) == 48 72 | 73 | # 5. Opening a directory within a zip archive to select a layer. 
74 | # 75 | # args: path 76 | # kwds: layer, vfs 77 | # 78 | # The most complicated case. As above, but specifying the root directory within 79 | # the virtual filesystem as the path and the layer by name (combination of 80 | # 4 and 3). It ought to be possible to open a file geodatabase within a zip 81 | # file like this. 82 | 83 | with fiona.open('/', layer='test_uk', vfs='zip://docs/data/test_uk.zip') as c: 84 | assert len(c) == 48 85 | 86 | -------------------------------------------------------------------------------- /examples/orient-ccw.py: -------------------------------------------------------------------------------- 1 | # An example of flipping feature polygons right side up. 2 | 3 | import datetime 4 | import logging 5 | import sys 6 | 7 | import fiona 8 | 9 | 10 | logging.basicConfig(stream=sys.stderr, level=logging.INFO) 11 | 12 | def signed_area(coords): 13 | """Return the signed area enclosed by a ring using the linear time 14 | algorithm at http://www.cgafaq.info/wiki/Polygon_Area. A value >= 0 15 | indicates a counter-clockwise oriented ring. 16 | """ 17 | xs, ys = map(list, zip(*coords)) 18 | xs.append(xs[1]) 19 | ys.append(ys[1]) 20 | return sum(xs[i]*(ys[i+1]-ys[i-1]) for i in range(1, len(coords)))/2.0 21 | 22 | 23 | with fiona.open('docs/data/test_uk.shp', 'r') as source: 24 | 25 | # Copy the source schema and add two new properties. 26 | schema = source.schema.copy() 27 | schema['properties']['s_area'] = 'float' 28 | schema['properties']['timestamp'] = 'str' 29 | 30 | # Create a sink for processed features with the same format and 31 | # coordinate reference system as the source. 32 | with fiona.open( 33 | 'oriented-ccw.shp', 'w', 34 | driver=source.driver, 35 | schema=schema, 36 | crs=source.crs 37 | ) as sink: 38 | 39 | for f in source: 40 | 41 | try: 42 | 43 | # If any feature's polygon is facing "down" (has rings 44 | # wound clockwise), its rings will be reordered to flip 45 | # it "up". 
46 | g = f['geometry'] 47 | assert g['type'] == 'Polygon' 48 | rings = g['coordinates'] 49 | sa = sum(signed_area(r) for r in rings) 50 | if sa < 0.0: 51 | rings = [r[::-1] for r in rings] 52 | g['coordinates'] = rings 53 | f['geometry'] = g 54 | 55 | # Add the signed area of the polygon and a timestamp 56 | # to the feature properties map. 57 | f['properties'].update( 58 | s_area=sa, 59 | timestamp=datetime.datetime.now().isoformat() ) 60 | 61 | sink.write(f) 62 | 63 | except Exception as e: 64 | logging.exception("Error processing feature %s:", f['id']) 65 | 66 | -------------------------------------------------------------------------------- /examples/with-descartes-functional.py: -------------------------------------------------------------------------------- 1 | # Making maps with reduce() 2 | 3 | from matplotlib import pyplot 4 | from descartes import PolygonPatch 5 | import fiona 6 | 7 | BLUE = '#6699cc' 8 | 9 | def render(fig, rec): 10 | """Given matplotlib axes and a record, adds the record as a patch 11 | and returns the axes so that reduce() can accumulate more 12 | patches.""" 13 | fig.gca().add_patch( 14 | PolygonPatch(rec['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2)) 15 | return fig 16 | 17 | with fiona.open('docs/data/test_uk.shp', 'r') as source: 18 | fig = reduce(render, source, pyplot.figure(figsize=(8, 8))) 19 | fig.gca().autoscale(tight=False) 20 | fig.savefig('with-descartes-functional.png') 21 | 22 | -------------------------------------------------------------------------------- /examples/with-descartes.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | from matplotlib import pyplot 4 | from descartes import PolygonPatch 5 | 6 | import fiona 7 | 8 | # Set up the figure and axes. 
9 | BLUE = '#6699cc' 10 | fig = pyplot.figure(1, figsize=(6, 6), dpi=90) 11 | ax = fig.add_subplot(111) 12 | 13 | with fiona.drivers(): 14 | 15 | # For each feature in the collection, add a patch to the axes. 16 | with fiona.open('docs/data/test_uk.shp', 'r') as input: 17 | for f in input: 18 | ax.add_patch( 19 | PolygonPatch( 20 | f['geometry'], fc=BLUE, ec=BLUE, alpha=0.5, zorder=2 )) 21 | 22 | # Should be able to get extents from the collection in a future version 23 | # of Fiona. 24 | ax.set_xlim(-9.25, 2.75) 25 | ax.set_ylim(49.5, 61.5) 26 | 27 | fig.savefig('test_uk.png') 28 | 29 | subprocess.call(['open', 'test_uk.png']) 30 | -------------------------------------------------------------------------------- /examples/with-pyproj.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | from pyproj import Proj, transform 5 | 6 | import fiona 7 | from fiona.crs import from_epsg 8 | 9 | logging.basicConfig(stream=sys.stderr, level=logging.INFO) 10 | 11 | with fiona.open('docs/data/test_uk.shp', 'r') as source: 12 | 13 | sink_schema = source.schema.copy() 14 | p_in = Proj(source.crs) 15 | 16 | with fiona.open( 17 | 'with-pyproj.shp', 'w', 18 | crs=from_epsg(27700), 19 | driver=source.driver, 20 | schema=sink_schema, 21 | ) as sink: 22 | 23 | p_out = Proj(sink.crs) 24 | 25 | for f in source: 26 | 27 | try: 28 | assert f['geometry']['type'] == "Polygon" 29 | new_coords = [] 30 | for ring in f['geometry']['coordinates']: 31 | x2, y2 = transform(p_in, p_out, *zip(*ring)) 32 | new_coords.append(zip(x2, y2)) 33 | f['geometry']['coordinates'] = new_coords 34 | sink.write(f) 35 | 36 | except Exception as e: 37 | # Writing uncleanable features to a different shapefile 38 | # is another option. 
39 | logging.exception("Error transforming feature %s:", f['id']) 40 | 41 | -------------------------------------------------------------------------------- /examples/with-shapely.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | 4 | from shapely.geometry import mapping, shape 5 | 6 | import fiona 7 | 8 | logging.basicConfig(stream=sys.stderr, level=logging.INFO) 9 | 10 | with fiona.open('docs/data/test_uk.shp', 'r') as source: 11 | 12 | # **source.meta is a shortcut to get the crs, driver, and schema 13 | # keyword arguments from the source Collection. 14 | with fiona.open( 15 | 'with-shapely.shp', 'w', 16 | **source.meta) as sink: 17 | 18 | for f in source: 19 | 20 | try: 21 | geom = shape(f['geometry']) 22 | if not geom.is_valid: 23 | clean = geom.buffer(0.0) 24 | assert clean.is_valid 25 | assert clean.geom_type == 'Polygon' 26 | geom = clean 27 | f['geometry'] = mapping(geom) 28 | sink.write(f) 29 | 30 | except Exception as e: 31 | # Writing uncleanable features to a different shapefile 32 | # is another option. 33 | logging.exception("Error cleaning feature %s:", f['id']) 34 | 35 | -------------------------------------------------------------------------------- /fiona/_cpl.pxd: -------------------------------------------------------------------------------- 1 | # Cross-platform API functions. 
2 | 3 | cdef extern from "cpl_conv.h": 4 | void * CPLMalloc (size_t) 5 | void CPLFree (void *ptr) 6 | void CPLSetThreadLocalConfigOption (char *key, char *val) 7 | const char *CPLGetConfigOption (char *, char *) 8 | 9 | cdef extern from "cpl_vsi.h": 10 | ctypedef struct VSILFILE: 11 | pass 12 | int VSIFCloseL (VSILFILE *) 13 | VSILFILE * VSIFileFromMemBuffer (const char * filename, 14 | unsigned char * data, 15 | int data_len, 16 | int take_ownership) 17 | int VSIUnlink (const char * pathname) 18 | 19 | ctypedef int OGRErr 20 | ctypedef struct OGREnvelope: 21 | double MinX 22 | double MaxX 23 | double MinY 24 | double MaxY 25 | -------------------------------------------------------------------------------- /fiona/_crs.pyx: -------------------------------------------------------------------------------- 1 | """Extension module supporting crs.py. 2 | 3 | Calls methods from GDAL's OSR module. 4 | """ 5 | 6 | import logging 7 | 8 | from fiona cimport _cpl 9 | from fiona._err cimport exc_wrap_pointer 10 | from fiona._err import CPLE_BaseError 11 | from fiona._shim cimport osr_get_name, osr_set_traditional_axis_mapping_strategy 12 | from fiona.compat import DICT_TYPES 13 | from fiona.errors import CRSError 14 | 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | cdef int OAMS_TRADITIONAL_GIS_ORDER = 0 19 | 20 | 21 | # Export a WKT string from input crs. 22 | def crs_to_wkt(crs): 23 | """Convert a Fiona CRS object to WKT format""" 24 | cdef OGRSpatialReferenceH cogr_srs = NULL 25 | cdef char *proj_c = NULL 26 | 27 | try: 28 | cogr_srs = exc_wrap_pointer(OSRNewSpatialReference(NULL)) 29 | except CPLE_BaseError as exc: 30 | raise CRSError(str(exc)) 31 | 32 | # check for other CRS classes 33 | if hasattr(crs, "to_wkt") and callable(crs.to_wkt): 34 | crs = crs.to_wkt() 35 | 36 | # First, check for CRS strings like "EPSG:3857". 
37 | if isinstance(crs, str): 38 | proj_b = crs.encode('utf-8') 39 | proj_c = proj_b 40 | OSRSetFromUserInput(cogr_srs, proj_c) 41 | 42 | elif isinstance(crs, DICT_TYPES): 43 | # EPSG is a special case. 44 | init = crs.get('init') 45 | if init: 46 | logger.debug("Init: %s", init) 47 | auth, val = init.split(':') 48 | if auth.upper() == 'EPSG': 49 | logger.debug("Setting EPSG: %s", val) 50 | OSRImportFromEPSG(cogr_srs, int(val)) 51 | else: 52 | params = [] 53 | crs['wktext'] = True 54 | for k, v in crs.items(): 55 | if v is True or (k in ('no_defs', 'wktext') and v): 56 | params.append("+%s" % k) 57 | else: 58 | params.append("+%s=%s" % (k, v)) 59 | proj = " ".join(params) 60 | logger.debug("PROJ.4 to be imported: %r", proj) 61 | proj_b = proj.encode('utf-8') 62 | proj_c = proj_b 63 | OSRImportFromProj4(cogr_srs, proj_c) 64 | else: 65 | raise CRSError(f"Invalid input to create CRS: {crs}") 66 | 67 | osr_set_traditional_axis_mapping_strategy(cogr_srs) 68 | OSRExportToWkt(cogr_srs, &proj_c) 69 | 70 | if proj_c == NULL: 71 | raise CRSError(f"Invalid input to create CRS: {crs}") 72 | 73 | proj_b = proj_c 74 | _cpl.CPLFree(proj_c) 75 | 76 | if not proj_b: 77 | raise CRSError(f"Invalid input to create CRS: {crs}") 78 | 79 | return proj_b.decode('utf-8') 80 | -------------------------------------------------------------------------------- /fiona/_csl.pxd: -------------------------------------------------------------------------------- 1 | # String API functions. 
2 | 3 | cdef extern from "cpl_string.h": 4 | char ** CSLAddNameValue (char **list, char *name, char *value) 5 | char ** CSLSetNameValue (char **list, char *name, char *value) 6 | void CSLDestroy (char **list) 7 | -------------------------------------------------------------------------------- /fiona/_env.pxd: -------------------------------------------------------------------------------- 1 | include "gdal.pxi" 2 | 3 | 4 | cdef class ConfigEnv(object): 5 | cdef public object options 6 | 7 | 8 | cdef class GDALEnv(ConfigEnv): 9 | cdef public object _have_registered_drivers 10 | 11 | 12 | cdef _safe_osr_release(OGRSpatialReferenceH srs) 13 | -------------------------------------------------------------------------------- /fiona/_err.pxd: -------------------------------------------------------------------------------- 1 | include "gdal.pxi" 2 | 3 | from libc.stdio cimport * 4 | 5 | cdef get_last_error_msg() 6 | cdef int exc_wrap_int(int retval) except -1 7 | cdef OGRErr exc_wrap_ogrerr(OGRErr retval) except -1 8 | cdef void *exc_wrap_pointer(void *ptr) except NULL 9 | cdef VSILFILE *exc_wrap_vsilfile(VSILFILE *f) except NULL 10 | 11 | cdef class StackChecker: 12 | cdef object error_stack 13 | cdef int exc_wrap_int(self, int retval) except -1 14 | cdef void *exc_wrap_pointer(self, void *ptr) except NULL 15 | -------------------------------------------------------------------------------- /fiona/_show_versions.py: -------------------------------------------------------------------------------- 1 | import platform 2 | import sys 3 | 4 | import fiona 5 | from fiona._env import get_gdal_release_name, get_proj_version_tuple 6 | 7 | 8 | def show_versions(): 9 | """ 10 | Prints information useful for bug reports 11 | """ 12 | 13 | print("Fiona version:", fiona.__version__) 14 | print("GDAL version:", get_gdal_release_name()) 15 | print("PROJ version:", ".".join(map(str, get_proj_version_tuple()))) 16 | print() 17 | print("OS:", platform.system(), platform.release()) 18 | 
print("Python:", platform.python_version()) 19 | print("Python executable:", sys.executable) 20 | -------------------------------------------------------------------------------- /fiona/_vendor/munch/LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010 David Schoonover 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 
20 | -------------------------------------------------------------------------------- /fiona/_vsiopener.pxd: -------------------------------------------------------------------------------- 1 | include "gdal.pxi" 2 | -------------------------------------------------------------------------------- /fiona/abc.py: -------------------------------------------------------------------------------- 1 | """Abstract base classes.""" 2 | 3 | from fiona._vsiopener import FileContainer, MultiByteRangeResourceContainer 4 | -------------------------------------------------------------------------------- /fiona/compat.py: -------------------------------------------------------------------------------- 1 | from collections import UserDict 2 | from collections.abc import Mapping 3 | 4 | DICT_TYPES = (dict, Mapping, UserDict) 5 | 6 | 7 | def strencode(instr, encoding="utf-8"): 8 | try: 9 | instr = instr.encode(encoding) 10 | except (UnicodeDecodeError, AttributeError): 11 | pass 12 | return instr 13 | -------------------------------------------------------------------------------- /fiona/crs.pxd: -------------------------------------------------------------------------------- 1 | include "gdal.pxi" 2 | 3 | 4 | cdef class CRS: 5 | cdef OGRSpatialReferenceH _osr 6 | cdef object _data 7 | cdef object _epsg 8 | cdef object _wkt 9 | 10 | 11 | cdef void osr_set_traditional_axis_mapping_strategy(OGRSpatialReferenceH hSrs) 12 | -------------------------------------------------------------------------------- /fiona/enums.py: -------------------------------------------------------------------------------- 1 | """Enumerations.""" 2 | 3 | from enum import Enum 4 | 5 | 6 | class WktVersion(Enum): 7 | """ 8 | .. versionadded:: 1.9.0 9 | 10 | Supported CRS WKT string versions. 
11 | """ 12 | 13 | #: WKT Version 2 from 2015 14 | WKT2_2015 = "WKT2_2015" 15 | #: Alias for latest WKT Version 2 16 | WKT2 = "WKT2" 17 | #: WKT Version 2 from 2019 18 | WKT2_2019 = "WKT2_2018" 19 | #: WKT Version 1 GDAL Style 20 | WKT1_GDAL = "WKT1_GDAL" 21 | #: Alias for WKT Version 1 GDAL Style 22 | WKT1 = "WKT1" 23 | #: WKT Version 1 ESRI Style 24 | WKT1_ESRI = "WKT1_ESRI" 25 | 26 | @classmethod 27 | def _missing_(cls, value): 28 | if value == "WKT2_2019": 29 | # WKT2_2019 alias added in GDAL 3.2, use WKT2_2018 for compatibility 30 | return WktVersion.WKT2_2019 31 | raise ValueError(f"Invalid value for WktVersion: {value}") 32 | -------------------------------------------------------------------------------- /fiona/errors.py: -------------------------------------------------------------------------------- 1 | # Errors. 2 | 3 | 4 | class FionaError(Exception): 5 | """Base Fiona error""" 6 | 7 | 8 | class FionaValueError(FionaError, ValueError): 9 | """Fiona-specific value errors""" 10 | 11 | 12 | class AttributeFilterError(FionaValueError): 13 | """Error processing SQL WHERE clause with the dataset.""" 14 | 15 | 16 | class DriverError(FionaValueError): 17 | """Encapsulates unsupported driver and driver mode errors.""" 18 | 19 | 20 | class SchemaError(FionaValueError): 21 | """When a schema mapping has no properties or no geometry.""" 22 | 23 | 24 | class CRSError(FionaValueError): 25 | """When a crs mapping has neither init or proj items.""" 26 | 27 | 28 | class UnsupportedOperation(FionaError): 29 | """Raised when reading from a file opened in 'w' mode""" 30 | 31 | 32 | class DataIOError(OSError): 33 | """IO errors involving driver registration or availability.""" 34 | 35 | 36 | class DriverIOError(OSError): 37 | """A format specific driver error.""" 38 | 39 | 40 | class DriverSupportError(DriverIOError): 41 | """Driver does not support schema""" 42 | 43 | 44 | class DatasetDeleteError(OSError): 45 | """Failure to delete a dataset""" 46 | 47 | 48 | class 
FieldNameEncodeError(UnicodeEncodeError): 49 | """Failure to encode a field name.""" 50 | 51 | 52 | class UnsupportedGeometryTypeError(KeyError): 53 | """When an OGR geometry type isn't supported by Fiona.""" 54 | 55 | 56 | class GeometryTypeValidationError(FionaValueError): 57 | """Tried to write a geometry type not specified in the schema""" 58 | 59 | 60 | class TransactionError(RuntimeError): 61 | """Failure relating to GDAL transactions""" 62 | 63 | 64 | class EnvError(FionaError): 65 | """Environment Errors""" 66 | 67 | 68 | class GDALVersionError(FionaError): 69 | """Raised if the runtime version of GDAL does not meet the required 70 | version of GDAL. 71 | """ 72 | 73 | 74 | class TransformError(FionaError): 75 | """Raised if a coordinate transformation fails.""" 76 | 77 | 78 | class OpenerRegistrationError(FionaError): 79 | """Raised when a Python file opener can not be registered.""" 80 | 81 | 82 | class PathError(FionaError): 83 | """Raised when a dataset path is malformed or invalid""" 84 | 85 | 86 | class FionaDeprecationWarning(DeprecationWarning): 87 | """A warning about deprecation of Fiona features""" 88 | 89 | 90 | class FeatureWarning(UserWarning): 91 | """A warning about serialization of a feature""" 92 | 93 | 94 | class ReduceError(FionaError): 95 | """Raised when reduce operation fails.""" 96 | -------------------------------------------------------------------------------- /fiona/fio/__init__.py: -------------------------------------------------------------------------------- 1 | """Fiona's command line interface""" 2 | 3 | from functools import wraps 4 | 5 | 6 | def with_context_env(f): 7 | """Pops the Fiona Env from the passed context and executes the 8 | wrapped func in the context of that obj. 9 | 10 | Click's pass_context decorator must precede this decorator, or else 11 | there will be no context in the wrapper args.
12 | """ 13 | @wraps(f) 14 | def wrapper(*args, **kwds): 15 | ctx = args[0] 16 | env = ctx.obj.pop('env') 17 | with env: 18 | return f(*args, **kwds) 19 | return wrapper 20 | -------------------------------------------------------------------------------- /fiona/fio/bounds.py: -------------------------------------------------------------------------------- 1 | """$ fio bounds""" 2 | 3 | import json 4 | 5 | import click 6 | from cligj import precision_opt, use_rs_opt 7 | 8 | import fiona 9 | from fiona.fio.helpers import obj_gen 10 | from fiona.fio import with_context_env 11 | from fiona.model import ObjectEncoder 12 | 13 | 14 | @click.command(short_help="Print the extent of GeoJSON objects") 15 | @precision_opt 16 | @click.option('--explode/--no-explode', default=False, 17 | help="Explode collections into features (default: no).") 18 | @click.option('--with-id/--without-id', default=False, 19 | help="Print GeoJSON ids and bounding boxes together " 20 | "(default: without).") 21 | @click.option('--with-obj/--without-obj', default=False, 22 | help="Print GeoJSON objects and bounding boxes together " 23 | "(default: without).") 24 | @use_rs_opt 25 | @click.pass_context 26 | @with_context_env 27 | def bounds(ctx, precision, explode, with_id, with_obj, use_rs): 28 | """Print the bounding boxes of GeoJSON objects read from stdin. 29 | 30 | Optionally explode collections and print the bounds of their 31 | features. 32 | 33 | To print identifiers for input objects along with their bounds 34 | as a {id: identifier, bbox: bounds} JSON object, use --with-id. 35 | 36 | To print the input objects themselves along with their bounds 37 | as GeoJSON object, use --with-obj. This has the effect of updating 38 | input objects with {id: identifier, bbox: bounds}. 
39 | 40 | """ 41 | stdin = click.get_text_stream('stdin') 42 | source = obj_gen(stdin) 43 | 44 | for i, obj in enumerate(source): 45 | obj_id = obj.get("id", "collection:" + str(i)) 46 | xs = [] 47 | ys = [] 48 | features = obj.get("features") or [obj] 49 | 50 | for j, feat in enumerate(features): 51 | feat_id = feat.get("id", "feature:" + str(i)) 52 | w, s, e, n = fiona.bounds(feat) 53 | 54 | if precision > 0: 55 | w, s, e, n = (round(v, precision) for v in (w, s, e, n)) 56 | if explode: 57 | 58 | if with_id: 59 | rec = {"parent": obj_id, "id": feat_id, "bbox": (w, s, e, n)} 60 | elif with_obj: 61 | feat.update(parent=obj_id, bbox=(w, s, e, n)) 62 | rec = feat 63 | else: 64 | rec = (w, s, e, n) 65 | 66 | if use_rs: 67 | click.echo('\x1e', nl=False) 68 | 69 | click.echo(json.dumps(rec, cls=ObjectEncoder)) 70 | 71 | else: 72 | xs.extend([w, e]) 73 | ys.extend([s, n]) 74 | 75 | if not explode: 76 | w, s, e, n = (min(xs), min(ys), max(xs), max(ys)) 77 | 78 | if with_id: 79 | rec = {"id": obj_id, "bbox": (w, s, e, n)} 80 | elif with_obj: 81 | obj.update(id=obj_id, bbox=(w, s, e, n)) 82 | rec = obj 83 | else: 84 | rec = (w, s, e, n) 85 | 86 | if use_rs: 87 | click.echo("\x1e", nl=False) 88 | 89 | click.echo(json.dumps(rec, cls=ObjectEncoder)) 90 | -------------------------------------------------------------------------------- /fiona/fio/calc.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import click 4 | from cligj import use_rs_opt 5 | 6 | from .helpers import obj_gen, eval_feature_expression 7 | from fiona.fio import with_context_env 8 | from fiona.model import ObjectEncoder 9 | 10 | 11 | @click.command(short_help="Calculate GeoJSON property by Python expression") 12 | @click.argument('property_name') 13 | @click.argument('expression') 14 | @click.option('--overwrite', is_flag=True, default=False, 15 | help="Overwrite properties, default: False") 16 | @use_rs_opt 17 | @click.pass_context 18 | 
@with_context_env 19 | def calc(ctx, property_name, expression, overwrite, use_rs): 20 | """ 21 | Create a new property on GeoJSON features using the specified expression. 22 | 23 | \b 24 | The expression is evaluated in a restricted namespace containing: 25 | - sum, pow, min, max and the imported math module 26 | - shape (optional, imported from shapely.geometry if available) 27 | - bool, int, str, len, float type conversions 28 | - f (the feature to be evaluated, 29 | allows item access via javascript-style dot notation using munch) 30 | 31 | The expression will be evaluated for each feature and its 32 | return value will be added to the properties 33 | as the specified property_name. Existing properties will not 34 | be overwritten by default (an Exception is raised). 35 | 36 | Example 37 | 38 | \b 39 | $ fio cat data.shp | fio calc sumAB "f.properties.A + f.properties.B" 40 | 41 | """ 42 | stdin = click.get_text_stream('stdin') 43 | source = obj_gen(stdin) 44 | 45 | for i, obj in enumerate(source): 46 | features = obj.get("features") or [obj] 47 | 48 | for j, feat in enumerate(features): 49 | 50 | if not overwrite and property_name in feat["properties"]: 51 | raise click.UsageError( 52 | f"{property_name} already exists in properties; " 53 | "rename or use --overwrite" 54 | ) 55 | 56 | feat["properties"][property_name] = eval_feature_expression( 57 | feat, expression 58 | ) 59 | 60 | if use_rs: 61 | click.echo("\x1e", nl=False) 62 | 63 | click.echo(json.dumps(feat, cls=ObjectEncoder)) 64 | -------------------------------------------------------------------------------- /fiona/fio/distrib.py: -------------------------------------------------------------------------------- 1 | """$ fio distrib""" 2 | 3 | import json 4 | 5 | import click 6 | import cligj 7 | 8 | from fiona.fio import helpers, with_context_env 9 | from fiona.model import ObjectEncoder 10 | 11 | 12 | @click.command() 13 | @cligj.use_rs_opt 14 | @click.pass_context 15 | @with_context_env 16 | def 
distrib(ctx, use_rs): 17 | """Distribute features from a collection. 18 | 19 | Print the features of GeoJSON objects read from stdin. 20 | 21 | """ 22 | stdin = click.get_text_stream('stdin') 23 | source = helpers.obj_gen(stdin) 24 | 25 | for i, obj in enumerate(source): 26 | obj_id = obj.get("id", "collection:" + str(i)) 27 | features = obj.get("features") or [obj] 28 | for j, feat in enumerate(features): 29 | if obj.get("type") == "FeatureCollection": 30 | feat["parent"] = obj_id 31 | feat_id = feat.get("id", "feature:" + str(i)) 32 | feat["id"] = feat_id 33 | if use_rs: 34 | click.echo("\x1e", nl=False) 35 | click.echo(json.dumps(feat, cls=ObjectEncoder)) 36 | -------------------------------------------------------------------------------- /fiona/fio/env.py: -------------------------------------------------------------------------------- 1 | """$ fio env""" 2 | 3 | import json 4 | import os 5 | 6 | import click 7 | 8 | import fiona 9 | from fiona._env import GDALDataFinder, PROJDataFinder 10 | 11 | 12 | @click.command(short_help="Print information about the fio environment.") 13 | @click.option('--formats', 'key', flag_value='formats', default=True, 14 | help="Enumerate the available formats.") 15 | @click.option('--credentials', 'key', flag_value='credentials', default=False, 16 | help="Print credentials.") 17 | @click.option('--gdal-data', 'key', flag_value='gdal_data', default=False, 18 | help="Print GDAL data path.") 19 | @click.option('--proj-data', 'key', flag_value='proj_data', default=False, 20 | help="Print PROJ data path.") 21 | @click.pass_context 22 | def env(ctx, key): 23 | """Print information about the Fiona environment: available 24 | formats, etc. 
25 | """ 26 | stdout = click.get_text_stream('stdout') 27 | with ctx.obj['env'] as env: 28 | if key == 'formats': 29 | for k, v in sorted(fiona.supported_drivers.items()): 30 | modes = ', '.join("'" + m + "'" for m in v) 31 | stdout.write(f"{k} (modes {modes})\n") 32 | stdout.write('\n') 33 | elif key == 'credentials': 34 | click.echo(json.dumps(env.session.credentials)) 35 | elif key == 'gdal_data': 36 | click.echo(os.environ.get('GDAL_DATA') or GDALDataFinder().search()) 37 | elif key == 'proj_data': 38 | click.echo(os.environ.get('PROJ_DATA', os.environ.get('PROJ_LIB')) or PROJDataFinder().search()) 39 | -------------------------------------------------------------------------------- /fiona/fio/info.py: -------------------------------------------------------------------------------- 1 | """$ fio info""" 2 | 3 | 4 | import logging 5 | import json 6 | 7 | import click 8 | from cligj import indent_opt 9 | 10 | import fiona 11 | import fiona.crs 12 | from fiona.errors import DriverError 13 | from fiona.fio import options, with_context_env 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | @click.command() 19 | # One or more files. 20 | @click.argument('input', required=True) 21 | @click.option('--layer', metavar="INDEX|NAME", callback=options.cb_layer, 22 | help="Print information about a specific layer. The first " 23 | "layer is used by default. Layers use zero-based " 24 | "numbering when accessed by index.") 25 | @indent_opt 26 | # Options to pick out a single metadata item and print it as 27 | # a string. 
28 | @click.option('--count', 'meta_member', flag_value='count', 29 | help="Print the count of features.") 30 | @click.option('-f', '--format', '--driver', 'meta_member', flag_value='driver', 31 | help="Print the format driver.") 32 | @click.option('--crs', 'meta_member', flag_value='crs', 33 | help="Print the CRS as a PROJ.4 string.") 34 | @click.option('--bounds', 'meta_member', flag_value='bounds', 35 | help="Print the boundary coordinates " 36 | "(left, bottom, right, top).") 37 | @click.option('--name', 'meta_member', flag_value='name', 38 | help="Print the datasource's name.") 39 | @options.open_opt 40 | @click.pass_context 41 | @with_context_env 42 | def info(ctx, input, indent, meta_member, layer, open_options): 43 | """ 44 | Print information about a dataset. 45 | 46 | When working with a multi-layer dataset the first layer is used by default. 47 | Use the '--layer' option to select a different layer. 48 | 49 | """ 50 | with fiona.open(input, layer=layer, **open_options) as src: 51 | info = src.meta 52 | info.update(name=src.name) 53 | 54 | try: 55 | info.update(bounds=src.bounds) 56 | except DriverError: 57 | info.update(bounds=None) 58 | logger.debug( 59 | "Setting 'bounds' to None - driver was not able to calculate bounds" 60 | ) 61 | 62 | try: 63 | info.update(count=len(src)) 64 | except TypeError: 65 | info.update(count=None) 66 | logger.debug( 67 | "Setting 'count' to None/null - layer does not support counting" 68 | ) 69 | 70 | info["crs"] = src.crs.to_string() 71 | 72 | if meta_member: 73 | if isinstance(info[meta_member], (list, tuple)): 74 | click.echo(" ".join(map(str, info[meta_member]))) 75 | else: 76 | click.echo(info[meta_member]) 77 | else: 78 | click.echo(json.dumps(info, indent=indent)) 79 | -------------------------------------------------------------------------------- /fiona/fio/insp.py: -------------------------------------------------------------------------------- 1 | """$ fio insp""" 2 | 3 | 4 | import code 5 | import sys 6 | 7 | 
import click 8 | 9 | import fiona 10 | from fiona.fio import options, with_context_env 11 | 12 | 13 | @click.command(short_help="Open a dataset and start an interpreter.") 14 | @click.argument("src_path", required=True) 15 | @click.option( 16 | "--ipython", "interpreter", flag_value="ipython", help="Use IPython as interpreter." 17 | ) 18 | @options.open_opt 19 | @click.pass_context 20 | @with_context_env 21 | def insp(ctx, src_path, interpreter, open_options): 22 | """Open a collection within an interactive interpreter.""" 23 | banner = ( 24 | "Fiona %s Interactive Inspector (Python %s)\n" 25 | 'Type "src.schema", "next(src)", or "help(src)" ' 26 | "for more information." 27 | % (fiona.__version__, ".".join(map(str, sys.version_info[:3]))) 28 | ) 29 | 30 | with fiona.open(src_path, **open_options) as src: 31 | scope = locals() 32 | if not interpreter: 33 | code.interact(banner, local=scope) 34 | elif interpreter == "ipython": 35 | import IPython 36 | 37 | IPython.InteractiveShell.banner1 = banner 38 | IPython.start_ipython(argv=[], user_ns=scope) 39 | else: 40 | raise click.ClickException( 41 | f"Interpreter {interpreter} is unsupported or missing " 42 | "dependencies" 43 | ) 44 | -------------------------------------------------------------------------------- /fiona/fio/load.py: -------------------------------------------------------------------------------- 1 | """$ fio load""" 2 | 3 | from functools import partial 4 | 5 | import click 6 | import cligj 7 | 8 | import fiona 9 | from fiona.fio import options, with_context_env 10 | from fiona.model import Feature, Geometry 11 | from fiona.transform import transform_geom 12 | 13 | 14 | @click.command(short_help="Load GeoJSON to a dataset in another format.") 15 | @click.argument("output", required=True) 16 | @click.option("-f", "--format", "--driver", "driver", help="Output format driver name.") 17 | @options.src_crs_opt 18 | @click.option( 19 | "--dst-crs", 20 | "--dst_crs", 21 | help="Destination CRS. 
Defaults to --src-crs when not given.", 22 | ) 23 | @cligj.features_in_arg 24 | @click.option( 25 | "--layer", 26 | metavar="INDEX|NAME", 27 | callback=options.cb_layer, 28 | help="Load features into specified layer. Layers use " 29 | "zero-based numbering when accessed by index.", 30 | ) 31 | @options.creation_opt 32 | @options.open_opt 33 | @click.option("--append", is_flag=True, help="Open destination layer in append mode.") 34 | @click.pass_context 35 | @with_context_env 36 | def load( 37 | ctx, 38 | output, 39 | driver, 40 | src_crs, 41 | dst_crs, 42 | features, 43 | layer, 44 | creation_options, 45 | open_options, 46 | append, 47 | ): 48 | """Load features from JSON to a file in another format. 49 | 50 | The input is a GeoJSON feature collection or optionally a sequence of 51 | GeoJSON feature objects. 52 | 53 | """ 54 | dst_crs = dst_crs or src_crs 55 | 56 | if src_crs and dst_crs and src_crs != dst_crs: 57 | transformer = partial( 58 | transform_geom, src_crs, dst_crs, antimeridian_cutting=True 59 | ) 60 | else: 61 | 62 | def transformer(x): 63 | return Geometry.from_dict(**x) 64 | 65 | def feature_gen(): 66 | """Convert stream of JSON to features. 67 | 68 | Yields 69 | ------ 70 | Feature 71 | 72 | """ 73 | try: 74 | for feat in features: 75 | feat["geometry"] = transformer(Geometry.from_dict(**feat["geometry"])) 76 | yield Feature.from_dict(**feat) 77 | except TypeError: 78 | raise click.ClickException("Invalid input.") 79 | 80 | source = feature_gen() 81 | 82 | # Use schema of first feature as a template. 83 | # TODO: schema specified on command line? 84 | try: 85 | first = next(source) 86 | except TypeError: 87 | raise click.ClickException("Invalid input.") 88 | 89 | # TODO: this inference of a property's type from its value needs some 90 | # work. It works reliably only for the basic JSON serializable types. 91 | # The fio-load command does require JSON input but that may change 92 | # someday. 
93 | schema = {"geometry": first.geometry.type} 94 | schema["properties"] = { 95 | k: type(v if v is not None else "").__name__ 96 | for k, v in first.properties.items() 97 | } 98 | 99 | if append: 100 | opener = fiona.open(output, "a", layer=layer, **open_options) 101 | else: 102 | opener = fiona.open( 103 | output, 104 | "w", 105 | driver=driver, 106 | crs=dst_crs, 107 | schema=schema, 108 | layer=layer, 109 | **creation_options 110 | ) 111 | 112 | with opener as dst: 113 | dst.write(first) 114 | dst.writerecords(source) 115 | -------------------------------------------------------------------------------- /fiona/fio/ls.py: -------------------------------------------------------------------------------- 1 | """$ fiona ls""" 2 | 3 | 4 | import json 5 | 6 | import click 7 | from cligj import indent_opt 8 | 9 | import fiona 10 | from fiona.fio import options, with_context_env 11 | 12 | 13 | @click.command() 14 | @click.argument('input', required=True) 15 | @indent_opt 16 | @options.open_opt 17 | @click.pass_context 18 | @with_context_env 19 | def ls(ctx, input, indent, open_options): 20 | """ 21 | List layers in a datasource. 22 | """ 23 | result = fiona.listlayers(input, **open_options) 24 | click.echo(json.dumps(result, indent=indent)) 25 | -------------------------------------------------------------------------------- /fiona/fio/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Main click group for the CLI. Needs to be isolated for entry-point loading. 
3 | """ 4 | 5 | 6 | import itertools 7 | import logging 8 | import sys 9 | 10 | import click 11 | from click_plugins import with_plugins 12 | from cligj import verbose_opt, quiet_opt 13 | 14 | if sys.version_info < (3, 10): 15 | from importlib_metadata import entry_points 16 | else: 17 | from importlib.metadata import entry_points 18 | 19 | import fiona 20 | from fiona import __version__ as fio_version 21 | from fiona.session import AWSSession, DummySession 22 | from fiona.fio.bounds import bounds 23 | from fiona.fio.calc import calc 24 | from fiona.fio.cat import cat 25 | from fiona.fio.collect import collect 26 | from fiona.fio.distrib import distrib 27 | from fiona.fio.dump import dump 28 | from fiona.fio.env import env 29 | from fiona.fio.info import info 30 | from fiona.fio.insp import insp 31 | from fiona.fio.load import load 32 | from fiona.fio.ls import ls 33 | from fiona.fio.rm import rm 34 | 35 | # The "calc" extras require pyparsing and shapely. 36 | try: 37 | import pyparsing 38 | import shapely 39 | from fiona.fio.features import filter_cmd, map_cmd, reduce_cmd 40 | 41 | supports_calc = True 42 | except ImportError: 43 | supports_calc = False 44 | 45 | 46 | def configure_logging(verbosity): 47 | log_level = max(10, 30 - 10 * verbosity) 48 | logging.basicConfig(stream=sys.stderr, level=log_level) 49 | 50 | 51 | @with_plugins( 52 | itertools.chain( 53 | entry_points(group="fiona.fio_plugins"), 54 | ) 55 | ) 56 | @click.group() 57 | @verbose_opt 58 | @quiet_opt 59 | @click.option( 60 | "--aws-profile", 61 | help="Select a profile from the AWS credentials file") 62 | @click.option( 63 | "--aws-no-sign-requests", 64 | is_flag=True, 65 | help="Make requests anonymously") 66 | @click.option( 67 | "--aws-requester-pays", 68 | is_flag=True, 69 | help="Requester pays data transfer costs") 70 | @click.version_option(fio_version) 71 | @click.version_option(fiona.__gdal_version__, '--gdal-version', 72 | prog_name='GDAL') 73 | @click.version_option(sys.version, 
'--python-version', prog_name='Python') 74 | @click.pass_context 75 | def main_group( 76 | ctx, verbose, quiet, aws_profile, aws_no_sign_requests, 77 | aws_requester_pays): 78 | """Fiona command line interface. 79 | """ 80 | verbosity = verbose - quiet 81 | configure_logging(verbosity) 82 | ctx.obj = {} 83 | ctx.obj["verbosity"] = verbosity 84 | ctx.obj["aws_profile"] = aws_profile 85 | envopts = {"CPL_DEBUG": (verbosity > 2)} 86 | if aws_profile or aws_no_sign_requests: 87 | session = AWSSession( 88 | profile_name=aws_profile, 89 | aws_unsigned=aws_no_sign_requests, 90 | requester_pays=aws_requester_pays, 91 | ) 92 | else: 93 | session = DummySession() 94 | ctx.obj["env"] = fiona.Env(session=session, **envopts) 95 | 96 | 97 | main_group.add_command(bounds) 98 | main_group.add_command(calc) 99 | main_group.add_command(cat) 100 | main_group.add_command(collect) 101 | main_group.add_command(distrib) 102 | main_group.add_command(dump) 103 | main_group.add_command(env) 104 | main_group.add_command(info) 105 | main_group.add_command(insp) 106 | main_group.add_command(load) 107 | main_group.add_command(ls) 108 | main_group.add_command(rm) 109 | 110 | if supports_calc: 111 | main_group.add_command(map_cmd) 112 | main_group.add_command(filter_cmd) 113 | main_group.add_command(reduce_cmd) 114 | -------------------------------------------------------------------------------- /fiona/fio/options.py: -------------------------------------------------------------------------------- 1 | """Common commandline options for `fio`""" 2 | 3 | 4 | from collections import defaultdict 5 | 6 | import click 7 | 8 | 9 | src_crs_opt = click.option('--src-crs', '--src_crs', help="Source CRS.") 10 | dst_crs_opt = click.option('--dst-crs', '--dst_crs', help="Destination CRS.") 11 | 12 | 13 | def cb_layer(ctx, param, value): 14 | """Let --layer be a name or index.""" 15 | if value is None or not value.isdigit(): 16 | return value 17 | else: 18 | return int(value) 19 | 20 | 21 | def 
cb_multilayer(ctx, param, value): 22 | """ 23 | Transform layer options from strings ("1:a,1:b", "2:a,2:c,2:z") to 24 | { 25 | '1': ['a', 'b'], 26 | '2': ['a', 'c', 'z'] 27 | } 28 | """ 29 | out = defaultdict(list) 30 | for raw in value: 31 | for v in raw.split(','): 32 | ds, name = v.split(':') 33 | out[ds].append(name) 34 | return out 35 | 36 | 37 | def cb_key_val(ctx, param, value): 38 | """ 39 | click callback to validate `--opt KEY1=VAL1 --opt KEY2=VAL2` and collect 40 | in a dictionary like the one below, which is what the CLI function receives. 41 | If no value or `None` is received then an empty dictionary is returned. 42 | 43 | { 44 | 'KEY1': 'VAL1', 45 | 'KEY2': 'VAL2' 46 | } 47 | 48 | Note: `==VAL` breaks this as `str.split('=', 1)` is used. 49 | 50 | """ 51 | if not value: 52 | return {} 53 | else: 54 | out = {} 55 | for pair in value: 56 | if "=" not in pair: 57 | raise click.BadParameter( 58 | f"Invalid syntax for KEY=VAL arg: {pair}" 59 | ) 60 | else: 61 | k, v = pair.split("=", 1) 62 | k = k.lower() 63 | v = v.lower() 64 | out[k] = None if v.lower() in ["none", "null", "nil", "nada"] else v 65 | return out 66 | 67 | 68 | def validate_multilayer_file_index(files, layerdict): 69 | """ 70 | Ensure file indexes provided in the --layer option are valid 71 | """ 72 | for key in layerdict.keys(): 73 | if key not in [str(k) for k in range(1, len(files) + 1)]: 74 | layer = key + ":" + layerdict[key][0] 75 | raise click.BadParameter(f"Layer {layer} does not exist") 76 | 77 | 78 | creation_opt = click.option( 79 | "--co", 80 | "--profile", 81 | "creation_options", 82 | metavar="NAME=VALUE", 83 | multiple=True, 84 | callback=cb_key_val, 85 | help="Driver specific creation options. See the documentation for the selected output driver for more information.", 86 | ) 87 | 88 | 89 | open_opt = click.option( 90 | "--oo", 91 | "open_options", 92 | metavar="NAME=VALUE", 93 | multiple=True, 94 | callback=cb_key_val, 95 | help="Driver specific open options. 
@click.command(help="Remove a datasource or an individual layer.")
@click.argument("input", required=True)
@click.option("--layer", type=str, default=None, required=False, help="Name of layer to remove.")
@click.option("--yes", is_flag=True)
@click.pass_context
@with_context_env
def rm(ctx, input, layer, yes):
    """Delete an entire datasource, or one layer of it when --layer is given.

    Prompts for confirmation unless --yes was passed; aborts the CLI
    with a non-zero exit status if removal fails.
    """
    kind = "datasource" if layer is None else "layer"

    if not yes:
        click.confirm(f"The {kind} will be removed. Are you sure?", abort=True)

    try:
        fiona.remove(input, layer=layer)
    except Exception:
        # Log the full traceback, then signal click to exit non-zero.
        logger.exception(f"Failed to remove {kind}.")
        raise click.Abort()
class FieldSkipLogFilter(logging.Filter):
    """Filter field skip log messages.

    At most, one message per field skipped per loop will be passed.
    """

    def __init__(self, name=''):
        """Initialize the filter.

        Parameters
        ----------
        name : str, optional
            Passed through to logging.Filter.
        """
        super().__init__(name)
        # Messages that have already been passed once. Grows for the
        # lifetime of the filter, which is why it is meant to be
        # attached per-loop rather than installed permanently.
        self.seen_msgs = set()

    def filter(self, record):
        """Pass a "Skipping field" record only the first time it is seen.

        All other records always pass.
        """
        msg = record.getMessage()
        if msg.startswith("Skipping field"):
            retval = msg not in self.seen_msgs
            self.seen_msgs.add(msg)
            return retval
        else:
            # Fix: return a bool for consistency with the branch above;
            # the original returned the int 1 here.
            return True
5 | """ 6 | 7 | import warnings 8 | 9 | from fiona._path import _ParsedPath as ParsedPath 10 | from fiona._path import _UnparsedPath as UnparsedPath 11 | from fiona._path import _parse_path as parse_path 12 | from fiona._path import _vsi_path as vsi_path 13 | from fiona.errors import FionaDeprecationWarning 14 | 15 | warnings.warn( 16 | "fiona.path will be removed in version 2.0.", FionaDeprecationWarning 17 | ) 18 | -------------------------------------------------------------------------------- /fiona/rfc3339.py: -------------------------------------------------------------------------------- 1 | # Fiona's date and time is founded on RFC 3339. 2 | # 3 | # OGR knows 3 time "zones": GMT, "local time", amd "unknown". Fiona, when 4 | # writing will convert times with a timezone offset to GMT (Z) and otherwise 5 | # will write times with the unknown zone. 6 | 7 | import logging 8 | import re 9 | 10 | log = logging.getLogger("Fiona") 11 | 12 | pattern_date = re.compile(r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)") 13 | pattern_time = re.compile( 14 | r"(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") 15 | pattern_datetime = re.compile( 16 | r"(\d\d\d\d)(-)?(\d\d)(-)?(\d\d)(T)?(\d\d)(:)?(\d\d)(:)?(\d\d)?(\.\d+)?(Z|([+-])?(\d\d)?(:)?(\d\d))?") 17 | 18 | 19 | class group_accessor: 20 | def __init__(self, m): 21 | self.match = m 22 | 23 | def group(self, i): 24 | try: 25 | return self.match.group(i) or 0 26 | except IndexError: 27 | return 0 28 | 29 | 30 | def parse_time(text): 31 | """ Given a time, returns a datetime tuple 32 | 33 | Parameters 34 | ---------- 35 | text: string to be parsed 36 | 37 | Returns 38 | ------- 39 | (int, int , int, int, int, int, int, int): 40 | datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) 41 | 42 | """ 43 | match = re.search(pattern_time, text) 44 | if match is None: 45 | raise ValueError(f"Time data '{text}' does not match pattern") 46 | g = group_accessor(match) 47 | 
log.debug("Match groups: %s", match.groups()) 48 | 49 | if g.group(8) == '-': 50 | tz = -1.0 * (int(g.group(9)) * 60 + int(g.group(11))) 51 | elif g.group(8) == '+': 52 | tz = int(g.group(9)) * 60 + int(g.group(11)) 53 | else: 54 | tz = None 55 | 56 | return (0, 0, 0, 57 | int(g.group(1)), 58 | int(g.group(3)), 59 | int(g.group(5)), 60 | int(1000000.0 * float(g.group(6))), 61 | tz 62 | ) 63 | 64 | 65 | def parse_date(text): 66 | """Given a date, returns a datetime tuple 67 | 68 | Parameters 69 | ---------- 70 | text: string to be parsed 71 | 72 | Returns 73 | ------- 74 | (int, int , int, int, int, int, int, int): 75 | datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) 76 | """ 77 | match = re.search(pattern_date, text) 78 | if match is None: 79 | raise ValueError(f"Time data '{text}' does not match pattern") 80 | g = group_accessor(match) 81 | log.debug("Match groups: %s", match.groups()) 82 | return ( 83 | int(g.group(1)), 84 | int(g.group(3)), 85 | int(g.group(5)), 86 | 0, 0, 0, 0, None) 87 | 88 | 89 | def parse_datetime(text): 90 | """Given a datetime, returns a datetime tuple 91 | 92 | Parameters 93 | ---------- 94 | text: string to be parsed 95 | 96 | Returns 97 | ------- 98 | (int, int , int, int, int, int, int, int): 99 | datetime tuple: (year, month, day, hour, minute, second, microsecond, utcoffset in minutes or None) 100 | """ 101 | match = re.search(pattern_datetime, text) 102 | if match is None: 103 | raise ValueError(f"Time data '{text}' does not match pattern") 104 | g = group_accessor(match) 105 | log.debug("Match groups: %s", match.groups()) 106 | 107 | if g.group(14) == '-': 108 | tz = -1.0 * (int(g.group(15)) * 60 + int(g.group(17))) 109 | elif g.group(14) == '+': 110 | tz = int(g.group(15)) * 60 + int(g.group(17)) 111 | else: 112 | tz = None 113 | 114 | return ( 115 | int(g.group(1)), 116 | int(g.group(3)), 117 | int(g.group(5)), 118 | int(g.group(7)), 119 | int(g.group(9)), 120 | 
def vsi_path(path, vsi=None, archive=None):
    """Convert a path plus VFS scheme/archive into a GDAL VSI path.

    With a *vsi* scheme chain (e.g. "zip+https"), builds an OGR VSI
    path (see cpl_vsi.h); otherwise *path* is returned unchanged.
    """
    if not vsi:
        return path
    # Each scheme in the chain maps to a /vsiXXX/ component.
    prefix = "/".join(f"vsi{SCHEMES[scheme]}" for scheme in vsi.split("+"))
    if archive:
        return f"/{prefix}/{archive}{path}"
    return f"/{prefix}/{path}"
"C:\") confuse `urlparse` as they look like 62 | # URL schemes 63 | if sys.platform == "win32" and re.match("^[a-zA-Z]\\:", path): 64 | return path, None, None 65 | if vfs: 66 | parts = urlparse(vfs) 67 | scheme = parts.scheme 68 | archive = parts.path 69 | if parts.netloc and parts.netloc != 'localhost': 70 | archive = parts.netloc + archive 71 | else: 72 | parts = urlparse(path) 73 | scheme = parts.scheme 74 | path = parts.path 75 | if parts.netloc and parts.netloc != 'localhost': 76 | if scheme.split("+")[-1] in CURLSCHEMES: 77 | # We need to deal with cases such as zip+https://server.com/data.zip 78 | path = f"{scheme.split('+')[-1]}://{parts.netloc}{path}" 79 | else: 80 | path = parts.netloc + path 81 | if scheme in SCHEMES: 82 | parts = path.split('!') 83 | path = parts.pop() if parts else None 84 | archive = parts.pop() if parts else None 85 | 86 | scheme = None if not scheme else scheme 87 | return path, scheme, archive 88 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=67.8", "cython~=3.0.2"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "fiona" 7 | dynamic = ["readme", "version"] 8 | authors = [ 9 | {name = "Sean Gillies"}, 10 | ] 11 | maintainers = [ 12 | {name = "Fiona contributors"}, 13 | ] 14 | description = "Fiona reads and writes spatial data files" 15 | keywords = ["gis", "vector", "feature", "data"] 16 | license = {text = "BSD 3-Clause"} 17 | classifiers = [ 18 | "Development Status :: 5 - Production/Stable", 19 | "Intended Audience :: Developers", 20 | "Intended Audience :: Science/Research", 21 | "License :: OSI Approved :: BSD License", 22 | "Operating System :: OS Independent", 23 | "Programming Language :: Python :: 3.8", 24 | "Programming Language :: Python :: 3.9", 25 | "Programming Language :: Python :: 3.10", 26 | "Programming 
Language :: Python :: 3.11", 27 | "Programming Language :: Python :: 3.12", 28 | "Topic :: Scientific/Engineering :: GIS", 29 | ] 30 | requires-python = ">=3.8" 31 | dependencies = [ 32 | "attrs>=19.2.0", 33 | "certifi", 34 | "click~=8.0", 35 | "click-plugins>=1.0", 36 | "cligj>=0.5", 37 | 'importlib-metadata;python_version<"3.10"', 38 | ] 39 | 40 | [project.optional-dependencies] 41 | all = ["fiona[calc,s3,test]"] 42 | calc = ["pyparsing", "shapely"] 43 | s3 = ["boto3>=1.3.1"] 44 | test = [ 45 | "aiohttp", 46 | "fsspec", 47 | "fiona[s3]", 48 | "pytest>=7", 49 | "pytest-cov", 50 | "pytz", 51 | ] 52 | 53 | [project.scripts] 54 | fio = "fiona.fio.main:main_group" 55 | 56 | [project.urls] 57 | Documentation = "https://fiona.readthedocs.io/" 58 | Repository = "https://github.com/Toblerity/Fiona" 59 | 60 | [tool.setuptools] 61 | include-package-data = false 62 | 63 | [tool.setuptools.dynamic] 64 | version = {attr = "fiona.__version__"} 65 | readme = {file = ["README.rst", "CHANGES.txt", "CREDITS.txt"]} 66 | 67 | [tool.setuptools.packages.find] 68 | include = ["fiona", "fiona.*"] 69 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | error 4 | ignore:.*Plugin file tracers.* 5 | ignore:.*Sequential read of iterator was interrupted*:RuntimeWarning 6 | ignore:.*negative slices or start values other than zero may be slow*:RuntimeWarning 7 | ignore:.*negative step size may be slow*:RuntimeWarning 8 | ignore:.*is buggy and will be removed in Fiona 2.0.* 9 | ignore:.*unclosed =1.3.1 5 | coverage~=6.5 6 | cython>=3 7 | fsspec 8 | pyparsing 9 | pytest~=7.2 10 | pytest-cov~=4.0 11 | pytz==2022.6 12 | requests 13 | setuptools 14 | shapely 15 | wheel 16 | -------------------------------------------------------------------------------- /requirements.txt: 
-------------------------------------------------------------------------------- 1 | attrs>=19.2.0 2 | click~=8.0 3 | click-plugins 4 | cligj>=0.5.0 5 | importlib-metadata;python_version<"3.10" 6 | certifi 7 | -------------------------------------------------------------------------------- /scripts/check_deprecated.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os 3 | from collections import defaultdict 4 | import re 5 | 6 | ignored_files = {'_shim.pyx', '_shim1.pyx', '_shim1.pxd', 'ogrext1.pxd'} 7 | 8 | # List of deprecated methods from https://gdal.org/doxygen/deprecated.html#_deprecated000028 9 | deprecated = { 10 | 'CPL_LSBINT16PTR', 11 | 'CPL_LSBINT32PTR(x)', 12 | 'OGR_Dr_CopyDataSource', 13 | 'OGR_Dr_CreateDataSource', 14 | 'OGR_Dr_DeleteDataSource', 15 | 'OGR_Dr_Open', 16 | 'OGR_Dr_TestCapability', 17 | 'OGR_DS_CopyLayer', 18 | 'OGR_DS_CreateLayer', 19 | 'OGR_DS_DeleteLayer', 20 | 'OGR_DS_Destroy', 21 | 'OGR_DS_ExecuteSQL', 22 | 'OGR_DS_GetDriver', 23 | 'OGR_DS_GetLayer', 24 | 'OGR_DS_GetLayerByName', 25 | 'OGR_DS_GetLayerCount', 26 | 'OGR_DS_GetName', 27 | 'OGR_DS_ReleaseResultSet', 28 | 'OGR_DS_TestCapability', 29 | 'OGR_G_GetCoordinateDimension', 30 | 'OGR_G_SetCoordinateDimension', 31 | 'OGRGetDriver', 32 | 'OGRGetDriverByName', 33 | 'OGRGetDriverCount', 34 | 'OGROpen', 35 | 'OGROpenShared', 36 | 'OGRRegisterAll', 37 | 'OGRReleaseDataSource', 38 | } 39 | 40 | found_lines = defaultdict(list) 41 | files = glob.glob('fiona/*.pyx') + glob.glob('fiona/*.pxd') 42 | for path in files: 43 | if os.path.basename(path) in ignored_files: 44 | continue 45 | 46 | with open(path) as f: 47 | for i, line in enumerate(f): 48 | for deprecated_method in deprecated: 49 | match = re.search(fr'{deprecated_method}\s*\(', line) 50 | if match: 51 | found_lines[path].append((i+1, line.strip(), deprecated_method)) 52 | 53 | for path in sorted(found_lines): 54 | print(path) 55 | for line_nr, line, method in 
def test_urls(files):
    """Fetch every http(s) URL found in *files* and print failures.

    Parameters
    ----------
    files : iterable of str
        Paths of text files to scan for URLs.

    A line is printed for every URL that does not return HTTP 200 or
    that raises a requests exception.
    """
    headers = {'User-Agent': 'Mozilla/5.0 (compatible; MSIE 6.0; Fiona CI check)'}

    for fpath in files:
        print(f"Processing: {fpath}")
        with open(fpath) as f:
            text = f.read()
        urls = re.findall('(https?:\\/\\/[^\\s`>\'"()]+)', text)

        for url in urls:
            http_code = None
            warn = ''
            try:
                # Fix: the original had no timeout, so one dead server
                # could hang the whole CI check indefinitely. Timeouts
                # raise requests exceptions, caught below.
                r = requests.get(url, headers=headers, timeout=30)
                http_code = r.status_code
                if http_code != 200:
                    warn = ' <--- !!!'
            except Exception as e:
                warn = str(e)

            if warn:
                print(f"\t {url} HTTP code: {http_code} {warn}")
3 | 4 | [1] http://nationalmap.gov/small_scale/atlasftp.html 5 | -------------------------------------------------------------------------------- /tests/data/coutwildrnp.cpg: -------------------------------------------------------------------------------- 1 | ISO-8859-1 2 | -------------------------------------------------------------------------------- /tests/data/coutwildrnp.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/coutwildrnp.dbf -------------------------------------------------------------------------------- /tests/data/coutwildrnp.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] -------------------------------------------------------------------------------- /tests/data/coutwildrnp.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/coutwildrnp.shp -------------------------------------------------------------------------------- /tests/data/coutwildrnp.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/coutwildrnp.shx -------------------------------------------------------------------------------- /tests/data/coutwildrnp.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/coutwildrnp.zip -------------------------------------------------------------------------------- /tests/data/example.topojson: 
-------------------------------------------------------------------------------- 1 | { 2 | "type": "Topology", 3 | "objects": { 4 | "example": { 5 | "type": "GeometryCollection", 6 | "geometries": [ 7 | { 8 | "type": "Point", 9 | "properties": { 10 | "prop0": "value0" 11 | }, 12 | "coordinates": [102, 0.5] 13 | }, 14 | { 15 | "type": "LineString", 16 | "properties": { 17 | "prop0": "value0", 18 | "prop1": 0 19 | }, 20 | "arcs": [0] 21 | }, 22 | { 23 | "type": "Polygon", 24 | "properties": { 25 | "prop0": "value0", 26 | "prop1": { 27 | "this": "that" 28 | } 29 | }, 30 | "arcs": [[-2]] 31 | } 32 | ] 33 | } 34 | }, 35 | "arcs": [ 36 | [[102, 0], [103, 1], [104, 0], [105, 1]], 37 | [[100, 0], [101, 0], [101, 1], [100, 1], [100, 0]] 38 | ] 39 | } 40 | -------------------------------------------------------------------------------- /tests/data/gre.cpg: -------------------------------------------------------------------------------- 1 | UTF-8 2 | -------------------------------------------------------------------------------- /tests/data/gre.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/gre.dbf -------------------------------------------------------------------------------- /tests/data/gre.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] -------------------------------------------------------------------------------- /tests/data/gre.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/gre.shp -------------------------------------------------------------------------------- /tests/data/gre.shx: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/gre.shx -------------------------------------------------------------------------------- /tests/data/multicurve.gml: -------------------------------------------------------------------------------- 1 | 2 | 8 | -0.9243407 46.27182575052962.70658958605966 47.6054714507864 9 | 10 | 11 | 12 | -0.9243407 46.27182575052962.70658958605966 47.6054714507864 13 | -0.9105691 47.21951 1.414634 47.170731.414634 47.17073 2.423818 47.48377 1.407531 46.726681.407531 46.72668 -0.9243407 46.72668 14 | MULTICURVE (COMPOUNDCURVE ((-0.9105691 47.21951,1.414634 47.17073),CIRCULARSTRING (1.414634 47.17073,2.423818 47.48377,1.407531 46.72668),(1.407531 46.72668,-0.9243407 46.72668))) 15 | 8.39459167219456 16 | 17 | 18 | 19 | -------------------------------------------------------------------------------- /tests/data/multicurve.xsd: -------------------------------------------------------------------------------- 1 | 2 | 10 | 11 | 12 | 0 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | -------------------------------------------------------------------------------- /tests/data/test_gpx.gpx: -------------------------------------------------------------------------------- 1 | 2 | 9 | 10 | 11 | 12 | 13 | 11.0 14 | 10.9 15 | 10.7 16 | 10.5 17 | 10.4 18 | 10.2 19 | 10.0 20 | 10.0 21 | 10.0 22 | 10.2 23 | 10.4 24 | 10.5 25 | 10.5 26 | 10.1 27 | 9.6 28 | 9.1 29 | 8.3 30 | 7.2 31 | 6.6 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /tests/data/test_tin.csv: -------------------------------------------------------------------------------- 1 | WKT,id 2 | "TIN (((0 0 0, 0 0 1, 0 1 
0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0)))",1 3 | "TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0))",2 4 | "GEOMETRYCOLLECTION (TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0))), TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0)))",3 5 | -------------------------------------------------------------------------------- /tests/data/test_tin.dbf: -------------------------------------------------------------------------------- 1 | v AQWidCP 1  -------------------------------------------------------------------------------- /tests/data/test_tin.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/test_tin.shp -------------------------------------------------------------------------------- /tests/data/test_tin.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/test_tin.shx -------------------------------------------------------------------------------- /tests/data/test_tz.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [ 4 | { 5 | "type": "Feature", 6 | "properties": { 7 | "test": "2015-04-22T00:00:00+07:00" 8 | }, 9 | "geometry": { 10 | "type": "Point", 11 | "coordinates": [ 12 | -79.4, 13 | 43.6 14 | ] 15 | } 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /tests/data/testopenfilegdb.gdb.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/Fiona/ec9768a9389530a0570446e3c34ae91448d28cf4/tests/data/testopenfilegdb.gdb.zip -------------------------------------------------------------------------------- /tests/data/trio.geojson: 
-------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [ 4 | { 5 | "type": "Feature", 6 | "properties": { 7 | "name": "Le château d'eau" 8 | }, 9 | "geometry": { 10 | "type": "Point", 11 | "coordinates": [ 12 | 3.869011402130127, 13 | 43.611401128587104 14 | ] 15 | } 16 | }, 17 | { 18 | "type": "Feature", 19 | "properties": { 20 | "aqueduct": "yes" 21 | }, 22 | "geometry": { 23 | "type": "LineString", 24 | "coordinates": [ 25 | [ 26 | 3.8645052909851074, 27 | 43.61172738574996 28 | ], 29 | [ 30 | 3.868989944458008, 31 | 43.61140889663537 32 | ] 33 | ] 34 | } 35 | }, 36 | { 37 | "type": "Feature", 38 | "properties": {"name": "promenade du Peyrou", "architect": "Giral"}, 39 | "geometry": { 40 | "type": "Polygon", 41 | "coordinates": [ 42 | [ 43 | [ 44 | 3.8684856891632085, 45 | 43.61205364114294 46 | ], 47 | [ 48 | 3.8683247566223145, 49 | 43.6108340583545 50 | ], 51 | [ 52 | 3.8685393333435054, 53 | 43.610748608951816 54 | ], 55 | [ 56 | 3.871554136276245, 57 | 43.610577709782206 58 | ], 59 | [ 60 | 3.871725797653198, 61 | 43.61063208684338 62 | ], 63 | [ 64 | 3.8719189167022705, 65 | 43.61183613774427 66 | ], 67 | [ 68 | 3.8684856891632085, 69 | 43.61205364114294 70 | ] 71 | ] 72 | ] 73 | } 74 | } 75 | ] 76 | } 77 | -------------------------------------------------------------------------------- /tests/data/trio.seq: -------------------------------------------------------------------------------- 1 | {"geometry": {"coordinates": [3.869011402130127, 43.611401128587104], "type": "Point"}, "id": "0", "properties": {"name": "Le ch\u00e2teau d'eau"}, "type": "Feature"} 2 | {"geometry": {"coordinates": [[3.8645052909851074, 43.61172738574996], [3.868989944458008, 43.61140889663537]], "type": "LineString"}, "id": "1", "properties": {"aqueduct": "yes"}, "type": "Feature"} 3 | {"geometry": {"coordinates": [[[3.8684856891632085, 43.61205364114294], [3.8683247566223145, 43.6108340583545], 
[3.8685393333435054, 43.610748608951816], [3.871554136276245, 43.610577709782206], [3.871725797653198, 43.61063208684338], [3.8719189167022705, 43.61183613774427], [3.8684856891632085, 43.61205364114294]]], "type": "Polygon"}, "id": "2", "properties": {"architect": "Giral", "name": "promenade du Peyrou"}, "type": "Feature"} 4 | -------------------------------------------------------------------------------- /tests/test__env.py: -------------------------------------------------------------------------------- 1 | """Tests of _env util module""" 2 | 3 | from unittest import mock 4 | 5 | import pytest 6 | 7 | from fiona._env import GDALDataFinder, PROJDataFinder 8 | 9 | from .conftest import gdal_version 10 | 11 | 12 | @pytest.fixture 13 | def mock_wheel(tmpdir): 14 | """A fake rasterio wheel""" 15 | moduledir = tmpdir.mkdir("rasterio") 16 | moduledir.ensure("__init__.py") 17 | moduledir.ensure("_env.py") 18 | moduledir.ensure("gdal_data/header.dxf") 19 | moduledir.ensure("proj_data/epsg") 20 | return moduledir 21 | 22 | 23 | @pytest.fixture 24 | def mock_fhs(tmpdir): 25 | """A fake FHS system""" 26 | tmpdir.ensure("share/gdal/header.dxf") 27 | tmpdir.ensure("share/proj/epsg") 28 | return tmpdir 29 | 30 | 31 | @pytest.fixture 32 | def mock_debian(tmpdir): 33 | """A fake Debian multi-install system""" 34 | tmpdir.ensure(f"share/gdal/{gdal_version.major}.{gdal_version.minor}/header.dxf") 35 | tmpdir.ensure("share/proj/epsg") 36 | return tmpdir 37 | 38 | 39 | def test_search_wheel_gdal_data_failure(tmpdir): 40 | """Fail to find GDAL data in a non-wheel""" 41 | finder = GDALDataFinder() 42 | assert not finder.search_wheel(str(tmpdir)) 43 | 44 | 45 | def test_search_wheel_gdal_data(mock_wheel): 46 | """Find GDAL data in a wheel""" 47 | finder = GDALDataFinder() 48 | assert finder.search_wheel(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("gdal_data")) 49 | 50 | 51 | def test_search_prefix_gdal_data_failure(tmpdir): 52 | """Fail to find GDAL data in a bogus 
prefix""" 53 | finder = GDALDataFinder() 54 | assert not finder.search_prefix(str(tmpdir)) 55 | 56 | 57 | def test_search_prefix_gdal_data(mock_fhs): 58 | """Find GDAL data under prefix""" 59 | finder = GDALDataFinder() 60 | assert finder.search_prefix(str(mock_fhs)) == str(mock_fhs.join("share").join("gdal")) 61 | 62 | 63 | def test_search_debian_gdal_data_failure(tmpdir): 64 | """Fail to find GDAL data in a bogus Debian location""" 65 | finder = GDALDataFinder() 66 | assert not finder.search_debian(str(tmpdir)) 67 | 68 | 69 | def test_search_debian_gdal_data(mock_debian): 70 | """Find GDAL data under Debian locations""" 71 | finder = GDALDataFinder() 72 | assert finder.search_debian(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join(f"{gdal_version.major}.{gdal_version.minor}")) 73 | 74 | 75 | def test_search_gdal_data_wheel(mock_wheel): 76 | finder = GDALDataFinder() 77 | assert finder.search(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("gdal_data")) 78 | 79 | 80 | def test_search_gdal_data_fhs(mock_fhs): 81 | finder = GDALDataFinder() 82 | assert finder.search(str(mock_fhs)) == str(mock_fhs.join("share").join("gdal")) 83 | 84 | 85 | def test_search_gdal_data_debian(mock_debian): 86 | """Find GDAL data under Debian locations""" 87 | finder = GDALDataFinder() 88 | assert finder.search(str(mock_debian)) == str(mock_debian.join("share").join("gdal").join(f"{gdal_version.major}.{gdal_version.minor}")) 89 | 90 | 91 | def test_search_wheel_proj_data_failure(tmpdir): 92 | """Fail to find GDAL data in a non-wheel""" 93 | finder = PROJDataFinder() 94 | assert not finder.search_wheel(str(tmpdir)) 95 | 96 | 97 | def test_search_wheel_proj_data(mock_wheel): 98 | """Find GDAL data in a wheel""" 99 | finder = PROJDataFinder() 100 | assert finder.search_wheel(str(mock_wheel.join("_env.py"))) == str(mock_wheel.join("proj_data")) 101 | 102 | 103 | def test_search_prefix_proj_data_failure(tmpdir): 104 | """Fail to find GDAL data in a bogus prefix""" 
def test_parse_tar_windows(monkeypatch):
    """Parse a tar+ Windows path.

    Renamed from ``test_parse_zip_windows``: this function previously
    duplicated the name of the zip test above, shadowing it so the zip
    case never ran under pytest.
    """
    monkeypatch.setattr(sys, "platform", "win32")
    path = _parse_path("tar://D:\\a\\Fiona\\Fiona\\tests\\data\\coutwildrnp.tar!testing/coutwildrnp.shp")
    vsi_path = _vsi_path(path)
    assert vsi_path.startswith("/vsitar/D")
    assert vsi_path.endswith("coutwildrnp.tar/testing/coutwildrnp.shp")
https://trac.osgeo.org/gdal/wiki/rfc31_ogr_64 2 | 3 | Shapefile: OFTInteger fields are created by default with a width of 9 4 | characters, so to be unambiguously read as OFTInteger (and if specifying 5 | integer that require 10 or 11 characters. the field is dynamically extended 6 | like managed since a few versions). OFTInteger64 fields are created by default 7 | with a width of 18 digits, so to be unambiguously read as OFTInteger64, and 8 | extended to 19 or 20 if needed. Integer fields of width between 10 and 18 9 | will be read as OFTInteger64. Above they will be treated as OFTReal. In 10 | previous GDAL versions, Integer fields were created with a default with of 10, 11 | and thus will be now read as OFTInteger64. An open option, DETECT_TYPE=YES, can 12 | be specified so as OGR does a full scan of the DBF file to see if integer 13 | fields of size 10 or 11 hold 32 bit or 64 bit values and adjust the type 14 | accordingly (and same for integer fields of size 19 or 20, in case of overflow 15 | of 64 bit integer, OFTReal is chosen) 16 | """ 17 | 18 | import pytest 19 | 20 | import fiona 21 | from fiona.env import calc_gdal_version_num, get_gdal_version_num 22 | from fiona.model import Feature 23 | 24 | 25 | def testCreateBigIntSchema(tmpdir): 26 | name = str(tmpdir.join("output1.shp")) 27 | 28 | a_bigint = 10 ** 18 - 1 29 | fieldname = "abigint" 30 | 31 | kwargs = { 32 | "driver": "ESRI Shapefile", 33 | "crs": "EPSG:4326", 34 | "schema": {"geometry": "Point", "properties": [(fieldname, "int:10")]}, 35 | } 36 | 37 | with fiona.open(name, "w", **kwargs) as dst: 38 | rec = {} 39 | rec["geometry"] = {"type": "Point", "coordinates": (0, 0)} 40 | rec["properties"] = {fieldname: a_bigint} 41 | dst.write(Feature.from_dict(**rec)) 42 | 43 | with fiona.open(name) as src: 44 | if fiona.gdal_version >= (2, 0, 0): 45 | first = next(iter(src)) 46 | assert first["properties"][fieldname] == a_bigint 47 | 48 | 49 | @pytest.mark.parametrize("dtype", ["int", "int64"]) 50 | def 
test_issue691(tmpdir, dtype): 51 | """Type 'int' maps to 'int64'""" 52 | schema = {"geometry": "Any", "properties": {"foo": dtype}} 53 | with fiona.open( 54 | str(tmpdir.join("test.shp")), 55 | "w", 56 | driver="Shapefile", 57 | schema=schema, 58 | crs="epsg:4326", 59 | ) as dst: 60 | dst.write( 61 | Feature.from_dict( 62 | **{ 63 | "type": "Feature", 64 | "geometry": { 65 | "type": "Point", 66 | "coordinates": (-122.278015, 37.868995), 67 | }, 68 | "properties": {"foo": 3694063472}, 69 | } 70 | ) 71 | ) 72 | 73 | with fiona.open(str(tmpdir.join("test.shp"))) as src: 74 | assert src.schema["properties"]["foo"] == "int:18" 75 | first = next(iter(src)) 76 | assert first["properties"]["foo"] == 3694063472 77 | -------------------------------------------------------------------------------- /tests/test_binary_field.py: -------------------------------------------------------------------------------- 1 | """Binary BLOB field testing.""" 2 | 3 | import struct 4 | 5 | import fiona 6 | from fiona.model import Feature 7 | 8 | from .conftest import requires_gpkg 9 | 10 | 11 | @requires_gpkg 12 | def test_binary_field(tmpdir): 13 | meta = { 14 | "driver": "GPKG", 15 | "schema": { 16 | "geometry": "Point", 17 | "properties": {"name": "str", "data": "bytes"}, 18 | }, 19 | } 20 | 21 | # create some binary data 22 | input_data = struct.pack("256B", *range(256)) 23 | 24 | # write the binary data to a BLOB field 25 | filename = str(tmpdir.join("binary_test.gpkg")) 26 | with fiona.open(filename, "w", **meta) as dst: 27 | feature = Feature.from_dict( 28 | **{ 29 | "geometry": {"type": "Point", "coordinates": ((0, 0))}, 30 | "properties": { 31 | "name": "test", 32 | "data": input_data, 33 | }, 34 | } 35 | ) 36 | dst.write(feature) 37 | 38 | # read the data back and check consistency 39 | with fiona.open(filename, "r") as src: 40 | feature = next(iter(src)) 41 | assert feature.properties["name"] == "test" 42 | output_data = feature.properties["data"] 43 | assert output_data == 
input_data 44 | -------------------------------------------------------------------------------- /tests/test_bounds.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from fiona._env import get_gdal_version_tuple 3 | 4 | import fiona 5 | from fiona.drvsupport import supported_drivers, _driver_supports_mode 6 | from fiona.errors import DriverError 7 | from fiona.env import GDALVersion 8 | from tests.conftest import get_temp_filename 9 | 10 | 11 | def test_bounds_point(): 12 | g = {"type": "Point", "coordinates": [10, 10]} 13 | assert fiona.bounds(g) == (10, 10, 10, 10) 14 | 15 | 16 | def test_bounds_line(): 17 | g = {"type": "LineString", "coordinates": [[0, 0], [10, 10]]} 18 | assert fiona.bounds(g) == (0, 0, 10, 10) 19 | 20 | 21 | def test_bounds_polygon(): 22 | g = {"type": "Polygon", "coordinates": [[[0, 0], [10, 10], [10, 0]]]} 23 | assert fiona.bounds(g) == (0, 0, 10, 10) 24 | 25 | 26 | def test_bounds_z(): 27 | g = {"type": "Point", "coordinates": [10, 10, 10]} 28 | assert fiona.bounds(g) == (10, 10, 10, 10) 29 | 30 | 31 | # MapInfo File driver requires that the bounds (geographical extents) of a new file 32 | # be set before writing the first feature (https://gdal.org/drivers/vector/mitab.html) 33 | 34 | 35 | @pytest.mark.parametrize( 36 | "driver", 37 | [ 38 | driver 39 | for driver in supported_drivers 40 | if _driver_supports_mode(driver, "w") and not driver == "MapInfo File" 41 | ], 42 | ) 43 | def test_bounds(tmpdir, driver, testdata_generator): 44 | """Test if bounds are correctly calculated after writing.""" 45 | if driver == "BNA" and GDALVersion.runtime() < GDALVersion(2, 0): 46 | pytest.skip("BNA driver segfaults with gdal 1.11") 47 | if driver == "ESRI Shapefile" and get_gdal_version_tuple() < (3, 1): 48 | pytest.skip( 49 | "Bug in GDALs Shapefile driver: https://github.com/OSGeo/gdal/issues/2269" 50 | ) 51 | 52 | range1 = list(range(0, 5)) 53 | range2 = list(range(5, 10)) 54 | schema, crs, 
records1, records2, test_equal = testdata_generator( 55 | driver, range1, range2 56 | ) 57 | 58 | if not schema["geometry"] == "Point": 59 | pytest.skip("Driver does not support point geometries") 60 | 61 | filename = get_temp_filename(driver) 62 | path = str(tmpdir.join(filename)) 63 | 64 | def calc_bounds(records): 65 | xs = [] 66 | ys = [] 67 | for r in records: 68 | xs.append(r.geometry["coordinates"][0]) 69 | ys.append(r.geometry["coordinates"][1]) 70 | return min(xs), max(xs), min(ys), max(ys) 71 | 72 | with fiona.open(path, "w", crs="OGC:CRS84", driver=driver, schema=schema) as c: 73 | c.writerecords(records1) 74 | 75 | try: 76 | bounds = c.bounds 77 | assert bounds == calc_bounds(records1) 78 | except Exception as e: 79 | assert isinstance(e, DriverError) 80 | 81 | c.writerecords(records2) 82 | 83 | try: 84 | bounds = c.bounds 85 | assert bounds == calc_bounds(records1 + records2) 86 | except Exception as e: 87 | assert isinstance(e, DriverError) 88 | -------------------------------------------------------------------------------- /tests/test_collection_crs.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | 4 | import pytest 5 | 6 | import fiona 7 | import fiona.crs 8 | from fiona.errors import CRSError 9 | from .conftest import WGS84PATTERN 10 | 11 | 12 | def test_collection_crs_wkt(path_coutwildrnp_shp): 13 | with fiona.open(path_coutwildrnp_shp) as src: 14 | assert re.match(WGS84PATTERN, src.crs_wkt) 15 | 16 | 17 | def test_collection_no_crs_wkt(tmpdir, path_coutwildrnp_shp): 18 | """crs members of a dataset with no crs can be accessed safely.""" 19 | filename = str(tmpdir.join("test.shp")) 20 | with fiona.open(path_coutwildrnp_shp) as src: 21 | profile = src.meta 22 | del profile['crs'] 23 | del profile['crs_wkt'] 24 | with fiona.open(filename, 'w', **profile) as dst: 25 | assert dst.crs_wkt == "" 26 | assert dst.crs == fiona.crs.CRS() 27 | 28 | 29 | def 
test_collection_create_crs_wkt(tmpdir): 30 | """A collection can be created using crs_wkt""" 31 | filename = str(tmpdir.join("test.geojson")) 32 | wkt = 'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],AUTHORITY["EPSG","4326"]]' 33 | with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt=wkt, driver='GeoJSON') as dst: 34 | assert dst.crs_wkt.startswith('GEOGCS["WGS 84') or dst.crs_wkt.startswith('GEOGCS["GCS_WGS_1984') 35 | 36 | with fiona.open(filename) as col: 37 | assert col.crs_wkt.startswith('GEOGCS["WGS 84') or col.crs_wkt.startswith('GEOGCS["GCS_WGS_1984') 38 | 39 | 40 | def test_collection_urn_crs(tmpdir): 41 | filename = str(tmpdir.join("test.geojson")) 42 | crs = "urn:ogc:def:crs:OGC:1.3:CRS84" 43 | with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs=crs, driver='GeoJSON') as dst: 44 | assert dst.crs_wkt.startswith('GEOGCS["WGS 84') 45 | 46 | with fiona.open(filename) as col: 47 | assert col.crs_wkt.startswith('GEOGCS["WGS 84') 48 | 49 | 50 | 51 | def test_collection_invalid_crs(tmpdir): 52 | filename = str(tmpdir.join("test.geojson")) 53 | with pytest.raises(CRSError): 54 | with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs="12ab-invalid", driver='GeoJSON') as dst: 55 | pass 56 | 57 | def test_collection_invalid_crs_wkt(tmpdir): 58 | filename = str(tmpdir.join("test.geojson")) 59 | with pytest.raises(CRSError): 60 | with fiona.open(filename, 'w', schema={'geometry': 'Point', 'properties': {'foo': 'int'}}, crs_wkt="12ab-invalid", driver='GeoJSON') as dst: 61 | pass 62 | -------------------------------------------------------------------------------- /tests/test_compound_crs.py: -------------------------------------------------------------------------------- 1 | """Test of compound CRS crash avoidance""" 2 | 3 | import fiona 4 | from 
fiona.crs import CRS 5 | 6 | 7 | def test_compound_crs(data): 8 | """Don't crash""" 9 | prj = data.join("coutwildrnp.prj") 10 | prj.write("""COMPD_CS["unknown",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],VERT_CS["unknown",VERT_DATUM["unknown",2005],UNIT["metre",1.0,AUTHORITY["EPSG","9001"]],AXIS["Up",UP]]]""") 11 | with fiona.open(str(data.join("coutwildrnp.shp"))) as collection: 12 | assert isinstance(collection.crs, CRS) 13 | -------------------------------------------------------------------------------- /tests/test_curve_geometries.py: -------------------------------------------------------------------------------- 1 | """Tests of features related to GDAL RFC 49 2 | 3 | See https://trac.osgeo.org/gdal/wiki/rfc49_curve_geometries. 4 | """ 5 | 6 | import fiona 7 | 8 | 9 | def test_line_curve_conversion(path_curves_line_csv): 10 | """Convert curved geometries to linear approximations""" 11 | with fiona.open(path_curves_line_csv) as col: 12 | assert col.schema["geometry"] == "Unknown" 13 | features = list(col) 14 | assert len(features) == 9 15 | 16 | 17 | def test_multicurve_conversion(path_multicurve_gml): 18 | """Convert curved geometries to linear approximations""" 19 | with fiona.open(path_multicurve_gml) as col: 20 | assert col.schema["geometry"] == "MultiLineString" 21 | features = list(col) 22 | assert len(features) == 1 23 | -------------------------------------------------------------------------------- /tests/test_data_paths.py: -------------------------------------------------------------------------------- 1 | """Tests of GDAL and PROJ data finding""" 2 | 3 | import os.path 4 | 5 | from click.testing import CliRunner 6 | import pytest 7 | 8 | import fiona 9 | from fiona._env import GDALDataFinder, PROJDataFinder 10 | from fiona.fio.main import main_group 11 | 12 | 13 | 
@pytest.mark.wheel 14 | def test_gdal_data_wheel(): 15 | """Get GDAL data path from a wheel""" 16 | assert GDALDataFinder().search() == os.path.join(os.path.dirname(fiona.__file__), 'gdal_data') 17 | 18 | 19 | @pytest.mark.wheel 20 | def test_proj_data_wheel(): 21 | """Get PROJ data path from a wheel""" 22 | assert PROJDataFinder().search() == os.path.join(os.path.dirname(fiona.__file__), 'proj_data') 23 | 24 | 25 | @pytest.mark.wheel 26 | def test_env_gdal_data_wheel(): 27 | runner = CliRunner() 28 | result = runner.invoke(main_group, ['env', '--gdal-data']) 29 | assert result.exit_code == 0 30 | assert result.output.strip() == os.path.join(os.path.dirname(fiona.__file__), 'gdal_data') 31 | 32 | 33 | @pytest.mark.wheel 34 | def test_env_proj_data_wheel(): 35 | runner = CliRunner() 36 | result = runner.invoke(main_group, ['env', '--proj-data']) 37 | assert result.exit_code == 0 38 | assert result.output.strip() == os.path.join(os.path.dirname(fiona.__file__), 'proj_data') 39 | 40 | 41 | def test_env_gdal_data_environ(monkeypatch): 42 | monkeypatch.setenv('GDAL_DATA', '/foo/bar') 43 | runner = CliRunner() 44 | result = runner.invoke(main_group, ['env', '--gdal-data']) 45 | assert result.exit_code == 0 46 | assert result.output.strip() == '/foo/bar' 47 | 48 | 49 | @pytest.mark.parametrize("data_directory_env", ["PROJ_LIB", "PROJ_DATA"]) 50 | def test_env_proj_data_environ(data_directory_env, monkeypatch): 51 | monkeypatch.delenv('PROJ_DATA', raising=False) 52 | monkeypatch.delenv('PROJ_LIB', raising=False) 53 | monkeypatch.setenv(data_directory_env, '/foo/bar') 54 | runner = CliRunner() 55 | result = runner.invoke(main_group, ['env', '--proj-data']) 56 | assert result.exit_code == 0 57 | assert result.output.strip() == '/foo/bar' 58 | -------------------------------------------------------------------------------- /tests/test_driver_options.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os 3 | import 
tempfile 4 | 5 | import fiona 6 | from fiona.model import Feature 7 | 8 | from .conftest import get_temp_filename, requires_gdal2 9 | 10 | 11 | def test_gml_format_option(tmp_path): 12 | """Test GML dataset creation option FORMAT (see gh-968)""" 13 | 14 | schema = {"geometry": "Point", "properties": {"position": "int"}} 15 | records = [ 16 | Feature.from_dict( 17 | **{ 18 | "geometry": {"type": "Point", "coordinates": (0.0, float(i))}, 19 | "properties": {"position": i}, 20 | } 21 | ) 22 | for i in range(10) 23 | ] 24 | 25 | fpath = tmp_path.joinpath(get_temp_filename("GML")) 26 | 27 | with fiona.open(fpath, "w", driver="GML", schema=schema, FORMAT="GML3") as out: 28 | out.writerecords(records) 29 | 30 | xsd_path = list(tmp_path.glob("*.xsd"))[0] 31 | 32 | with open(xsd_path) as f: 33 | xsd = f.read() 34 | assert "http://schemas.opengis.net/gml/3.1.1" in xsd 35 | -------------------------------------------------------------------------------- /tests/test_drivers.py: -------------------------------------------------------------------------------- 1 | """Tests for Fiona's OGR driver interface.""" 2 | 3 | 4 | import logging 5 | 6 | import pytest 7 | 8 | import fiona 9 | from fiona.errors import FionaDeprecationWarning 10 | 11 | 12 | def test_options(tmpdir, path_coutwildrnp_shp): 13 | """Test that setting CPL_DEBUG=ON works and that a warning is raised.""" 14 | logfile = str(tmpdir.mkdir('tests').join('test_options.log')) 15 | logger = logging.getLogger() 16 | logger.setLevel(logging.DEBUG) 17 | fh = logging.FileHandler(logfile) 18 | fh.setLevel(logging.DEBUG) 19 | logger.addHandler(fh) 20 | 21 | # fiona.drivers() will be deprecated. 
22 | with pytest.warns(FionaDeprecationWarning): 23 | with fiona.drivers(CPL_DEBUG=True): 24 | c = fiona.open(path_coutwildrnp_shp) 25 | c.close() 26 | with open(logfile) as f: 27 | log = f.read() 28 | if fiona.gdal_version.major >= 2: 29 | assert "GDALOpen" in log 30 | else: 31 | assert "OGROpen" in log 32 | -------------------------------------------------------------------------------- /tests/test_encoding.py: -------------------------------------------------------------------------------- 1 | """Encoding tests""" 2 | 3 | from glob import glob 4 | import os 5 | import shutil 6 | 7 | import pytest 8 | 9 | import fiona 10 | 11 | from .conftest import requires_gdal2 12 | 13 | 14 | @pytest.fixture(scope='function') 15 | def gre_shp_cp1252(tmpdir): 16 | """A tempdir containing copies of gre.* files, .cpg set to cp1252 17 | 18 | The shapefile attributes are in fact utf-8 encoded. 19 | """ 20 | test_files = glob(os.path.join(os.path.dirname(__file__), 'data/gre.*')) 21 | tmpdir = tmpdir.mkdir('data') 22 | for filename in test_files: 23 | shutil.copy(filename, str(tmpdir)) 24 | tmpdir.join('gre.cpg').write('CP1252') 25 | yield tmpdir.join('gre.shp') 26 | 27 | 28 | @requires_gdal2 29 | def test_broken_encoding(gre_shp_cp1252): 30 | """Reading as cp1252 mis-encodes a Russian name""" 31 | with fiona.open(str(gre_shp_cp1252)) as src: 32 | assert src.session._get_internal_encoding() == 'utf-8' 33 | feat = next(iter(src)) 34 | assert feat['properties']['name_ru'] != 'Гренада' 35 | 36 | 37 | @requires_gdal2 38 | def test_cpg_encoding(gre_shp_cp1252): 39 | """Reads a Russian name""" 40 | gre_shp_cp1252.join('../gre.cpg').write('UTF-8') 41 | with fiona.open(str(gre_shp_cp1252)) as src: 42 | assert src.session._get_internal_encoding() == 'utf-8' 43 | feat = next(iter(src)) 44 | assert feat['properties']['name_ru'] == 'Гренада' 45 | 46 | 47 | @requires_gdal2 48 | def test_override_encoding(gre_shp_cp1252): 49 | """utf-8 override succeeds""" 50 | with 
fiona.open(str(gre_shp_cp1252), encoding='utf-8') as src: 51 | assert src.session._get_internal_encoding() == 'utf-8' 52 | assert next(iter(src))['properties']['name_ru'] == 'Гренада' 53 | -------------------------------------------------------------------------------- /tests/test_fio_bounds.py: -------------------------------------------------------------------------------- 1 | """Tests for `$ fio bounds`.""" 2 | 3 | 4 | import re 5 | 6 | from fiona.fio import bounds 7 | from fiona.fio.main import main_group 8 | 9 | 10 | def test_fail(runner): 11 | result = runner.invoke(main_group, ['bounds', ], '5') 12 | assert result.exit_code == 1 13 | 14 | 15 | def test_seq(feature_seq, runner): 16 | result = runner.invoke(main_group, ['bounds', ], feature_seq) 17 | assert result.exit_code == 0 18 | assert result.output.count('[') == result.output.count(']') == 2 19 | assert len(re.findall(r'\d*\.\d*', result.output)) == 8 20 | 21 | 22 | def test_seq_rs(feature_seq_pp_rs, runner): 23 | result = runner.invoke(main_group, ['bounds', ], feature_seq_pp_rs) 24 | assert result.exit_code == 0 25 | assert result.output.count('[') == result.output.count(']') == 2 26 | assert len(re.findall(r'\d*\.\d*', result.output)) == 8 27 | 28 | 29 | def test_precision(feature_seq, runner): 30 | result = runner.invoke(main_group, ['bounds', '--precision', 1], feature_seq) 31 | assert result.exit_code == 0 32 | assert result.output.count('[') == result.output.count(']') == 2 33 | assert len(re.findall(r'\d*\.\d{1}\D', result.output)) == 8 34 | 35 | 36 | def test_explode(feature_collection, runner): 37 | result = runner.invoke(main_group, ['bounds', '--explode'], feature_collection) 38 | assert result.exit_code == 0 39 | assert result.output.count('[') == result.output.count(']') == 2 40 | assert len(re.findall(r'\d*\.\d*', result.output)) == 8 41 | 42 | 43 | def test_explode_pp(feature_collection_pp, runner): 44 | result = runner.invoke(main_group, ['bounds', '--explode'], feature_collection_pp) 45 
| assert result.exit_code == 0 46 | assert result.output.count('[') == result.output.count(']') == 2 47 | assert len(re.findall(r'\d*\.\d*', result.output)) == 8 48 | 49 | 50 | def test_with_id(feature_seq, runner): 51 | result = runner.invoke(main_group, ['bounds', '--with-id'], feature_seq) 52 | assert result.exit_code == 0 53 | assert result.output.count('id') == result.output.count('bbox') == 2 54 | 55 | 56 | def test_explode_with_id(feature_collection, runner): 57 | result = runner.invoke( 58 | main_group, ['bounds', '--explode', '--with-id'], feature_collection) 59 | assert result.exit_code == 0 60 | assert result.output.count('id') == result.output.count('bbox') == 2 61 | 62 | 63 | def test_with_obj(feature_seq, runner): 64 | result = runner.invoke(main_group, ['bounds', '--with-obj'], feature_seq) 65 | assert result.exit_code == 0 66 | assert result.output.count('geometry') == result.output.count('bbox') == 2 67 | 68 | 69 | def test_bounds_explode_with_obj(feature_collection, runner): 70 | result = runner.invoke( 71 | main_group, ['bounds', '--explode', '--with-obj'], feature_collection) 72 | assert result.exit_code == 0 73 | assert result.output.count('geometry') == result.output.count('bbox') == 2 74 | 75 | 76 | def test_explode_output_rs(feature_collection, runner): 77 | result = runner.invoke(main_group, ['bounds', '--explode', '--rs'], feature_collection) 78 | assert result.exit_code == 0 79 | assert result.output.count('\x1e') == 2 80 | assert result.output.count('[') == result.output.count(']') == 2 81 | assert len(re.findall(r'\d*\.\d*', result.output)) == 8 82 | -------------------------------------------------------------------------------- /tests/test_fio_calc.py: -------------------------------------------------------------------------------- 1 | """Tests for `$ fio calc`.""" 2 | 3 | 4 | import json 5 | 6 | from click.testing import CliRunner 7 | 8 | from fiona.fio.main import main_group 9 | 10 | 11 | def test_fail(): 12 | runner = CliRunner() 
13 | result = runner.invoke(main_group, ['calc', "TEST", "f.properties.test > 5"], 14 | '{"type": "no_properties"}') 15 | assert result.exit_code == 1 16 | 17 | 18 | def _load(output): 19 | features = [] 20 | for x in output.splitlines(): 21 | try: 22 | features.append(json.loads(x)) 23 | except: 24 | # Click combines stdout and stderr and shapely dumps logs to 25 | # stderr that are not JSON 26 | # https://github.com/pallets/click/issues/371 27 | pass 28 | return features 29 | 30 | 31 | def test_calc_seq(feature_seq, runner): 32 | result = runner.invoke(main_group, ['calc', 33 | "TEST", 34 | "f.properties.AREA / f.properties.PERIMETER"], 35 | feature_seq) 36 | assert result.exit_code == 0 37 | 38 | feats = _load(result.output) 39 | assert len(feats) == 2 40 | for feat in feats: 41 | assert feat['properties']['TEST'] == \ 42 | feat['properties']['AREA'] / feat['properties']['PERIMETER'] 43 | 44 | 45 | def test_bool_seq(feature_seq, runner): 46 | result = runner.invoke(main_group, ['calc', "TEST", "f.properties.AREA > 0.015"], 47 | feature_seq) 48 | assert result.exit_code == 0 49 | feats = _load(result.output) 50 | assert len(feats) == 2 51 | assert feats[0]['properties']['TEST'] 52 | assert not feats[1]['properties']['TEST'] 53 | 54 | 55 | def test_existing_property(feature_seq, runner): 56 | result = runner.invoke( 57 | main_group, ["calc", "AREA", "f.properties.AREA * 2"], feature_seq 58 | ) 59 | assert result.exit_code == 2 60 | 61 | result = runner.invoke(main_group, ['calc', "--overwrite", "AREA", "f.properties.AREA * 2"], 62 | feature_seq) 63 | assert result.exit_code == 0 64 | feats = _load(result.output) 65 | assert len(feats) == 2 66 | for feat in feats: 67 | assert 'AREA' in feat['properties'] 68 | -------------------------------------------------------------------------------- /tests/test_fio_collect.py: -------------------------------------------------------------------------------- 1 | """Tests for `$ fio collect`.""" 2 | 3 | 4 | import json 5 | 
import sys 6 | 7 | from click.testing import CliRunner 8 | import pytest 9 | 10 | # from fiona.fio import collect 11 | from fiona.fio.main import main_group 12 | 13 | 14 | def test_collect_rs(feature_seq_pp_rs): 15 | runner = CliRunner() 16 | result = runner.invoke( 17 | main_group, ['collect', '--src-crs', 'EPSG:3857'], 18 | feature_seq_pp_rs, 19 | catch_exceptions=False) 20 | assert result.exit_code == 0 21 | assert result.output.count('"Feature"') == 2 22 | 23 | 24 | def test_collect_no_rs(feature_seq): 25 | runner = CliRunner() 26 | result = runner.invoke( 27 | main_group, ['collect', '--src-crs', 'EPSG:3857'], 28 | feature_seq, 29 | catch_exceptions=False) 30 | assert result.exit_code == 0 31 | assert result.output.count('"Feature"') == 2 32 | 33 | 34 | def test_collect_ld(feature_seq): 35 | runner = CliRunner() 36 | result = runner.invoke( 37 | main_group, ['collect', '--with-ld-context', '--add-ld-context-item', 'foo=bar'], 38 | feature_seq, 39 | catch_exceptions=False) 40 | assert result.exit_code == 0 41 | assert '"@context": {' in result.output 42 | assert '"foo": "bar"' in result.output 43 | 44 | 45 | def test_collect_rec_buffered(feature_seq): 46 | runner = CliRunner() 47 | result = runner.invoke(main_group, ['collect', '--record-buffered'], feature_seq) 48 | assert result.exit_code == 0 49 | assert '"FeatureCollection"' in result.output 50 | 51 | 52 | def test_collect_noparse(feature_seq): 53 | runner = CliRunner() 54 | result = runner.invoke( 55 | main_group, ['collect', '--no-parse'], 56 | feature_seq, 57 | catch_exceptions=False) 58 | assert result.exit_code == 0 59 | assert result.output.count('"Feature"') == 2 60 | assert len(json.loads(result.output)['features']) == 2 61 | 62 | 63 | def test_collect_noparse_records(feature_seq): 64 | runner = CliRunner() 65 | result = runner.invoke( 66 | main_group, ['collect', '--no-parse', '--record-buffered'], 67 | feature_seq, 68 | catch_exceptions=False) 69 | assert result.exit_code == 0 70 | assert 
result.output.count('"Feature"') == 2 71 | assert len(json.loads(result.output)['features']) == 2 72 | 73 | 74 | def test_collect_src_crs(feature_seq): 75 | runner = CliRunner() 76 | result = runner.invoke( 77 | main_group, ['collect', '--no-parse', '--src-crs', 'epsg:4326'], 78 | feature_seq, 79 | catch_exceptions=False) 80 | assert result.exit_code == 2 81 | 82 | 83 | def test_collect_noparse_rs(feature_seq_pp_rs): 84 | runner = CliRunner() 85 | result = runner.invoke( 86 | main_group, ['collect', '--no-parse'], 87 | feature_seq_pp_rs, 88 | catch_exceptions=False) 89 | assert result.exit_code == 0 90 | assert result.output.count('"Feature"') == 2 91 | assert len(json.loads(result.output)['features']) == 2 92 | -------------------------------------------------------------------------------- /tests/test_fio_distrib.py: -------------------------------------------------------------------------------- 1 | """Tests for `$ fio distrib`.""" 2 | 3 | 4 | from click.testing import CliRunner 5 | 6 | from fiona.fio.main import main_group 7 | 8 | 9 | def test_distrib(feature_collection_pp): 10 | runner = CliRunner() 11 | result = runner.invoke(main_group, ['distrib', ], feature_collection_pp) 12 | assert result.exit_code == 0 13 | assert result.output.count('"Feature"') == 2 14 | 15 | 16 | def test_distrib_no_rs(feature_collection): 17 | runner = CliRunner() 18 | result = runner.invoke(main_group, ['distrib', ], feature_collection) 19 | assert result.exit_code == 0 20 | assert result.output.count('"Feature"') == 2 21 | -------------------------------------------------------------------------------- /tests/test_fio_dump.py: -------------------------------------------------------------------------------- 1 | """Unittests for $ fio dump""" 2 | 3 | 4 | import json 5 | 6 | from click.testing import CliRunner 7 | import pytest 8 | 9 | import fiona 10 | from fiona.fio.main import main_group 11 | 12 | 13 | def test_dump(path_coutwildrnp_shp): 14 | runner = CliRunner() 15 | result = 
runner.invoke(main_group, ['dump', path_coutwildrnp_shp]) 16 | assert result.exit_code == 0 17 | assert '"FeatureCollection"' in result.output 18 | 19 | 20 | @pytest.mark.parametrize("layer", ["routes", "1", "tracks", "track_points"]) 21 | def test_dump_layer(path_gpx, layer): 22 | runner = CliRunner() 23 | result = runner.invoke(main_group, ["dump", path_gpx, "--layer", layer]) 24 | assert result.exit_code == 0 25 | assert '"FeatureCollection"' in result.output 26 | 27 | 28 | def test_dump_layer_vfs(path_coutwildrnp_zip): 29 | path = f"zip://{path_coutwildrnp_zip}" 30 | result = CliRunner().invoke(main_group, ["dump", path]) 31 | assert result.exit_code == 0 32 | loaded = json.loads(result.output) 33 | with fiona.open(path) as src: 34 | assert len(loaded['features']) == len(src) 35 | assert len(loaded['features']) > 0 36 | -------------------------------------------------------------------------------- /tests/test_fio_features.py: -------------------------------------------------------------------------------- 1 | # CLI tests 2 | 3 | from click.testing import CliRunner 4 | 5 | from fiona.fio.main import main_group # type: ignore 6 | import pytest # type: ignore 7 | 8 | 9 | def test_map_count(): 10 | """fio-map prints correct number of results.""" 11 | with open("tests/data/trio.seq") as seq: 12 | data = seq.read() 13 | 14 | runner = CliRunner() 15 | result = runner.invoke( 16 | main_group, 17 | ["map", "centroid (buffer g 1.0)"], 18 | input=data, 19 | ) 20 | 21 | assert result.exit_code == 0 22 | assert result.output.count('"type": "Point"') == 3 23 | 24 | 25 | @pytest.mark.parametrize("raw_opt", ["--raw", "-r"]) 26 | def test_reduce_area(raw_opt): 27 | """Reduce features to their (raw) area.""" 28 | with open("tests/data/trio.seq") as seq: 29 | data = seq.read() 30 | 31 | runner = CliRunner() 32 | result = runner.invoke( 33 | main_group, 34 | ["reduce", raw_opt, "area (unary_union c) :projected false"], 35 | input=data, 36 | ) 37 | assert result.exit_code == 0 38 
| assert 0 < float(result.output) < 1e-5 39 | 40 | 41 | def test_reduce_union(): 42 | """Reduce features to one single feature.""" 43 | with open("tests/data/trio.seq") as seq: 44 | data = seq.read() 45 | 46 | # Define our reduce command using a mkdocs snippet. 47 | arg = """ 48 | --8<-- [start:reduce] 49 | unary_union c 50 | --8<-- [end:reduce] 51 | """.splitlines()[ 52 | 2 53 | ].strip() 54 | 55 | runner = CliRunner() 56 | result = runner.invoke(main_group, ["reduce", arg], input=data) 57 | assert result.exit_code == 0 58 | assert result.output.count('"type": "Polygon"') == 1 59 | assert result.output.count('"type": "LineString"') == 1 60 | assert result.output.count('"type": "GeometryCollection"') == 1 61 | 62 | 63 | def test_reduce_union_zip_properties(): 64 | """Reduce features to one single feature, zipping properties.""" 65 | with open("tests/data/trio.seq") as seq: 66 | data = seq.read() 67 | 68 | runner = CliRunner() 69 | result = runner.invoke( 70 | main_group, ["reduce", "--zip-properties", "unary_union c"], input=data 71 | ) 72 | assert result.exit_code == 0 73 | assert result.output.count('"type": "Polygon"') == 1 74 | assert result.output.count('"type": "LineString"') == 1 75 | assert result.output.count('"type": "GeometryCollection"') == 1 76 | assert ( 77 | """"name": ["Le ch\\u00e2teau d\'eau", "promenade du Peyrou"]""" 78 | in result.output 79 | ) 80 | 81 | 82 | def test_filter(): 83 | """Filter features by distance.""" 84 | with open("tests/data/trio.seq") as seq: 85 | data = seq.read() 86 | 87 | # Define our reduce command using a mkdocs snippet. 
88 | arg = """ 89 | --8<-- [start:filter] 90 | < (distance g (Point 4 43)) 62.5E3 91 | --8<-- [end:filter] 92 | """.splitlines()[ 93 | 2 94 | ].strip() 95 | 96 | runner = CliRunner() 97 | result = runner.invoke( 98 | main_group, 99 | ["filter", arg], 100 | input=data, 101 | catch_exceptions=False, 102 | ) 103 | assert result.exit_code == 0 104 | assert result.output.count('"type": "Polygon"') == 1 105 | 106 | 107 | @pytest.mark.parametrize("opts", [["--no-input", "--raw"], ["-rn"]]) 108 | def test_map_no_input(opts): 109 | runner = CliRunner() 110 | result = runner.invoke(main_group, ["map"] + opts + ["(Point 4 43)"]) 111 | assert result.exit_code == 0 112 | assert result.output.count('"type": "Point"') == 1 113 | -------------------------------------------------------------------------------- /tests/test_fio_filter.py: -------------------------------------------------------------------------------- 1 | """Tests for the legacy fio-filter.""" 2 | 3 | import pytest 4 | 5 | from fiona.fio.main import main_group 6 | 7 | 8 | def test_fail(runner): 9 | with pytest.warns(FutureWarning): 10 | result = runner.invoke(main_group, ['filter', 11 | "f.properties.test > 5" 12 | ], "{'type': 'no_properties'}") 13 | assert result.exit_code == 1 14 | 15 | 16 | def test_seq(feature_seq, runner): 17 | with pytest.warns(FutureWarning): 18 | result = runner.invoke(main_group, ['filter', 19 | "f.properties.AREA > 0.01"], feature_seq, catch_exceptions=False) 20 | assert result.exit_code == 0 21 | assert result.output.count('Feature') == 2 22 | 23 | with pytest.warns(FutureWarning): 24 | result = runner.invoke(main_group, ['filter', 25 | "f.properties.AREA > 0.015"], feature_seq) 26 | assert result.exit_code == 0 27 | assert result.output.count('Feature') == 1 28 | 29 | with pytest.warns(FutureWarning): 30 | result = runner.invoke(main_group, ['filter', 31 | "f.properties.AREA > 0.02"], feature_seq) 32 | assert result.exit_code == 0 33 | assert result.output.count('Feature') == 0 34 | 
-------------------------------------------------------------------------------- /tests/test_fio_info.py: -------------------------------------------------------------------------------- 1 | """Tests for ``$ fio info``.""" 2 | 3 | 4 | import json 5 | import re 6 | import sys 7 | 8 | if sys.version_info < (3, 10): 9 | from importlib_metadata import entry_points 10 | else: 11 | from importlib.metadata import entry_points 12 | 13 | from click.testing import CliRunner 14 | import pytest 15 | 16 | if sys.version_info < (3, 10): 17 | from importlib_metadata import entry_points 18 | else: 19 | from importlib.metadata import entry_points 20 | 21 | from fiona.fio.main import main_group 22 | 23 | 24 | def test_info_json(path_coutwildrnp_shp): 25 | runner = CliRunner() 26 | result = runner.invoke(main_group, ['info', path_coutwildrnp_shp]) 27 | assert result.exit_code == 0 28 | assert '"count": 67' in result.output 29 | assert '"crs": "EPSG:4326"' in result.output 30 | assert '"driver": "ESRI Shapefile"' in result.output 31 | assert '"name": "coutwildrnp"' in result.output 32 | 33 | 34 | def test_info_count(path_coutwildrnp_shp): 35 | runner = CliRunner() 36 | result = runner.invoke( 37 | main_group, ['info', '--count', path_coutwildrnp_shp]) 38 | assert result.exit_code == 0 39 | assert result.output == "67\n" 40 | 41 | 42 | def test_info_bounds(path_coutwildrnp_shp): 43 | runner = CliRunner() 44 | result = runner.invoke( 45 | main_group, ['info', '--bounds', path_coutwildrnp_shp]) 46 | assert result.exit_code == 0 47 | assert len(re.findall(r'\d*\.\d*', result.output)) == 4 48 | 49 | 50 | def test_all_registered(): 51 | """Make sure all the subcommands are actually registered to the main CLI 52 | group.""" 53 | for ep in entry_points(group="fiona.fio_commands"): 54 | assert ep.name in main_group.commands 55 | 56 | 57 | def _filter_info_warning(lines): 58 | """$ fio info can issue a RuntimeWarning, but click adds stderr to stdout 59 | so we have to filter it out before 
decoding JSON lines.""" 60 | lines = list(filter(lambda x: 'RuntimeWarning' not in x, lines)) 61 | return lines 62 | 63 | 64 | def test_info_no_count(path_gpx): 65 | """Make sure we can still get a `$ fio info` report on datasources that do 66 | not support feature counting, AKA `len(collection)`. 67 | """ 68 | runner = CliRunner() 69 | result = runner.invoke(main_group, ['info', path_gpx]) 70 | assert result.exit_code == 0 71 | lines = _filter_info_warning(result.output.splitlines()) 72 | assert len(lines) == 1, "First line is warning & second is JSON. No more." 73 | assert json.loads(lines[0])['count'] is None 74 | 75 | 76 | def test_info_layer(path_gpx): 77 | for layer in ('routes', '1'): 78 | runner = CliRunner() 79 | result = runner.invoke(main_group, [ 80 | 'info', 81 | path_gpx, 82 | '--layer', layer]) 83 | assert result.exit_code == 0 84 | lines = _filter_info_warning(result.output.splitlines()) 85 | assert len(lines) == 1, "1st line is warning & 2nd is JSON - no more." 86 | assert json.loads(lines[0])['name'] == 'routes' 87 | 88 | 89 | def test_info_vfs(path_coutwildrnp_zip, path_coutwildrnp_shp): 90 | runner = CliRunner() 91 | zip_result = runner.invoke(main_group, [ 92 | 'info', f'zip://{path_coutwildrnp_zip}']) 93 | shp_result = runner.invoke(main_group, [ 94 | 'info', path_coutwildrnp_shp]) 95 | assert zip_result.exit_code == shp_result.exit_code == 0 96 | assert zip_result.output == shp_result.output 97 | -------------------------------------------------------------------------------- /tests/test_fio_ls.py: -------------------------------------------------------------------------------- 1 | """Unittests for `$ fio ls`""" 2 | 3 | 4 | import json 5 | import sys 6 | import os 7 | from click.testing import CliRunner 8 | import pytest 9 | import fiona 10 | from fiona.fio.main import main_group 11 | 12 | 13 | def test_fio_ls_single_layer(data_dir): 14 | 15 | result = CliRunner().invoke(main_group, ['ls', data_dir]) 16 | assert result.exit_code == 0 17 | 
assert len(result.output.splitlines()) == 1 18 | assert sorted(json.loads(result.output)) == ['coutwildrnp', 'gre', 'test_tin'] 19 | 20 | 21 | def test_fio_ls_indent(path_coutwildrnp_shp): 22 | 23 | result = CliRunner().invoke(main_group, [ 24 | 'ls', 25 | '--indent', '4', 26 | path_coutwildrnp_shp]) 27 | assert result.exit_code == 0 28 | assert len(result.output.strip().splitlines()) == 3 29 | assert json.loads(result.output) == ['coutwildrnp'] 30 | 31 | 32 | def test_fio_ls_multi_layer(path_coutwildrnp_shp, tmpdir): 33 | outdir = str(tmpdir.mkdir('test_fio_ls_multi_layer')) 34 | 35 | # Copy test shapefile into new directory 36 | # Shapefile driver treats a directory of shapefiles as a single 37 | # multi-layer datasource 38 | layer_names = ['l1', 'l2'] 39 | for layer in layer_names: 40 | with fiona.open(path_coutwildrnp_shp) as src, \ 41 | fiona.open(outdir, 'w', layer=layer, **src.meta) as dst: 42 | for feat in src: 43 | dst.write(feat) 44 | 45 | # Run CLI test 46 | result = CliRunner().invoke(main_group, [ 47 | 'ls', outdir]) 48 | assert result.exit_code == 0 49 | json_result = json.loads(result.output) 50 | assert sorted(json_result) == sorted(layer_names) 51 | 52 | 53 | def test_fio_ls_vfs(path_coutwildrnp_zip): 54 | runner = CliRunner() 55 | result = runner.invoke(main_group, [ 56 | 'ls', f'zip://{path_coutwildrnp_zip}']) 57 | assert result.exit_code == 0 58 | loaded = json.loads(result.output) 59 | assert len(loaded) == 1 60 | assert loaded[0] == 'coutwildrnp' 61 | -------------------------------------------------------------------------------- /tests/test_fio_rm.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from click.testing import CliRunner 5 | 6 | import fiona 7 | from fiona.model import Feature 8 | from fiona.fio.main import main_group 9 | 10 | 11 | def create_sample_data(filename, driver, **extra_meta): 12 | meta = {"driver": driver, "schema": {"geometry": "Point", 
"properties": {}}} 13 | meta.update(extra_meta) 14 | with fiona.open(filename, "w", **meta) as dst: 15 | dst.write( 16 | Feature.from_dict( 17 | **{ 18 | "geometry": { 19 | "type": "Point", 20 | "coordinates": (0, 0), 21 | }, 22 | "properties": {}, 23 | } 24 | ) 25 | ) 26 | assert os.path.exists(filename) 27 | 28 | 29 | drivers = ["ESRI Shapefile", "GeoJSON"] 30 | 31 | 32 | @pytest.mark.parametrize("driver", drivers) 33 | def test_remove(tmpdir, driver): 34 | extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver] 35 | filename = f"delete_me.{extension}" 36 | filename = str(tmpdir.join(filename)) 37 | create_sample_data(filename, driver) 38 | 39 | result = CliRunner().invoke(main_group, ["rm", filename, "--yes"]) 40 | assert result.exit_code == 0 41 | assert not os.path.exists(filename) 42 | 43 | 44 | has_gpkg = "GPKG" in fiona.supported_drivers.keys() 45 | 46 | 47 | @pytest.mark.skipif(not has_gpkg, reason="Requires GPKG driver") 48 | def test_remove_layer(tmpdir): 49 | filename = str(tmpdir.join("a_filename.gpkg")) 50 | create_sample_data(filename, "GPKG", layer="layer1") 51 | create_sample_data(filename, "GPKG", layer="layer2") 52 | assert fiona.listlayers(filename) == ["layer1", "layer2"] 53 | 54 | result = CliRunner().invoke( 55 | main_group, ["rm", filename, "--layer", "layer2", "--yes"] 56 | ) 57 | assert result.exit_code == 0 58 | assert os.path.exists(filename) 59 | assert fiona.listlayers(filename) == ["layer1"] 60 | -------------------------------------------------------------------------------- /tests/test_geopackage.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | import fiona 4 | from fiona.model import Feature 5 | 6 | from .conftest import requires_gpkg 7 | 8 | example_schema = { 9 | "geometry": "Point", 10 | "properties": [("title", "str")], 11 | } 12 | 13 | example_crs = { 14 | "a": 6370997, 15 | "lon_0": -100, 16 | "y_0": 0, 17 | "no_defs": True, 18 | "proj": "laea", 
19 | "x_0": 0, 20 | "units": "m", 21 | "b": 6370997, 22 | "lat_0": 45, 23 | } 24 | 25 | example_features = [ 26 | Feature.from_dict( 27 | **{ 28 | "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, 29 | "properties": {"title": "One"}, 30 | } 31 | ), 32 | Feature.from_dict( 33 | **{ 34 | "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, 35 | "properties": {"title": "Two"}, 36 | } 37 | ), 38 | Feature.from_dict( 39 | **{ 40 | "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, 41 | "properties": {"title": "Three"}, 42 | } 43 | ), 44 | ] 45 | 46 | 47 | @requires_gpkg 48 | def test_read_gpkg(path_coutwildrnp_gpkg): 49 | """ 50 | Implicitly tests writing gpkg as the fixture will create the data source on 51 | first request 52 | """ 53 | with fiona.open(path_coutwildrnp_gpkg, "r") as src: 54 | assert len(src) == 67 55 | feature = next(iter(src)) 56 | assert feature.geometry["type"] == "Polygon" 57 | assert feature.properties["NAME"] == "Mount Naomi Wilderness" 58 | 59 | 60 | @requires_gpkg 61 | def test_write_gpkg(tmpdir): 62 | path = str(tmpdir.join("foo.gpkg")) 63 | 64 | with fiona.open( 65 | path, "w", driver="GPKG", schema=example_schema, crs=example_crs 66 | ) as dst: 67 | dst.writerecords(example_features) 68 | 69 | with fiona.open(path) as src: 70 | assert src.schema["geometry"] == "Point" 71 | assert len(src) == 3 72 | 73 | 74 | @requires_gpkg 75 | def test_write_multilayer_gpkg(tmpdir): 76 | """ 77 | Test that writing a second layer to an existing geopackage doesn't remove 78 | and existing layer for the dataset. 
79 | """ 80 | path = str(tmpdir.join("foo.gpkg")) 81 | 82 | with fiona.open( 83 | path, "w", driver="GPKG", schema=example_schema, layer="layer1", crs=example_crs 84 | ) as dst: 85 | dst.writerecords(example_features[0:2]) 86 | 87 | with fiona.open( 88 | path, "w", driver="GPKG", schema=example_schema, layer="layer2", crs=example_crs 89 | ) as dst: 90 | dst.writerecords(example_features[2:]) 91 | 92 | with fiona.open(path, layer="layer1") as src: 93 | assert src.schema["geometry"] == "Point" 94 | assert len(src) == 2 95 | 96 | with fiona.open(path, layer="layer2") as src: 97 | assert src.schema["geometry"] == "Point" 98 | assert len(src) == 1 99 | -------------------------------------------------------------------------------- /tests/test_integration.py: -------------------------------------------------------------------------------- 1 | """Unittests to verify Fiona is functioning properly with other software.""" 2 | 3 | 4 | from collections import UserDict 5 | 6 | import fiona 7 | from fiona.model import Feature 8 | 9 | 10 | def test_dict_subclass(tmpdir): 11 | """Rasterio now has a `CRS()` class that subclasses 12 | `collections.UserDict()`. Make sure we can receive it. 13 | 14 | `UserDict()` is a good class to test against because in Python 2 it is 15 | not a subclass of `collections.Mapping()`, so it provides an edge case. 
16 | """ 17 | 18 | class CRS(UserDict): 19 | pass 20 | 21 | outfile = str(tmpdir.join("test_UserDict.geojson")) 22 | 23 | profile = { 24 | "crs": CRS(init="EPSG:4326"), 25 | "driver": "GeoJSON", 26 | "schema": {"geometry": "Point", "properties": {}}, 27 | } 28 | 29 | with fiona.open(outfile, "w", **profile) as dst: 30 | dst.write( 31 | Feature.from_dict( 32 | **{ 33 | "type": "Feature", 34 | "properties": {}, 35 | "geometry": {"type": "Point", "coordinates": (10, -10)}, 36 | } 37 | ) 38 | ) 39 | 40 | with fiona.open(outfile) as src: 41 | assert len(src) == 1 42 | assert src.crs == {"init": "epsg:4326"} 43 | -------------------------------------------------------------------------------- /tests/test_layer.py: -------------------------------------------------------------------------------- 1 | """Layer tests.""" 2 | 3 | import pytest 4 | 5 | import fiona 6 | from .test_collection import TestReading 7 | 8 | 9 | def test_index_selection(path_coutwildrnp_shp): 10 | with fiona.open(path_coutwildrnp_shp, 'r', layer=0) as c: 11 | assert len(c) == 67 12 | 13 | 14 | class TestFileReading(TestReading): 15 | @pytest.fixture(autouse=True) 16 | def shapefile(self, path_coutwildrnp_shp): 17 | self.c = fiona.open(path_coutwildrnp_shp, 'r', layer='coutwildrnp') 18 | yield 19 | self.c.close() 20 | 21 | def test_open_repr(self, path_coutwildrnp_shp): 22 | assert repr(self.c) == ( 23 | f"" 25 | ) 26 | 27 | def test_closed_repr(self, path_coutwildrnp_shp): 28 | self.c.close() 29 | assert repr(self.c) == ( 30 | f"" 32 | ) 33 | 34 | def test_name(self): 35 | assert self.c.name == 'coutwildrnp' 36 | 37 | 38 | class TestDirReading(TestReading): 39 | @pytest.fixture(autouse=True) 40 | def shapefile(self, data_dir): 41 | self.c = fiona.open(data_dir, "r", layer="coutwildrnp") 42 | yield 43 | self.c.close() 44 | 45 | def test_open_repr(self, data_dir): 46 | assert repr(self.c) == ( 47 | f"" 49 | ) 50 | 51 | def test_closed_repr(self, data_dir): 52 | self.c.close() 53 | assert repr(self.c) == 
( 54 | f"" 56 | ) 57 | 58 | def test_name(self): 59 | assert self.c.name == 'coutwildrnp' 60 | 61 | def test_path(self, data_dir): 62 | assert self.c.path == data_dir 63 | 64 | 65 | def test_invalid_layer(path_coutwildrnp_shp): 66 | with pytest.raises(ValueError): 67 | fiona.open(path_coutwildrnp_shp, layer="foo") 68 | 69 | 70 | def test_write_invalid_numeric_layer(path_coutwildrnp_shp, tmpdir): 71 | with pytest.raises(ValueError): 72 | fiona.open(str(tmpdir.join("test-no-iter.shp")), mode='w', layer=0) 73 | -------------------------------------------------------------------------------- /tests/test_listing.py: -------------------------------------------------------------------------------- 1 | """Test listing a datasource's layers.""" 2 | 3 | from pathlib import Path 4 | import os 5 | 6 | import pytest 7 | 8 | import fiona 9 | import fiona.ogrext 10 | from fiona.errors import DriverError, FionaDeprecationWarning, FionaValueError 11 | from fiona.io import ZipMemoryFile 12 | 13 | 14 | def test_single_file_private(path_coutwildrnp_shp): 15 | with fiona.Env(): 16 | assert fiona.ogrext._listlayers( 17 | path_coutwildrnp_shp) == ['coutwildrnp'] 18 | 19 | 20 | def test_single_file(path_coutwildrnp_shp): 21 | assert fiona.listlayers(path_coutwildrnp_shp) == ['coutwildrnp'] 22 | 23 | 24 | def test_directory(data_dir): 25 | assert sorted(fiona.listlayers(data_dir)) == ['coutwildrnp', 'gre', 'test_tin'] 26 | 27 | 28 | def test_directory_trailing_slash(data_dir): 29 | assert sorted(fiona.listlayers(data_dir)) == ['coutwildrnp', 'gre', 'test_tin'] 30 | 31 | 32 | def test_zip_path(path_coutwildrnp_zip): 33 | assert fiona.listlayers( 34 | f'zip://{path_coutwildrnp_zip}') == ['coutwildrnp'] 35 | 36 | 37 | def test_zip_path_arch(path_coutwildrnp_zip): 38 | vfs = f'zip://{path_coutwildrnp_zip}' 39 | with pytest.warns(FionaDeprecationWarning): 40 | assert fiona.listlayers('/coutwildrnp.shp', vfs=vfs) == ['coutwildrnp'] 41 | 42 | 43 | def test_list_not_existing(data_dir): 44 | 
"""Test underlying Cython function correctly raises""" 45 | path = os.path.join(data_dir, "does_not_exist.geojson") 46 | with pytest.raises(DriverError): 47 | fiona.ogrext._listlayers(path) 48 | 49 | 50 | def test_invalid_path(): 51 | with pytest.raises(TypeError): 52 | fiona.listlayers(1) 53 | 54 | 55 | def test_invalid_vfs(): 56 | with pytest.raises(TypeError): 57 | fiona.listlayers("/", vfs=1) 58 | 59 | 60 | def test_invalid_path_ioerror(): 61 | with pytest.raises(DriverError): 62 | fiona.listlayers("foobar") 63 | 64 | 65 | def test_path_object(path_coutwildrnp_shp): 66 | path_obj = Path(path_coutwildrnp_shp) 67 | assert fiona.listlayers(path_obj) == ['coutwildrnp'] 68 | 69 | 70 | def test_listing_file(path_coutwildrnp_json): 71 | """list layers from an open file object""" 72 | with open(path_coutwildrnp_json, "rb") as f: 73 | assert len(fiona.listlayers(f)) == 1 74 | 75 | 76 | def test_listing_pathobj(path_coutwildrnp_json): 77 | """list layers from a Path object""" 78 | pathlib = pytest.importorskip("pathlib") 79 | assert len(fiona.listlayers(pathlib.Path(path_coutwildrnp_json))) == 1 80 | 81 | 82 | def test_listdir_path(path_coutwildrnp_zip): 83 | """List directories in a path""" 84 | assert sorted(fiona.listdir(f"zip://{path_coutwildrnp_zip}")) == [ 85 | "coutwildrnp.dbf", 86 | "coutwildrnp.prj", 87 | "coutwildrnp.shp", 88 | "coutwildrnp.shx", 89 | ] 90 | 91 | 92 | def test_listdir_path_not_existing(data_dir): 93 | """Test listing of a non existent directory""" 94 | path = os.path.join(data_dir, "does_not_exist.zip") 95 | with pytest.raises(FionaValueError): 96 | fiona.listdir(path) 97 | 98 | 99 | def test_listdir_invalid_path(): 100 | """List directories with invalid path""" 101 | with pytest.raises(TypeError): 102 | assert fiona.listdir(1) 103 | 104 | 105 | def test_listdir_file(path_coutwildrnp_zip): 106 | """Test list directories of a file""" 107 | with pytest.raises(FionaValueError): 108 | fiona.listdir(f"zip://{path_coutwildrnp_zip}/coutwildrnp.shp") 
109 | 110 | 111 | def test_listdir_zipmemoryfile(bytes_coutwildrnp_zip): 112 | """Test list directories of a zipped memory file.""" 113 | with ZipMemoryFile(bytes_coutwildrnp_zip) as memfile: 114 | print(memfile.name) 115 | assert sorted(fiona.listdir(memfile.name)) == [ 116 | "coutwildrnp.dbf", 117 | "coutwildrnp.prj", 118 | "coutwildrnp.shp", 119 | "coutwildrnp.shx", 120 | ] 121 | -------------------------------------------------------------------------------- /tests/test_logutils.py: -------------------------------------------------------------------------------- 1 | """Tests of skipped field log message filtering""" 2 | 3 | import logging 4 | import os 5 | 6 | import fiona 7 | from fiona.logutils import LogFiltering, FieldSkipLogFilter 8 | 9 | 10 | def test_filtering(caplog): 11 | """Test that ordinary log messages pass""" 12 | logger = logging.getLogger() 13 | with LogFiltering(logger, FieldSkipLogFilter()): 14 | logger.warning("Attention!") 15 | logger.warning("Skipping field 1") 16 | logger.warning("Skipping field 2") 17 | logger.warning("Danger!") 18 | logger.warning("Skipping field 1") 19 | 20 | assert len(caplog.records) == 4 21 | assert caplog.records[0].getMessage() == "Attention!" 22 | assert caplog.records[1].getMessage() == "Skipping field 1" 23 | assert caplog.records[2].getMessage() == "Skipping field 2" 24 | assert caplog.records[3].getMessage() == "Danger!" 
25 | 26 | 27 | def test_skipping_slice(caplog, data_dir): 28 | """Collection filters out all but one warning message""" 29 | with fiona.open(os.path.join(data_dir, "issue627.geojson")) as src: 30 | results = list(src) 31 | assert len(results) == 3 32 | assert not any(['skip_me' in f['properties'] for f in results]) 33 | assert len([rec for rec in caplog.records if rec.getMessage().startswith('Skipping')]) == 1 34 | 35 | 36 | def test_skipping_list(caplog, data_dir): 37 | """Collection filters out all but one warning message""" 38 | with fiona.open(os.path.join(data_dir, "issue627.geojson")) as src: 39 | results = list(src) 40 | assert len(results) == 3 41 | assert not any(['skip_me' in f['properties'] for f in results]) 42 | assert len([rec for rec in caplog.records if rec.getMessage().startswith('Skipping')]) == 1 43 | 44 | 45 | def test_log_filter_exception(caplog): 46 | """FieldSkipLogFilter handles exceptions from log.exception().""" 47 | logger = logging.getLogger() 48 | with LogFiltering(logger, FieldSkipLogFilter()): 49 | logger.exception(ValueError("Oh no")) 50 | 51 | assert len(caplog.records) == 1 52 | assert caplog.records[0].getMessage() == "Oh no" 53 | -------------------------------------------------------------------------------- /tests/test_meta.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import fiona 3 | import fiona.drvsupport 4 | import fiona.meta 5 | from fiona.drvsupport import supported_drivers 6 | from fiona.errors import FionaValueError 7 | from .conftest import requires_gdal2, requires_gdal23, requires_gdal31 8 | 9 | 10 | @requires_gdal31 11 | @pytest.mark.parametrize("driver", supported_drivers) 12 | def test_print_driver_options(driver): 13 | """ Test fiona.meta.print_driver_options(driver) """ 14 | # do not fail 15 | fiona.meta.print_driver_options(driver) 16 | 17 | 18 | @requires_gdal2 19 | def test_metadata_wrong_driver(): 20 | """ Test that FionaValueError is raised for 
non existing driver""" 21 | with pytest.raises(FionaValueError): 22 | fiona.meta.print_driver_options("Not existing driver") 23 | 24 | 25 | @requires_gdal2 26 | @pytest.mark.parametrize("driver", supported_drivers) 27 | def test_extension(driver): 28 | """ Test fiona.meta.extension(driver) """ 29 | # do not fail 30 | extension = fiona.meta.extension(driver) 31 | assert extension is None or isinstance(extension, str) 32 | 33 | 34 | @requires_gdal2 35 | @pytest.mark.parametrize("driver", supported_drivers) 36 | def test_extensions(driver): 37 | """ Test fiona.meta.extensions(driver) """ 38 | # do not fail 39 | extensions = fiona.meta.extensions(driver) 40 | assert extensions is None or isinstance(extensions, list) 41 | 42 | 43 | @requires_gdal2 44 | @pytest.mark.parametrize("driver", supported_drivers) 45 | def test_supports_vsi(driver): 46 | """ Test fiona.meta.supports_vsi(driver) """ 47 | # do not fail 48 | assert fiona.meta.supports_vsi(driver) in (True, False) 49 | 50 | 51 | @requires_gdal2 52 | @pytest.mark.parametrize("driver", supported_drivers) 53 | def test_supported_field_types(driver): 54 | """ Test fiona.meta.supported_field_types(driver) """ 55 | # do not fail 56 | field_types = fiona.meta.supported_field_types(driver) 57 | assert field_types is None or isinstance(field_types, list) 58 | 59 | 60 | @requires_gdal23 61 | @pytest.mark.parametrize("driver", supported_drivers) 62 | def test_supported_sub_field_types(driver): 63 | """ Test fiona.meta.supported_sub_field_types(driver) """ 64 | # do not fail 65 | sub_field_types = fiona.meta.supported_sub_field_types(driver) 66 | assert sub_field_types is None or isinstance(sub_field_types, list) 67 | -------------------------------------------------------------------------------- /tests/test_non_counting_layer.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import pytest 4 | 5 | import fiona 6 | from fiona.errors import FionaDeprecationWarning 7 
| 8 | 9 | @pytest.mark.usefixtures('uttc_path_gpx') 10 | class TestNonCountingLayer(unittest.TestCase): 11 | def setUp(self): 12 | self.c = fiona.open(self.path_gpx, "r", layer="track_points") 13 | 14 | def tearDown(self): 15 | self.c.close() 16 | 17 | def test_len_fail(self): 18 | with pytest.raises(TypeError): 19 | len(self.c) 20 | 21 | def test_list(self): 22 | features = list(self.c) 23 | assert len(features) == 19 24 | 25 | def test_getitem(self): 26 | self.c[2] 27 | 28 | def test_fail_getitem_negative_index(self): 29 | with pytest.raises(IndexError): 30 | self.c[-1] 31 | 32 | def test_slice(self): 33 | with pytest.warns(FionaDeprecationWarning): 34 | features = self.c[2:5] 35 | assert len(features) == 3 36 | 37 | def test_warn_slice_negative_index(self): 38 | with pytest.warns((FionaDeprecationWarning, RuntimeWarning)): 39 | self.c[2:-4] 40 | -------------------------------------------------------------------------------- /tests/test_open.py: -------------------------------------------------------------------------------- 1 | """Tests of file opening""" 2 | 3 | import io 4 | import os 5 | 6 | import pytest 7 | 8 | import fiona 9 | from fiona.crs import CRS 10 | from fiona.errors import DriverError 11 | from fiona.model import Feature 12 | 13 | 14 | def test_open_shp(path_coutwildrnp_shp): 15 | """Open a shapefile""" 16 | assert fiona.open(path_coutwildrnp_shp) 17 | 18 | 19 | def test_open_filename_with_exclamation(data_dir): 20 | path = os.path.relpath(os.path.join(data_dir, "!test.geojson")) 21 | assert os.path.exists(path), "Missing test data" 22 | assert fiona.open(path), "Failed to open !test.geojson" 23 | 24 | 25 | def test_write_memfile_crs_wkt(): 26 | example_schema = { 27 | "geometry": "Point", 28 | "properties": [("title", "str")], 29 | } 30 | 31 | example_features = [ 32 | Feature.from_dict( 33 | **{ 34 | "geometry": {"type": "Point", "coordinates": [0.0, 0.0]}, 35 | "properties": {"title": "One"}, 36 | } 37 | ), 38 | Feature.from_dict( 39 | **{ 40 
| "geometry": {"type": "Point", "coordinates": [1.0, 2.0]}, 41 | "properties": {"title": "Two"}, 42 | } 43 | ), 44 | Feature.from_dict( 45 | **{ 46 | "geometry": {"type": "Point", "coordinates": [3.0, 4.0]}, 47 | "properties": {"title": "Three"}, 48 | } 49 | ), 50 | ] 51 | 52 | with io.BytesIO() as fd: 53 | with fiona.open( 54 | fd, 55 | "w", 56 | driver="GPKG", 57 | schema=example_schema, 58 | crs_wkt=CRS.from_epsg(32611).to_wkt(), 59 | ) as dst: 60 | dst.writerecords(example_features) 61 | 62 | fd.seek(0) 63 | with fiona.open(fd) as src: 64 | assert src.driver == "GPKG" 65 | assert src.crs == "EPSG:32611" 66 | -------------------------------------------------------------------------------- /tests/test_profile.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | 4 | import fiona 5 | 6 | from .conftest import WGS84PATTERN 7 | 8 | def test_profile(path_coutwildrnp_shp): 9 | with fiona.open(path_coutwildrnp_shp) as src: 10 | assert re.match(WGS84PATTERN, src.crs_wkt) 11 | 12 | 13 | def test_profile_creation_wkt(tmpdir, path_coutwildrnp_shp): 14 | outfilename = str(tmpdir.join("test.shp")) 15 | with fiona.open(path_coutwildrnp_shp) as src: 16 | profile = src.meta 17 | profile['crs'] = 'bogus' 18 | with fiona.open(outfilename, 'w', **profile) as dst: 19 | assert dst.crs == {'init': 'epsg:4326'} 20 | assert re.match(WGS84PATTERN, dst.crs_wkt) 21 | -------------------------------------------------------------------------------- /tests/test_read_drivers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import fiona 4 | from fiona.errors import FionaValueError 5 | 6 | 7 | def test_read_fail(path_coutwildrnp_shp): 8 | with pytest.raises(FionaValueError): 9 | fiona.open(path_coutwildrnp_shp, driver='GeoJSON') 10 | with pytest.raises(FionaValueError): 11 | fiona.open(path_coutwildrnp_shp, enabled_drivers=['GeoJSON']) 12 | 13 | 14 | def 
test_read(path_coutwildrnp_shp): 15 | with fiona.open(path_coutwildrnp_shp, driver='ESRI Shapefile') as src: 16 | assert src.driver == 'ESRI Shapefile' 17 | with fiona.open( 18 | path_coutwildrnp_shp, 19 | enabled_drivers=['GeoJSON', 'ESRI Shapefile']) as src: 20 | assert src.driver == 'ESRI Shapefile' 21 | -------------------------------------------------------------------------------- /tests/test_remove.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | import os 4 | import itertools 5 | from .conftest import requires_gpkg 6 | 7 | import pytest 8 | 9 | import fiona 10 | from fiona.errors import DatasetDeleteError 11 | from fiona.model import Feature 12 | 13 | 14 | def create_sample_data(filename, driver, **extra_meta): 15 | meta = {"driver": driver, "schema": {"geometry": "Point", "properties": {}}} 16 | meta.update(extra_meta) 17 | with fiona.open(filename, "w", **meta) as dst: 18 | dst.write( 19 | Feature.from_dict( 20 | **{ 21 | "geometry": { 22 | "type": "Point", 23 | "coordinates": (0, 0), 24 | }, 25 | "properties": {}, 26 | } 27 | ) 28 | ) 29 | assert os.path.exists(filename) 30 | 31 | 32 | drivers = ["ESRI Shapefile", "GeoJSON"] 33 | kinds = ["path", "collection"] 34 | specify_drivers = [True, False] 35 | test_data = itertools.product(drivers, kinds, specify_drivers) 36 | 37 | 38 | @pytest.mark.parametrize("driver, kind, specify_driver", test_data) 39 | def test_remove(tmpdir, kind, driver, specify_driver): 40 | """Test various dataset removal operations""" 41 | extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver] 42 | filename = f"delete_me.{extension}" 43 | output_filename = str(tmpdir.join(filename)) 44 | 45 | create_sample_data(output_filename, driver=driver) 46 | if kind == "collection": 47 | to_delete = fiona.open(output_filename, "r") 48 | else: 49 | to_delete = output_filename 50 | 51 | assert os.path.exists(output_filename) 52 | if specify_driver: 53 | 
fiona.remove(to_delete, driver=driver) 54 | else: 55 | fiona.remove(to_delete) 56 | assert not os.path.exists(output_filename) 57 | 58 | 59 | def test_remove_nonexistent(tmpdir): 60 | """Attempting to remove a file that does not exist results in an OSError""" 61 | filename = str(tmpdir.join("does_not_exist.shp")) 62 | assert not os.path.exists(filename) 63 | with pytest.raises(OSError): 64 | fiona.remove(filename) 65 | 66 | 67 | @requires_gpkg 68 | def test_remove_layer(tmpdir): 69 | filename = str(tmpdir.join("a_filename.gpkg")) 70 | create_sample_data(filename, "GPKG", layer="layer1") 71 | create_sample_data(filename, "GPKG", layer="layer2") 72 | create_sample_data(filename, "GPKG", layer="layer3") 73 | create_sample_data(filename, "GPKG", layer="layer4") 74 | assert fiona.listlayers(filename) == ["layer1", "layer2", "layer3", "layer4"] 75 | 76 | # remove by index 77 | fiona.remove(filename, layer=2) 78 | assert fiona.listlayers(filename) == ["layer1", "layer2", "layer4"] 79 | 80 | # remove by name 81 | fiona.remove(filename, layer="layer2") 82 | assert fiona.listlayers(filename) == ["layer1", "layer4"] 83 | 84 | # remove by negative index 85 | fiona.remove(filename, layer=-1) 86 | assert fiona.listlayers(filename) == ["layer1"] 87 | 88 | # invalid layer name 89 | with pytest.raises(ValueError): 90 | fiona.remove(filename, layer="invalid_layer_name") 91 | 92 | # invalid layer index 93 | with pytest.raises(DatasetDeleteError): 94 | fiona.remove(filename, layer=999) 95 | 96 | 97 | def test_remove_layer_shapefile(tmpdir): 98 | """Removal of layer in shapefile actually deletes the datasource""" 99 | filename = str(tmpdir.join("a_filename.shp")) 100 | create_sample_data(filename, "ESRI Shapefile") 101 | fiona.remove(filename, layer=0) 102 | assert not os.path.exists(filename) 103 | 104 | 105 | def test_remove_layer_geojson(tmpdir): 106 | """Removal of layers is not supported by GeoJSON driver 107 | 108 | The reason for failure is slightly different between GDAL 2.2+ 
and < 2.2. 109 | With < 2.2 the datasource will fail to open in write mode (OSError), while 110 | with 2.2+ the datasource will open but the removal operation will fail (not 111 | supported). 112 | """ 113 | filename = str(tmpdir.join("a_filename.geojson")) 114 | create_sample_data(filename, "GeoJSON") 115 | with pytest.raises((RuntimeError, OSError)): 116 | fiona.remove(filename, layer=0) 117 | assert os.path.exists(filename) 118 | -------------------------------------------------------------------------------- /tests/test_revolvingdoor.py: -------------------------------------------------------------------------------- 1 | # Test of opening and closing and opening 2 | 3 | import fiona 4 | 5 | 6 | def test_write_revolving_door(tmpdir, path_coutwildrnp_shp): 7 | with fiona.open(path_coutwildrnp_shp) as src: 8 | meta = src.meta 9 | features = list(src) 10 | 11 | shpname = str(tmpdir.join('foo.shp')) 12 | 13 | with fiona.open(shpname, 'w', **meta) as dst: 14 | dst.writerecords(features) 15 | 16 | with fiona.open(shpname) as src: 17 | pass 18 | -------------------------------------------------------------------------------- /tests/test_rfc3339.py: -------------------------------------------------------------------------------- 1 | """Tests for Fiona's RFC 3339 support.""" 2 | 3 | 4 | import re 5 | 6 | import pytest 7 | 8 | from fiona.rfc3339 import parse_date, parse_datetime, parse_time 9 | from fiona.rfc3339 import group_accessor, pattern_date 10 | 11 | 12 | class TestDateParse: 13 | 14 | def test_yyyymmdd(self): 15 | assert parse_date("2012-01-29") == (2012, 1, 29, 0, 0, 0, 0.0, None) 16 | 17 | def test_error(self): 18 | with pytest.raises(ValueError): 19 | parse_date("xxx") 20 | 21 | 22 | class TestTimeParse: 23 | 24 | def test_hhmmss(self): 25 | assert parse_time("10:11:12") == (0, 0, 0, 10, 11, 12, 0.0, None) 26 | 27 | def test_hhmm(self): 28 | assert parse_time("10:11") == (0, 0, 0, 10, 11, 0, 0.0, None) 29 | 30 | def test_hhmmssff(self): 31 | assert 
parse_time("10:11:12.42") == (0, 0, 0, 10, 11, 12, 0.42*1000000, None) 32 | 33 | def test_hhmmssz(self): 34 | assert parse_time("10:11:12Z") == (0, 0, 0, 10, 11, 12, 0.0, None) 35 | 36 | def test_hhmmssoff(self): 37 | assert parse_time("10:11:12-01:30") == (0, 0, 0, 10, 11, 12, 0.0, -90) 38 | 39 | def test_hhmmssoff2(self): 40 | assert parse_time("10:11:12+01:30") == (0, 0, 0, 10, 11, 12, 0.0, 90) 41 | 42 | def test_error(self): 43 | with pytest.raises(ValueError): 44 | parse_time("xxx") 45 | 46 | 47 | class TestDatetimeParse: 48 | 49 | def test_yyyymmdd(self): 50 | assert ( 51 | parse_datetime("2012-01-29T10:11:12") == 52 | (2012, 1, 29, 10, 11, 12, 0.0, None)) 53 | 54 | def test_yyyymmddTZ(self): 55 | assert ( 56 | parse_datetime("2012-01-29T10:11:12+01:30") == 57 | (2012, 1, 29, 10, 11, 12, 0.0, 90)) 58 | 59 | def test_yyyymmddTZ2(self): 60 | assert ( 61 | parse_datetime("2012-01-29T10:11:12-01:30") == 62 | (2012, 1, 29, 10, 11, 12, 0.0, -90)) 63 | 64 | def test_error(self): 65 | with pytest.raises(ValueError): 66 | parse_datetime("xxx") 67 | 68 | 69 | def test_group_accessor_indexerror(): 70 | match = re.search(pattern_date, '2012-01-29') 71 | g = group_accessor(match) 72 | assert g.group(-1) == 0 73 | assert g.group(6) == 0 74 | -------------------------------------------------------------------------------- /tests/test_rfc64_tin.py: -------------------------------------------------------------------------------- 1 | """Tests of features related to GDAL RFC 64 2 | 3 | See https://trac.osgeo.org/gdal/wiki/rfc64_triangle_polyhedralsurface_tin. 4 | """ 5 | 6 | import fiona 7 | from fiona.model import Geometry 8 | 9 | 10 | def _test_tin(geometry: Geometry) -> None: 11 | """Test if TIN (((0 0 0, 0 0 1, 0 1 0, 0 0 0)), ((0 0 0, 0 1 0, 1 1 0, 0 0 0))) 12 | is correctly converted to MultiPolygon. 
13 | """ 14 | assert geometry["type"] == "MultiPolygon" 15 | assert geometry["coordinates"] == [ 16 | [[(0.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (0.0, 0.0, 0.0)]], 17 | [[(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)]], 18 | ] 19 | 20 | 21 | def _test_triangle(geometry: Geometry) -> None: 22 | """Test if TRIANGLE((0 0 0,0 1 0,1 1 0,0 0 0)) 23 | is correctly converted to MultiPolygon.""" 24 | assert geometry["type"] == "Polygon" 25 | assert geometry["coordinates"] == [ 26 | [(0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 1.0, 0.0), (0.0, 0.0, 0.0)] 27 | ] 28 | 29 | 30 | def test_tin_shp(path_test_tin_shp): 31 | """Convert TIN to MultiPolygon""" 32 | with fiona.open(path_test_tin_shp) as col: 33 | assert col.schema["geometry"] == "Unknown" 34 | features = list(col) 35 | assert len(features) == 1 36 | _test_tin(features[0]["geometry"]) 37 | 38 | 39 | def test_tin_csv(path_test_tin_csv): 40 | """Convert TIN to MultiPolygon and Triangle to Polygon""" 41 | with fiona.open(path_test_tin_csv) as col: 42 | assert col.schema["geometry"] == "Unknown" 43 | 44 | feature1 = next(col) 45 | _test_tin(feature1["geometry"]) 46 | 47 | feature2 = next(col) 48 | _test_triangle(feature2["geometry"]) 49 | 50 | feature3 = next(col) 51 | assert feature3["geometry"]["type"] == "GeometryCollection" 52 | assert len(feature3["geometry"]["geometries"]) == 2 53 | 54 | _test_tin(feature3["geometry"]["geometries"][0]) 55 | _test_triangle(feature3["geometry"]["geometries"][1]) 56 | -------------------------------------------------------------------------------- /tests/test_session.py: -------------------------------------------------------------------------------- 1 | """Tests of the ogrext.Session class""" 2 | import pytest 3 | 4 | import fiona 5 | from fiona.errors import GDALVersionError, UnsupportedOperation 6 | from .conftest import gdal_version 7 | 8 | 9 | def test_get(path_coutwildrnp_shp): 10 | with fiona.open(path_coutwildrnp_shp) as col: 11 | feat3 = col.get(2) 12 
| assert feat3['properties']['NAME'] == 'Mount Zirkel Wilderness' 13 | 14 | 15 | @pytest.mark.parametrize("layer, namespace, tags", [ 16 | (None, None, {"test_tag1": "test_value1", "test_tag2": "test_value2"}), 17 | (None, "test", {"test_tag1": "test_value1", "test_tag2": "test_value2"}), 18 | (None, None, {}), 19 | (None, "test", {}), 20 | ("layer", None, {"test_tag1": "test_value1", "test_tag2": "test_value2"}), 21 | ("layer", "test", {"test_tag1": "test_value1", "test_tag2": "test_value2"}), 22 | ("layer", None, {}), 23 | ("layer", "test", {}), 24 | ]) 25 | @pytest.mark.skipif(gdal_version.major < 2, reason="Broken on GDAL 1.x") 26 | def test_update_tags(layer, namespace, tags, tmpdir): 27 | test_geopackage = str(tmpdir.join("test.gpkg")) 28 | schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'} 29 | with fiona.Env(), fiona.open( 30 | test_geopackage, "w", driver="GPKG", schema=schema, layer=layer) as gpkg: 31 | assert gpkg.tags() == {} 32 | gpkg.update_tags(tags, ns=namespace) 33 | 34 | with fiona.Env(), fiona.open(test_geopackage, layer=layer) as gpkg: 35 | assert gpkg.tags(ns=namespace) == tags 36 | if namespace is not None: 37 | assert gpkg.tags() == {} 38 | with pytest.raises(UnsupportedOperation): 39 | gpkg.update_tags({}, ns=namespace) 40 | 41 | 42 | @pytest.mark.parametrize("layer, namespace", [ 43 | (None, None), 44 | (None, "test"), 45 | ("test", None), 46 | ("test", "test"), 47 | ]) 48 | @pytest.mark.skipif(gdal_version.major < 2, reason="Broken on GDAL 1.x") 49 | def test_update_tag_item(layer, namespace, tmpdir): 50 | test_geopackage = str(tmpdir.join("test.gpkg")) 51 | schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'} 52 | with fiona.Env(), fiona.open( 53 | test_geopackage, "w", driver="GPKG", schema=schema, layer=layer) as gpkg: 54 | assert gpkg.get_tag_item("test_tag1", ns=namespace) is None 55 | gpkg.update_tag_item("test_tag1", "test_value1", ns=namespace) 56 | 57 | with fiona.Env(), 
fiona.open(test_geopackage, layer=layer) as gpkg: 58 | if namespace is not None: 59 | assert gpkg.get_tag_item("test_tag1") is None 60 | assert gpkg.get_tag_item("test_tag1", ns=namespace) == "test_value1" 61 | with pytest.raises(UnsupportedOperation): 62 | gpkg.update_tag_item("test_tag1", "test_value1", ns=namespace) 63 | 64 | 65 | @pytest.mark.skipif(gdal_version.major >= 2, reason="Only raises on GDAL 1.x") 66 | def test_gdal_version_error(tmpdir): 67 | test_geopackage = str(tmpdir.join("test.gpkg")) 68 | schema = {'properties': {'CDATA1': 'str:254'}, 'geometry': 'Polygon'} 69 | with fiona.Env(), fiona.open( 70 | test_geopackage, "w", driver="GPKG", schema=schema, layer="layer") as gpkg: 71 | with pytest.raises(GDALVersionError): 72 | gpkg.update_tags({"test_tag1": "test_value1"}, ns="test") 73 | with pytest.raises(GDALVersionError): 74 | gpkg.update_tag_item("test_tag1", "test_value1", ns="test") 75 | with pytest.raises(GDALVersionError): 76 | gpkg.tags() 77 | with pytest.raises(GDALVersionError): 78 | gpkg.get_tag_item("test_tag1") 79 | -------------------------------------------------------------------------------- /tests/test_snuggs.py: -------------------------------------------------------------------------------- 1 | # Python module tests 2 | 3 | """Tests of the snuggs module.""" 4 | 5 | import pytest # type: ignore 6 | 7 | from fiona._vendor import snuggs 8 | 9 | 10 | @pytest.mark.parametrize("arg", ["''", "null", "false", 0]) 11 | def test_truth_false(arg): 12 | """Expression is not true.""" 13 | assert not snuggs.eval(f"(truth {arg})") 14 | 15 | 16 | @pytest.mark.parametrize("arg", ["'hi'", "true", 1]) 17 | def test_truth(arg): 18 | """Expression is true.""" 19 | assert snuggs.eval(f"(truth {arg})") 20 | 21 | 22 | @pytest.mark.parametrize("arg", ["''", "null", "false", 0]) 23 | def test_not(arg): 24 | """Expression is true.""" 25 | assert snuggs.eval(f"(not {arg})") 26 | -------------------------------------------------------------------------------- 
# tests/test_subtypes.py
# ---------------------------------------------------------------------------

"""Tests of schema sub-types."""

import os

import fiona
from fiona.model import Feature


def test_read_bool_subtype(tmp_path):
    """GeoJSON booleans read back as real bools, not ints."""
    test_data = """{"type": "FeatureCollection", "features": [{"type": "Feature", "properties": {"bool": true, "not_bool": 1, "float": 42.5}, "geometry": null}]}"""
    path = tmp_path.joinpath("test_read_bool_subtype.geojson")

    with open(os.fspath(path), "w") as f:
        f.write(test_data)

    with fiona.open(path, "r") as src:
        feature = next(iter(src))

    # Exact-type checks: a bool-subtyped field must not degrade to int.
    assert type(feature["properties"]["bool"]) is bool
    assert isinstance(feature["properties"]["not_bool"], int)
    assert type(feature["properties"]["float"]) is float


def test_write_bool_subtype(tmp_path):
    """A "bool" schema field serializes as a JSON boolean."""
    path = tmp_path.joinpath("test_write_bool_subtype.geojson")

    schema = {
        "geometry": "Point",
        "properties": {
            "bool": "bool",
            "not_bool": "int",
            "float": "float",
        },
    }

    feature = Feature.from_dict(
        **{
            "geometry": None,
            "properties": {
                "bool": True,
                "not_bool": 1,
                "float": 42.5,
            },
        }
    )

    with fiona.open(path, "w", driver="GeoJSON", schema=schema) as dst:
        dst.write(feature)

    with open(os.fspath(path)) as f:
        data = f.read()

    assert """"bool": true""" in data
    assert """"not_bool": 1""" in data


def test_write_int16_subtype(tmp_path):
    """An "int16" schema field round-trips through a GeoPackage."""
    # Filename fixed to match this test; it was a copy-paste of
    # test_write_bool_subtype's artifact name, which made debugging
    # leftover tmp files misleading.
    path = tmp_path.joinpath("test_write_int16_subtype.gpkg")

    schema = {
        "geometry": "Point",
        "properties": {
            "a": "int",
            "b": "int16",
        },
    }

    feature = Feature.from_dict(
        **{
            "geometry": None,
            "properties": {
                "a": 1,
                "b": 2,
            },
        }
    )

    with fiona.open(path, "w", driver="GPKG", schema=schema) as colxn:
        colxn.write(feature)
80 | 81 | with fiona.open(path) as colxn: 82 | assert colxn.schema["properties"]["a"] == "int" 83 | assert colxn.schema["properties"]["b"] == "int16" 84 | -------------------------------------------------------------------------------- /tests/test_topojson.py: -------------------------------------------------------------------------------- 1 | """ 2 | Support for TopoJSON was added in OGR 1.11 to the `GeoJSON` driver. 3 | Starting at GDAL 2.3 support was moved to the `TopoJSON` driver. 4 | """ 5 | 6 | import os 7 | import pytest 8 | 9 | import fiona 10 | from fiona.env import GDALVersion 11 | from fiona.model import Properties 12 | 13 | 14 | gdal_version = GDALVersion.runtime() 15 | 16 | driver = "TopoJSON" if gdal_version.at_least((2, 3)) else "GeoJSON" 17 | has_driver = driver in fiona.drvsupport.supported_drivers.keys() 18 | 19 | 20 | @pytest.mark.skipif(not gdal_version.at_least((1, 11)), reason="Requires GDAL >= 1.11") 21 | @pytest.mark.skipif(not has_driver, reason=f"Requires {driver} driver") 22 | def test_read_topojson(data_dir): 23 | """Test reading a TopoJSON file 24 | 25 | The TopoJSON support in GDAL is a little unpredictable. In some versions 26 | the geometries or properties aren't parsed correctly. Here we just check 27 | that we can open the file, get the right number of features out, and 28 | that they have a geometry and some properties. See GH#722. 
29 | """ 30 | with fiona.open(os.path.join(data_dir, "example.topojson"), "r") as collection: 31 | features = list(collection) 32 | 33 | assert len(features) == 3, "unexpected number of features" 34 | for feature in features: 35 | assert isinstance(feature.properties, Properties) 36 | assert len(feature.properties) > 0 37 | assert feature.geometry.type in {"Point", "LineString", "Polygon"} 38 | -------------------------------------------------------------------------------- /tests/test_transactions.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | import logging 3 | import os 4 | import pytest 5 | from random import uniform, randint 6 | 7 | import fiona 8 | from fiona.model import Feature 9 | import fiona.ogrext 10 | 11 | from tests.conftest import requires_gdal2 12 | 13 | has_gpkg = "GPKG" in fiona.supported_drivers.keys() 14 | 15 | 16 | def create_records(count): 17 | for n in range(count): 18 | record = { 19 | "geometry": { 20 | "type": "Point", 21 | "coordinates": [uniform(-180, 180), uniform(-90, 90)], 22 | }, 23 | "properties": {"value": randint(0, 1000)}, 24 | } 25 | yield Feature.from_dict(**record) 26 | 27 | 28 | class DebugHandler(logging.Handler): 29 | def __init__(self, pattern): 30 | logging.Handler.__init__(self) 31 | self.pattern = pattern 32 | self.history = defaultdict(lambda: 0) 33 | 34 | def emit(self, record): 35 | if self.pattern in record.msg: 36 | self.history[record.msg] += 1 37 | 38 | 39 | log = logging.getLogger() 40 | 41 | 42 | @requires_gdal2 43 | @pytest.mark.skipif(not has_gpkg, reason="Requires geopackage driver") 44 | class TestTransaction: 45 | def setup_method(self): 46 | self.handler = DebugHandler(pattern="transaction") 47 | self.handler.setLevel(logging.DEBUG) 48 | log.setLevel(logging.DEBUG) 49 | log.addHandler(self.handler) 50 | 51 | def teardown_method(self): 52 | log.removeHandler(self.handler) 53 | 54 | def test_transaction(self, tmpdir): 55 | 
""" 56 | Test transaction start/commit is called the expected number of times, 57 | and that the default transaction size can be overloaded. The test uses 58 | a custom logging handler to listen for the debug messages produced 59 | when the transaction is started/committed. 60 | """ 61 | num_records = 250 62 | transaction_size = 100 63 | 64 | assert fiona.ogrext.DEFAULT_TRANSACTION_SIZE == 20000 65 | fiona.ogrext.DEFAULT_TRANSACTION_SIZE = transaction_size 66 | assert fiona.ogrext.DEFAULT_TRANSACTION_SIZE == transaction_size 67 | 68 | path = str(tmpdir.join("output.gpkg")) 69 | 70 | schema = {"geometry": "Point", "properties": {"value": "int"}} 71 | 72 | with fiona.open(path, "w", driver="GPKG", schema=schema) as dst: 73 | dst.writerecords(create_records(num_records)) 74 | 75 | assert self.handler.history["Starting transaction (initial)"] == 1 76 | assert ( 77 | self.handler.history["Starting transaction (intermediate)"] 78 | == num_records // transaction_size 79 | ) 80 | assert ( 81 | self.handler.history["Committing transaction (intermediate)"] 82 | == num_records // transaction_size 83 | ) 84 | assert self.handler.history["Committing transaction (final)"] == 1 85 | 86 | with fiona.open(path, "r") as src: 87 | assert len(src) == num_records 88 | -------------------------------------------------------------------------------- /tests/test_version.py: -------------------------------------------------------------------------------- 1 | import fiona 2 | import platform 3 | import re 4 | import os 5 | import sys 6 | from tests.conftest import travis_only 7 | from fiona._env import GDALVersion, get_gdal_release_name 8 | 9 | 10 | def test_version_tuple(): 11 | version = fiona.gdal_version 12 | assert version.major >= 1 and isinstance(version.major, int) 13 | assert version.minor >= 0 and isinstance(version.minor, int) 14 | assert version.revision >= 0 and isinstance(version.revision, int) 15 | 16 | 17 | def test_version_comparison(): 18 | # version against version 19 | 
assert GDALVersion(4, 0, 0) > GDALVersion(3, 2, 1) 20 | assert GDALVersion(2, 0, 0) < GDALVersion(3, 2, 1) 21 | assert GDALVersion(3, 2, 2) > GDALVersion(3, 2, 1) 22 | assert GDALVersion(3, 2, 0) < GDALVersion(3, 2, 1) 23 | 24 | # tuple against version 25 | assert (4, 0, 0) > GDALVersion(3, 2, 1) 26 | assert (2, 0, 0) < GDALVersion(3, 2, 1) 27 | assert (3, 2, 2) > GDALVersion(3, 2, 1) 28 | assert (3, 2, 0) < GDALVersion(3, 2, 1) 29 | 30 | 31 | @travis_only 32 | def test_show_versions(capsys): 33 | version_pattern = re.compile(r"(\d+).(\d+).(\d+)") 34 | 35 | os_info = f"{platform.system()} {platform.release()}" 36 | python_version = platform.python_version() 37 | python_exec = sys.executable 38 | 39 | msg = ("Fiona version: {fiona_version}" 40 | "\nGDAL version: {gdal_release_name}" 41 | "\nPROJ version: {proj_version}" 42 | "\n" 43 | "\nOS: {os_info}" 44 | "\nPython: {python_version}" 45 | "\nPython executable: '{python_exec}'" 46 | "\n" 47 | ) 48 | 49 | if fiona.gdal_version < GDALVersion(3, 0, 1): 50 | proj_version = "Proj version not available" 51 | else: 52 | proj_version = os.getenv("PROJVERSION") 53 | proj_version = re.match(version_pattern, proj_version).group(0) 54 | 55 | gdal_version = os.getenv("GDALVERSION") 56 | if not gdal_version == "master": 57 | gdal_version = re.match(version_pattern, gdal_version).group(0) 58 | else: 59 | gdal_version = get_gdal_release_name() 60 | 61 | msg_formatted = msg.format(fiona_version=fiona.__version__, 62 | gdal_release_name=gdal_version, 63 | proj_version=proj_version, 64 | os_info=os_info, 65 | python_version=python_version, 66 | python_exec=python_exec) 67 | 68 | fiona.show_versions() 69 | captured = capsys.readouterr() 70 | 71 | assert captured.out.strip() == msg_formatted.strip() 72 | --------------------------------------------------------------------------------