├── .github └── workflows │ ├── python-CD.yml │ └── python-CI.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── Pipfile ├── Pipfile.lock ├── PyPi.md ├── README.md ├── data ├── external │ └── .gitkeep ├── interim │ └── .gitkeep └── raw │ ├── .gitkeep │ ├── GT.png │ ├── RGB.png │ ├── TIF │ ├── GT5k.tif │ └── RGB5k.tif │ └── readme.txt ├── docs ├── .nojekyll ├── about.md ├── gis.md ├── img │ ├── favicon.ico │ ├── gdalicon.png │ ├── output-grid.png │ ├── output_img_gt.png │ └── split_raster_sample.png ├── index.md ├── sphinx │ ├── Makefile │ ├── commands.rst │ ├── conf.py │ ├── getting-started.rst │ ├── index.rst │ └── make.bat └── tutorial.md ├── mkdocs.yml ├── notebooks ├── .gitkeep ├── Tutorial.ipynb └── Tutorial_II.ipynb ├── requirements.txt ├── setup.py ├── src ├── data │ ├── .gitkeep │ ├── __init__.py │ └── make_dataset.py └── splitraster │ ├── __init__.py │ ├── geo.py │ └── io.py ├── test.py ├── test_environment.py └── tox.ini /.github/workflows/python-CD.yml: -------------------------------------------------------------------------------- 1 | # This workflow builds and tests the package across multiple Python versions, publishes it to PyPI, and deploys the documentation 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: SplitRaster Package Release and Publish 5 | 6 | on: 7 | push: 8 | branches: [master] 9 | tags: 10 | - v* 11 | pull_request: 12 | branches: [master] 13 | jobs: 14 | format_and_check: 15 | runs-on: ubuntu-22.04 16 | environment: ${{ github.ref == 'refs/heads/master' && 'production' || 'staging' }} 17 | steps: 18 | - uses: actions/checkout@v3 19 | - name: Set up Python 20 | uses: actions/setup-python@v4 21 | with: 22 | python-version: "3.10" 23 | - name: Install dependencies 24 | run: | 25 | python -m pip install --upgrade pip 26 | pip install black flake8 27 | - name: Format with Black 28 | run: | 29 | black --check . 
30 | 31 | build: 32 | needs: format_and_check 33 | runs-on: ubuntu-22.04 34 | environment: ${{ github.ref == 'refs/heads/master' && 'production' || 'staging' }} 35 | strategy: 36 | matrix: 37 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] 38 | 39 | steps: 40 | - uses: actions/checkout@v3 41 | - name: Set up Python ${{ matrix.python-version }} 42 | uses: actions/setup-python@v4 43 | with: 44 | python-version: ${{ matrix.python-version }} 45 | - name: Install dependencies 46 | run: | 47 | python -m pip install --upgrade pip 48 | pip install pytest 49 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 50 | - name: Install setuptools 51 | run: | 52 | pip install setuptools wheel 53 | - name: Build Python package 54 | run: | 55 | python setup.py sdist bdist_wheel 56 | - name: Install Python Package 57 | run: | 58 | pip install dist/*.whl 59 | - name: Test with pytest 60 | run: | 61 | pytest test.py -v 62 | 63 | deploy: 64 | needs: build 65 | runs-on: ubuntu-22.04 66 | environment: ${{ github.ref == 'refs/heads/master' && 'production' || 'staging' }} 67 | steps: 68 | - uses: actions/checkout@v3 69 | - name: Set up Python 70 | uses: actions/setup-python@v4 71 | with: 72 | python-version: "3.10" 73 | - name: Install dependencies 74 | run: | 75 | python -m pip install --upgrade pip 76 | pip install pytest 77 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 78 | - name: Install setuptools 79 | run: | 80 | pip install setuptools wheel 81 | - name: Rebuild Python package 82 | run: | 83 | python setup.py sdist bdist_wheel 84 | - name: Install Python Package 85 | run: | 86 | pip install dist/*.whl 87 | - name: Install twine 88 | run: | 89 | python -m pip install --upgrade pip 90 | pip install twine 91 | - name: Upload to PyPI 92 | env: 93 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 94 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 95 | run: | 96 | python -m twine upload --skip-existing dist/* 97 | 98 | release: 99 | needs: deploy 100 | runs-on: ubuntu-22.04 101 | environment: ${{ github.ref == 'refs/heads/master' && 'production' || 'staging' }} 102 | steps: 103 | - uses: actions/checkout@v3 104 | - name: Set up Python 105 | uses: actions/setup-python@v4 106 | with: 107 | python-version: "3.10" 108 | - name: Get Package version 109 | id: get_version 110 | run: | 111 | echo "version=$(python setup.py --version)" >> $GITHUB_ENV 112 | - name: Check if release exists 113 | id: check_release 114 | run: | 115 | RELEASE_ID=$(curl --silent --show-error --location --retry 3 --output /dev/null --write-out "%{http_code}" --header "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" "https://api.github.com/repos/${{ github.repository }}/releases/tags/${{ env.version }}") 116 | echo "exists=$([[ "$RELEASE_ID" != "404" ]] && echo true || echo false)" >> $GITHUB_ENV 117 | - name: Create Release 118 | id: create_release 119 | if: ${{ env.exists == 'false' }} 120 | uses: actions/create-release@v1 121 | env: 122 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 123 | with: 124 | tag_name: ${{ env.version }} 125 | release_name: Release ${{ env.version }} 126 | draft: false 127 | prerelease: false 128 | 129 | deploy_docs: 130 | needs: build 131 | runs-on: ubuntu-22.04 132 | environment: ${{ github.ref == 'refs/heads/master' && 'production' || 'staging' }} 133 | steps: 134 | - uses: actions/checkout@v3 135 | - name: Set up Python 136 | uses: actions/setup-python@v4 137 | with: 138 | python-version: "3.10" 139 | - name: Install dependencies 140 | run: | 141 | python 
-m pip install --upgrade pip 142 | pip install mkdocs 143 | - name: Deploy to GitHub Pages 144 | run: | 145 | mkdocs gh-deploy --force 146 | -------------------------------------------------------------------------------- /.github/workflows/python-CI.yml: -------------------------------------------------------------------------------- 1 | # This workflow installs Python dependencies, then builds and tests the package across multiple Python versions 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: SplitRaster Development Build Install and Test 5 | 6 | on: 7 | push: 8 | branches: [develop, feature/*] 9 | 10 | pull_request: 11 | branches: [develop, feature/*] 12 | jobs: 13 | format_and_check: 14 | runs-on: ubuntu-22.04 15 | environment: development 16 | steps: 17 | - uses: actions/checkout@v3 18 | - name: Set up Python 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: "3.10" 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install black flake8 26 | - name: Format with Black 27 | run: | 28 | black --check . 29 | 30 | build: 31 | needs: format_and_check 32 | runs-on: ubuntu-22.04 33 | environment: development 34 | strategy: 35 | matrix: 36 | python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] 37 | steps: 38 | - uses: actions/checkout@v3 39 | - name: Set up Python ${{ matrix.python-version }} 40 | uses: actions/setup-python@v4 41 | with: 42 | python-version: ${{ matrix.python-version }} 43 | - name: Install dependencies 44 | run: | 45 | python -m pip install --upgrade pip 46 | pip install pytest 47 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 48 | - name: Install setuptools 49 | run: | 50 | pip install setuptools wheel 51 | - name: Build Python package 52 | run: | 53 | python setup.py sdist bdist_wheel 54 | - name: Install Python Package 55 | run: | 56 | pip install dist/*.whl 57 | - name: Test with pytest 58 | run: | 59 | pytest test.py -v 60 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .coverage.* 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | *.cover 45 | 46 | # Translations 47 | *.mo 48 | *.pot 49 | 50 | # Django stuff: 51 | *.log 52 | 53 | # Sphinx documentation 54 | docs/_build/ 55 | 56 | # PyBuilder 57 | target/ 58 | 59 | # DotEnv configuration 60 | .env 61 | 62 | # Database 63 | *.db 64 | *.rdb 65 | 66 | # Pycharm 67 | .idea 68 | 69 | # VS Code 70 | .vscode/ 71 | 72 | # Spyder 73 | .spyproject/ 74 | 75 | # Jupyter NB Checkpoints 76 | .ipynb_checkpoints/ 77 | 78 | # exclude data from source control by default 79 | /data/processed/ 80 | 81 | # Mac OS-specific storage files 82 | .DS_Store 83 | 84 | # vim 85 | *.swp 86 | *.swo 87 | 88 | # Mypy cache 89 | .mypy_cache/ 90 | site/ 91 | out/ -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # Format code with black 2 | repos: 3 | # Run black 4 | - repo: https://github.com/psf/black 5 | rev: 23.3.0 6 | hooks: 7 | - id: black 8 | language_version: python3.10 9 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contribution Guideline 2 | 3 | ## How to Contribute 4 | 5 | First of all, thank you for your interest in contributing to this project. The project is still at an early stage and there is plenty to do. To contribute, please follow the steps below: 6 | 7 | 1. Fork the repository 8 | 2. Make your changes 9 | 3. Submit a pull request 10 | 4. Wait for the review 11 | 12 | 13 | ### Clone the repository 14 | 15 | ```bash 16 | # make sure you have the latest version of the code 17 | git clone "https://github.com/cuicaihao/split_raster.git" 18 | # make sure you are in the master branch 19 | git checkout master 20 | # pull the latest code 21 | git pull 22 | # create a new branch for your changes 23 | git checkout -b <your-branch-name> 24 | # make your changes 25 | # add your changes 26 | git add . 27 | # commit your changes 28 | git commit -m "your commit message" 29 | # push your changes 30 | git push origin <your-branch-name> 31 | # submit a pull request 32 | ``` 33 | 34 | ## Setting up the development environment 35 | 36 | This project is developed using Python 3.10 (see the Pipfile). The following packages are required: 37 | 38 | - pipenv 39 | - tqdm 40 | - numpy 41 | - scikit-image 42 | - (optional) gdal (for GeoTiff support) 43 | 44 | Please use `pipenv` to manage the virtual environment. The following commands will help you set up the development environment. 45 | 46 | ```bash 47 | # install pipenv 48 | pip install pipenv 49 | # install the required packages 50 | pipenv install 51 | # activate the virtual environment 52 | pipenv shell 53 | ``` 54 | 55 | Then, if you run the following command in your shell, you should see output similar to the following, which means you have successfully set up the development environment. 
56 | 57 | ```bash 58 | ❯ pipenv graph 59 | pytest==7.2.0 60 | - attrs [required: >=19.2.0, installed: 22.1.0] 61 | - exceptiongroup [required: >=1.0.0rc8, installed: 1.0.4] 62 | - iniconfig [required: Any, installed: 1.1.1] 63 | - packaging [required: Any, installed: 22.0] 64 | - pluggy [required: >=0.12,<2.0, installed: 1.0.0] 65 | - tomli [required: >=1.0.0, installed: 2.0.1] 66 | splitraster==0.3.3 67 | - numpy [required: >=1.19.0, installed: 1.24.0] 68 | - scikit-image [required: >=0.18.0, installed: 0.19.3] 69 | - imageio [required: >=2.4.1, installed: 2.22.4] 70 | - numpy [required: Any, installed: 1.24.0] 71 | - pillow [required: >=8.3.2, installed: 9.3.0] 72 | - networkx [required: >=2.2, installed: 2.8.8] 73 | - numpy [required: >=1.17.0, installed: 1.24.0] 74 | - packaging [required: >=20.0, installed: 22.0] 75 | - pillow [required: >=6.1.0,!=8.3.0,!=7.1.1,!=7.1.0, installed: 9.3.0] 76 | - PyWavelets [required: >=1.1.1, installed: 1.4.1] 77 | - numpy [required: >=1.17.3, installed: 1.24.0] 78 | - scipy [required: >=1.4.1, installed: 1.9.3] 79 | - numpy [required: >=1.18.5,<1.26.0, installed: 1.24.0] 80 | - tifffile [required: >=2019.7.26, installed: 2022.10.10] 81 | - numpy [required: >=1.19.2, installed: 1.24.0] 82 | - tqdm [required: >=4.40.0, installed: 4.64.1] 83 | ``` 84 | 85 | ## Testing 86 | To test your changes, please run the following command: 87 | 88 | ```bash 89 | ❯ pytest test.py -v 90 | cachedir: .pytest_cache 91 | rootdir: /Users/caihaocui/Documents/GitHub/split_raster 92 | collected 2 items 93 | 94 | test.py::test_rgb_gt_slide_window PASSED [ 50%] 95 | test.py::test_rgb_gt_random_crop PASSED [100%] 96 | ``` 97 | 98 | If you see the above output, it means that all the tests have passed. 99 | 100 | 101 | ## END 102 | 103 | 104 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | The MIT License (MIT) 3 | Copyright (c) 2025, Caihao Cui 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
10 | 11 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean data lint requirements test_environment help 2 | 3 | ################################################################################# 4 | # GLOBALS # 5 | ################################################################################# 6 | 7 | PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) 8 | BUCKET = [OPTIONAL] your-bucket-for-syncing-data (do not include 's3://') 9 | PROFILE = default 10 | PROJECT_NAME = split_raster 11 | PYTHON_INTERPRETER = python3 12 | 13 | ifeq (,$(shell which conda)) 14 | HAS_CONDA=False 15 | else 16 | HAS_CONDA=True 17 | endif 18 | 19 | ################################################################################# 20 | # COMMANDS # 21 | ################################################################################# 22 | 23 | ## Install Python Dependencies 24 | requirements: test_environment 25 | $(PYTHON_INTERPRETER) -m pip install -U pip setuptools wheel 26 | $(PYTHON_INTERPRETER) -m pip install -r requirements.txt 27 | 28 | ## Make Dataset 29 | data: requirements 30 | $(PYTHON_INTERPRETER) src/data/make_dataset.py data/raw data/processed 31 | 32 | ## Delete all compiled Python files 33 | clean: 34 | find . -type f -name "*.py[co]" -delete 35 | find . -type d -name "__pycache__" -delete 36 | 37 | ## Rebuild the wheel and upload to PyPI 38 | rebuild: 39 | rm -rf build dist 40 | $(PYTHON_INTERPRETER) setup.py sdist bdist_wheel 41 | twine upload dist/* 42 | ## Upload to PyPI 43 | upload: 44 | $(PYTHON_INTERPRETER) setup.py sdist bdist_wheel 45 | twine upload dist/* 46 | 47 | ## Update GitHub Pages 48 | gh-pages: 49 | $(PYTHON_INTERPRETER) -m pip install -q mkdocs mkdocs-material 50 | mkdocs gh-deploy 51 | 52 | 53 | # ## Format code with black 54 | lint: 55 | black src 56 | 57 | 58 | ## Test that the Python environment is set up correctly 59 | test_environment: 60 | $(PYTHON_INTERPRETER) test_environment.py 61 | 62 | ################################################################################# 63 | # PROJECT RULES # 64 | ################################################################################# 65 | 66 | 67 | 68 | ################################################################################# 69 | # Self Documenting Commands # 70 | ################################################################################# 71 | 72 | .DEFAULT_GOAL := help 73 | 74 | # Inspired by 75 | # sed script explained: 76 | # /^##/: 77 | # * save line in hold space 78 | # * purge line 79 | # * Loop: 80 | # * append newline + line to hold space 81 | # * go to next line 82 | # * if line starts with doc comment, strip comment character off and loop 83 | # * remove target prerequisites 84 | # * append hold space (+ newline) to line 85 | # * replace newline plus comments by `---` 86 | # * print line 87 | # Separate expressions are necessary because labels cannot be delimited by 88 | # semicolon; see 89 | .PHONY: help 90 | help: 91 | @echo "$$(tput bold)Available rules:$$(tput sgr0)" 92 | @echo 93 | @sed -n -e "/^## / { \ 94 | h; \ 95 | s/.*//; \ 96 | :doc" \ 97 | -e "H; \ 98 | n; \ 99 | s/^## //; \ 100 | t doc" \ 101 | -e "s/:.*//; \ 102 | G; \ 103 | s/\\n## /---/; \ 104 | s/\\n/ /g; \ 105 | p; \ 106 | }" ${MAKEFILE_LIST} \ 107 | | LC_ALL='C' sort --ignore-case \ 108 | | awk -F '---' \ 109 | -v ncol=$$(tput cols) \ 110 | -v indent=19 \ 111 | -v col_on="$$(tput setaf 
6)" \ 112 | -v col_off="$$(tput sgr0)" \ 113 | '{ \ 114 | printf "%s%*s%s ", col_on, -indent, $$1, col_off; \ 115 | n = split($$2, words, " "); \ 116 | line_length = ncol - indent; \ 117 | for (i = 1; i <= n; i++) { \ 118 | line_length -= length(words[i]) + 1; \ 119 | if (line_length <= 0) { \ 120 | line_length = ncol - indent - length(words[i]) - 1; \ 121 | printf "\n%*s ", -indent, " "; \ 122 | } \ 123 | printf "%s ", words[i]; \ 124 | } \ 125 | printf "\n"; \ 126 | }' \ 127 | | more $(shell test $(shell uname) = Darwin && echo '--no-init --raw-control-chars') 128 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | numpy = "<2.0.0,>=1.19.0" 8 | tqdm = "<5.0.0,>=4.40.0" 9 | scikit-image = "<1.0.0,>=0.18.0" 10 | pytest = "*" 11 | 12 | [dev-packages] 13 | ipykernel = "*" 14 | 15 | [requires] 16 | python_version = "3.10" 17 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "ad5037d96b7637de6eabf5e936e6e76eba8751abf095062f9b7737be6be110e3" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.10" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "exceptiongroup": { 20 | "hashes": [ 21 | "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", 22 | "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" 23 | ], 24 | "markers": "python_version >= '3.7'", 25 | "version": "==1.2.2" 26 | }, 27 | "imageio": { 28 | "hashes": [ 29 | "sha256:11efa15b87bc7871b61590326b2d635439acc321cf7f8ce996f812543ce10eed", 30 | "sha256:71b57b3669666272c818497aebba2b4c5f20d5b37c81720e5e1a56d59c492996" 31 | ], 32 | "markers": "python_version >= '3.9'", 33 | "version": "==2.37.0" 34 | }, 35 | "iniconfig": { 36 | "hashes": [ 37 | "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", 38 | "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" 39 | ], 40 | "markers": "python_version >= '3.8'", 41 | "version": "==2.1.0" 42 | }, 43 | "lazy-loader": { 44 | "hashes": [ 45 | "sha256:342aa8e14d543a154047afb4ba8ef17f5563baad3fc610d7b15b213b0f119efc", 46 | "sha256:47c75182589b91a4e1a85a136c074285a5ad4d9f39c63e0d7fb76391c4574cd1" 47 | ], 48 | "markers": "python_version >= '3.7'", 49 | "version": "==0.4" 50 | }, 51 | "networkx": { 52 | "hashes": [ 53 | "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", 54 | "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f" 55 | ], 56 | "markers": "python_version >= '3.10'", 57 | "version": "==3.4.2" 58 | }, 59 | "numpy": { 60 | "hashes": [ 61 | "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", 62 | "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", 63 | "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", 64 | "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", 65 | "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", 66 | "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", 67 | 
"sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea", 68 | "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c", 69 | "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", 70 | "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", 71 | "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be", 72 | "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", 73 | "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", 74 | "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", 75 | "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", 76 | "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd", 77 | "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c", 78 | "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", 79 | "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0", 80 | "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c", 81 | "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", 82 | "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", 83 | "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", 84 | "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6", 85 | "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", 86 | "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", 87 | "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30", 88 | "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", 89 | "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", 90 | "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", 91 | "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", 92 | "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", 93 | "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764", 94 | "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", 95 | "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", 96 | "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f" 97 | ], 98 | "index": "pypi", 99 | "markers": "python_version >= '3.9'", 100 | "version": "==1.26.4" 101 | }, 102 | "packaging": { 103 | "hashes": [ 104 | "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", 105 | "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" 106 | ], 107 | "markers": "python_version >= '3.8'", 108 | "version": "==24.2" 109 | }, 110 | "pillow": { 111 | "hashes": [ 112 | "sha256:015c6e863faa4779251436db398ae75051469f7c903b043a48f078e437656f83", 113 | "sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96", 114 | "sha256:11633d58b6ee5733bde153a8dafd25e505ea3d32e261accd388827ee987baf65", 115 | "sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a", 116 | "sha256:31eba6bbdd27dde97b0174ddf0297d7a9c3a507a8a1480e1e60ef914fe23d352", 117 | "sha256:3362c6ca227e65c54bf71a5f88b3d4565ff1bcbc63ae72c34b07bbb1cc59a43f", 118 | "sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20", 119 | "sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c", 120 | 
"sha256:3764d53e09cdedd91bee65c2527815d315c6b90d7b8b79759cc48d7bf5d4f114", 121 | "sha256:3a5fe20a7b66e8135d7fd617b13272626a28278d0e578c98720d9ba4b2439d49", 122 | "sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91", 123 | "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0", 124 | "sha256:4db853948ce4e718f2fc775b75c37ba2efb6aaea41a1a5fc57f0af59eee774b2", 125 | "sha256:4dd43a78897793f60766563969442020e90eb7847463eca901e41ba186a7d4a5", 126 | "sha256:54251ef02a2309b5eec99d151ebf5c9904b77976c8abdcbce7891ed22df53884", 127 | "sha256:54ce1c9a16a9561b6d6d8cb30089ab1e5eb66918cb47d457bd996ef34182922e", 128 | "sha256:593c5fd6be85da83656b93ffcccc2312d2d149d251e98588b14fbc288fd8909c", 129 | "sha256:5bb94705aea800051a743aa4874bb1397d4695fb0583ba5e425ee0328757f196", 130 | "sha256:67cd427c68926108778a9005f2a04adbd5e67c442ed21d95389fe1d595458756", 131 | "sha256:70ca5ef3b3b1c4a0812b5c63c57c23b63e53bc38e758b37a951e5bc466449861", 132 | "sha256:73ddde795ee9b06257dac5ad42fcb07f3b9b813f8c1f7f870f402f4dc54b5269", 133 | "sha256:758e9d4ef15d3560214cddbc97b8ef3ef86ce04d62ddac17ad39ba87e89bd3b1", 134 | "sha256:7d33d2fae0e8b170b6a6c57400e077412240f6f5bb2a342cf1ee512a787942bb", 135 | "sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a", 136 | "sha256:8000376f139d4d38d6851eb149b321a52bb8893a88dae8ee7d95840431977081", 137 | "sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1", 138 | "sha256:89dbdb3e6e9594d512780a5a1c42801879628b38e3efc7038094430844e271d8", 139 | "sha256:8c730dc3a83e5ac137fbc92dfcfe1511ce3b2b5d7578315b63dbbb76f7f51d90", 140 | "sha256:8e275ee4cb11c262bd108ab2081f750db2a1c0b8c12c1897f27b160c8bd57bbc", 141 | "sha256:9044b5e4f7083f209c4e35aa5dd54b1dd5b112b108648f5c902ad586d4f945c5", 142 | "sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1", 143 | "sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3", 144 | "sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35", 145 | "sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f", 146 | "sha256:9ee85f0696a17dd28fbcfceb59f9510aa71934b483d1f5601d1030c3c8304f3c", 147 | "sha256:a07dba04c5e22824816b2615ad7a7484432d7f540e6fa86af60d2de57b0fcee2", 148 | "sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2", 149 | "sha256:a697cd8ba0383bba3d2d3ada02b34ed268cb548b369943cd349007730c92bddf", 150 | "sha256:a76da0a31da6fcae4210aa94fd779c65c75786bc9af06289cd1c184451ef7a65", 151 | "sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b", 152 | "sha256:a8d65b38173085f24bc07f8b6c505cbb7418009fa1a1fcb111b1f4961814a442", 153 | "sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2", 154 | "sha256:ab8a209b8485d3db694fa97a896d96dd6533d63c22829043fd9de627060beade", 155 | "sha256:abc56501c3fd148d60659aae0af6ddc149660469082859fa7b066a298bde9482", 156 | "sha256:ad5db5781c774ab9a9b2c4302bbf0c1014960a0a7be63278d13ae6fdf88126fe", 157 | "sha256:ae98e14432d458fc3de11a77ccb3ae65ddce70f730e7c76140653048c71bfcbc", 158 | "sha256:b20be51b37a75cc54c2c55def3fa2c65bb94ba859dde241cd0a4fd302de5ae0a", 159 | "sha256:b523466b1a31d0dcef7c5be1f20b942919b62fd6e9a9be199d035509cbefc0ec", 160 | "sha256:b5d658fbd9f0d6eea113aea286b21d3cd4d3fd978157cbf2447a6035916506d3", 161 | "sha256:b6123aa4a59d75f06e9dd3dac5bf8bc9aa383121bb3dd9a7a612e05eabc9961a", 162 | "sha256:bd165131fd51697e22421d0e467997ad31621b74bfc0b75956608cb2906dda07", 163 | 
"sha256:bf902d7413c82a1bfa08b06a070876132a5ae6b2388e2712aab3a7cbc02205c6", 164 | "sha256:c12fc111ef090845de2bb15009372175d76ac99969bdf31e2ce9b42e4b8cd88f", 165 | "sha256:c1eec9d950b6fe688edee07138993e54ee4ae634c51443cfb7c1e7613322718e", 166 | "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192", 167 | "sha256:cc1331b6d5a6e144aeb5e626f4375f5b7ae9934ba620c0ac6b3e43d5e683a0f0", 168 | "sha256:cfd5cd998c2e36a862d0e27b2df63237e67273f2fc78f47445b14e73a810e7e6", 169 | "sha256:d3d8da4a631471dfaf94c10c85f5277b1f8e42ac42bade1ac67da4b4a7359b73", 170 | "sha256:d44ff19eea13ae4acdaaab0179fa68c0c6f2f45d66a4d8ec1eda7d6cecbcc15f", 171 | "sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6", 172 | "sha256:dd0e081319328928531df7a0e63621caf67652c8464303fd102141b785ef9547", 173 | "sha256:dda60aa465b861324e65a78c9f5cf0f4bc713e4309f83bc387be158b077963d9", 174 | "sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457", 175 | "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8", 176 | "sha256:e267b0ed063341f3e60acd25c05200df4193e15a4a5807075cd71225a2386e26", 177 | "sha256:e5449ca63da169a2e6068dd0e2fcc8d91f9558aba89ff6d02121ca8ab11e79e5", 178 | "sha256:e63e4e5081de46517099dc30abe418122f54531a6ae2ebc8680bcd7096860eab", 179 | "sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070", 180 | "sha256:f7955ecf5609dee9442cbface754f2c6e541d9e6eda87fad7f7a989b0bdb9d71", 181 | "sha256:f86d3a7a9af5d826744fabf4afd15b9dfef44fe69a98541f666f66fbb8d3fef9", 182 | "sha256:fbd43429d0d7ed6533b25fc993861b8fd512c42d04514a0dd6337fb3ccf22761" 183 | ], 184 | "markers": "python_version >= '3.9'", 185 | "version": "==11.1.0" 186 | }, 187 | "pluggy": { 188 | "hashes": [ 189 | "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", 190 | "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" 191 | ], 192 | "markers": "python_version >= '3.8'", 193 | "version": "==1.5.0" 194 | }, 195 | "pytest": { 196 | "hashes": [ 197 | "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", 198 | "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845" 199 | ], 200 | "index": "pypi", 201 | "markers": "python_version >= '3.8'", 202 | "version": "==8.3.5" 203 | }, 204 | "scikit-image": { 205 | "hashes": [ 206 | "sha256:24cc986e1f4187a12aa319f777b36008764e856e5013666a4a83f8df083c2641", 207 | "sha256:28182a9d3e2ce3c2e251383bdda68f8d88d9fff1a3ebe1eb61206595c9773341", 208 | "sha256:330d061bd107d12f8d68f1d611ae27b3b813b8cdb0300a71d07b1379178dd4cd", 209 | "sha256:483bd8cc10c3d8a7a37fae36dfa5b21e239bd4ee121d91cad1f81bba10cfb0ed", 210 | "sha256:5c311069899ce757d7dbf1d03e32acb38bb06153236ae77fcd820fd62044c063", 211 | "sha256:60516257c5a2d2f74387c502aa2f15a0ef3498fbeaa749f730ab18f0a40fd054", 212 | "sha256:64785a8acefee460ec49a354706db0b09d1f325674107d7fa3eadb663fb56d6f", 213 | "sha256:7efa888130f6c548ec0439b1a7ed7295bc10105458a421e9bf739b457730b6da", 214 | "sha256:8db8dd03663112783221bf01ccfc9512d1cc50ac9b5b0fe8f4023967564719fb", 215 | "sha256:9d1e80107bcf2bf1291acfc0bf0425dceb8890abe9f38d8e94e23497cbf7ee0d", 216 | "sha256:a17e17eb8562660cc0d31bb55643a4da996a81944b82c54805c91b3fe66f4824", 217 | "sha256:a4c464b90e978d137330be433df4e76d92ad3c5f46a22f159520ce0fdbea8a09", 218 | "sha256:b2cfc96b27afe9a05bc92f8c6235321d3a66499995675b27415e0d0c76625173", 219 | "sha256:b4f6b61fc2db6340696afe3db6b26e0356911529f5f6aee8c322aa5157490c9b", 220 | "sha256:b8abd3c805ce6944b941cfed0406d88faeb19bab3ed3d4b50187af55cf24d147", 
221 | "sha256:bdd2b8c1de0849964dbc54037f36b4e9420157e67e45a8709a80d727f52c7da2", 222 | "sha256:be455aa7039a6afa54e84f9e38293733a2622b8c2fb3362b822d459cc5605e99", 223 | "sha256:d3278f586793176599df6a4cf48cb6beadae35c31e58dc01a98023af3dc31c78", 224 | "sha256:d989d64ff92e0c6c0f2018c7495a5b20e2451839299a018e0e5108b2680f71e0", 225 | "sha256:dd8011efe69c3641920614d550f5505f83658fe33581e49bed86feab43a180fc", 226 | "sha256:e5a37e6cd4d0c018a7a55b9d601357e3382826d3888c10d0213fc63bff977dde", 227 | "sha256:f4bac9196fb80d37567316581c6060763b0f4893d3aca34a9ede3825bc035b17" 228 | ], 229 | "index": "pypi", 230 | "markers": "python_version >= '3.10'", 231 | "version": "==0.25.2" 232 | }, 233 | "scipy": { 234 | "hashes": [ 235 | "sha256:01edfac9f0798ad6b46d9c4c9ca0e0ad23dbf0b1eb70e96adb9fa7f525eff0bf", 236 | "sha256:03205d57a28e18dfd39f0377d5002725bf1f19a46f444108c29bdb246b6c8a11", 237 | "sha256:08b57a9336b8e79b305a143c3655cc5bdbe6d5ece3378578888d2afbb51c4e37", 238 | "sha256:11e7ad32cf184b74380f43d3c0a706f49358b904fa7d5345f16ddf993609184d", 239 | "sha256:28a0d2c2075946346e4408b211240764759e0fabaeb08d871639b5f3b1aca8a0", 240 | "sha256:2b871df1fe1a3ba85d90e22742b93584f8d2b8e6124f8372ab15c71b73e428b8", 241 | "sha256:302093e7dfb120e55515936cb55618ee0b895f8bcaf18ff81eca086c17bd80af", 242 | "sha256:42dabaaa798e987c425ed76062794e93a243be8f0f20fff6e7a89f4d61cb3d40", 243 | "sha256:447ce30cee6a9d5d1379087c9e474628dab3db4a67484be1b7dc3196bfb2fac9", 244 | "sha256:4c6676490ad76d1c2894d77f976144b41bd1a4052107902238047fb6a473e971", 245 | "sha256:54c462098484e7466362a9f1672d20888f724911a74c22ae35b61f9c5919183d", 246 | "sha256:597a0c7008b21c035831c39927406c6181bcf8f60a73f36219b69d010aa04737", 247 | "sha256:5a6fd6eac1ce74a9f77a7fc724080d507c5812d61e72bd5e4c489b042455865e", 248 | "sha256:5ea7ed46d437fc52350b028b1d44e002646e28f3e8ddc714011aaf87330f2f32", 249 | "sha256:601881dfb761311045b03114c5fe718a12634e5608c3b403737ae463c9885d53", 250 | "sha256:62ca1ff3eb513e09ed17a5736929429189adf16d2d740f44e53270cc800ecff1", 251 | "sha256:69ea6e56d00977f355c0f84eba69877b6df084516c602d93a33812aa04d90a3d", 252 | "sha256:6a8e34cf4c188b6dd004654f88586d78f95639e48a25dfae9c5e34a6dc34547e", 253 | "sha256:6d0194c37037707b2afa7a2f2a924cf7bac3dc292d51b6a925e5fcb89bc5c776", 254 | "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5", 255 | "sha256:6f5e296ec63c5da6ba6fa0343ea73fd51b8b3e1a300b0a8cae3ed4b1122c7462", 256 | "sha256:7cd5b77413e1855351cdde594eca99c1f4a588c2d63711388b6a1f1c01f62274", 257 | "sha256:869269b767d5ee7ea6991ed7e22b3ca1f22de73ab9a49c44bad338b725603301", 258 | "sha256:87994da02e73549dfecaed9e09a4f9d58a045a053865679aeb8d6d43747d4df3", 259 | "sha256:888307125ea0c4466287191e5606a2c910963405ce9671448ff9c81c53f85f58", 260 | "sha256:92233b2df6938147be6fa8824b8136f29a18f016ecde986666be5f4d686a91a4", 261 | "sha256:9412f5e408b397ff5641080ed1e798623dbe1ec0d78e72c9eca8992976fa65aa", 262 | "sha256:9b18aa747da280664642997e65aab1dd19d0c3d17068a04b3fe34e2559196cb9", 263 | "sha256:9de9d1416b3d9e7df9923ab23cd2fe714244af10b763975bea9e4f2e81cebd27", 264 | "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9", 265 | "sha256:a5080a79dfb9b78b768cebf3c9dcbc7b665c5875793569f48bf0e2b1d7f68f6f", 266 | "sha256:a8bf5cb4a25046ac61d38f8d3c3426ec11ebc350246a4642f2f315fe95bda655", 267 | "sha256:b09ae80010f52efddb15551025f9016c910296cf70adbf03ce2a8704f3a5ad20", 268 | "sha256:b5e025e903b4f166ea03b109bb241355b9c42c279ea694d8864d033727205e65", 269 | "sha256:bad78d580270a4d32470563ea86c6590b465cb98f83d760ff5b0990cb5518a93", 270 
| "sha256:bae43364d600fdc3ac327db99659dcb79e6e7ecd279a75fe1266669d9a652828", 271 | "sha256:c4697a10da8f8765bb7c83e24a470da5797e37041edfd77fd95ba3811a47c4fd", 272 | "sha256:c90ebe8aaa4397eaefa8455a8182b164a6cc1d59ad53f79943f266d99f68687f", 273 | "sha256:cd58a314d92838f7e6f755c8a2167ead4f27e1fd5c1251fd54289569ef3495ec", 274 | "sha256:cf72ff559a53a6a6d77bd8eefd12a17995ffa44ad86c77a5df96f533d4e6c6bb", 275 | "sha256:def751dd08243934c884a3221156d63e15234a3155cf25978b0a668409d45eb6", 276 | "sha256:e7c68b6a43259ba0aab737237876e5c2c549a031ddb7abc28c7b47f22e202ded", 277 | "sha256:ecf797d2d798cf7c838c6d98321061eb3e72a74710e6c40540f0e8087e3b499e", 278 | "sha256:f031846580d9acccd0044efd1a90e6f4df3a6e12b4b6bd694a7bc03a89892b28", 279 | "sha256:fb530e4794fc8ea76a4a21ccb67dea33e5e0e60f07fc38a49e821e1eae3b71a0", 280 | "sha256:fe8a9eb875d430d81755472c5ba75e84acc980e4a8f6204d402849234d3017db" 281 | ], 282 | "markers": "python_version >= '3.10'", 283 | "version": "==1.15.2" 284 | }, 285 | "tifffile": { 286 | "hashes": [ 287 | "sha256:10f205b923c04678f744a6d553f6f86c639c9ba6e714f6758d81af0678ba75dc", 288 | "sha256:30fcc4584216937b5993d0568452b6fea8e12e61f9afb1a8e967c07c281faa06" 289 | ], 290 | "markers": "python_version >= '3.10'", 291 | "version": "==2025.3.13" 292 | }, 293 | "tomli": { 294 | "hashes": [ 295 | "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", 296 | "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", 297 | "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", 298 | "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", 299 | "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", 300 | "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", 301 | "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", 302 | "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", 303 | "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", 304 | "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", 305 | "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", 306 | "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", 307 | "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", 308 | "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", 309 | "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", 310 | "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", 311 | "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", 312 | "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", 313 | "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", 314 | "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", 315 | "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", 316 | "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", 317 | "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", 318 | "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", 319 | "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", 320 | "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", 321 | "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", 322 | 
"sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", 323 | "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", 324 | "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", 325 | "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", 326 | "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7" 327 | ], 328 | "markers": "python_version >= '3.8'", 329 | "version": "==2.2.1" 330 | }, 331 | "tqdm": { 332 | "hashes": [ 333 | "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", 334 | "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2" 335 | ], 336 | "index": "pypi", 337 | "markers": "python_version >= '3.7'", 338 | "version": "==4.67.1" 339 | } 340 | }, 341 | "develop": { 342 | "appnope": { 343 | "hashes": [ 344 | "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", 345 | "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c" 346 | ], 347 | "markers": "python_version >= '3.6'", 348 | "version": "==0.1.4" 349 | }, 350 | "asttokens": { 351 | "hashes": [ 352 | "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", 353 | "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2" 354 | ], 355 | "markers": "python_version >= '3.8'", 356 | "version": "==3.0.0" 357 | }, 358 | "comm": { 359 | "hashes": [ 360 | "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", 361 | "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3" 362 | ], 363 | "markers": "python_version >= '3.8'", 364 | "version": "==0.2.2" 365 | }, 366 | "debugpy": { 367 | "hashes": [ 368 | "sha256:06859f68e817966723ffe046b896b1bd75c665996a77313370336ee9e1de3e90", 369 | "sha256:2b8de94c5c78aa0d0ed79023eb27c7c56a64c68217d881bee2ffbcb13951d0c1", 370 | "sha256:31abc9618be4edad0b3e3a85277bc9ab51a2d9f708ead0d99ffb5bb750e18503", 371 | "sha256:32b6857f8263a969ce2ca098f228e5cc0604d277447ec05911a8c46cf3e7e307", 372 | "sha256:3872ce5453b17837ef47fb9f3edc25085ff998ce63543f45ba7af41e7f7d370f", 373 | "sha256:3dae443739c6b604802da9f3e09b0f45ddf1cf23c99161f3a1a8039f61a8bb89", 374 | "sha256:46abe0b821cad751fc1fb9f860fb2e68d75e2c5d360986d0136cd1db8cad4428", 375 | "sha256:4caca674206e97c85c034c1efab4483f33971d4e02e73081265ecb612af65377", 376 | "sha256:5268ae7fdca75f526d04465931cb0bd24577477ff50e8bb03dab90983f4ebd02", 377 | "sha256:62f9b4a861c256f37e163ada8cf5a81f4c8d5148fc17ee31fb46813bd658cdcc", 378 | "sha256:63ca7670563c320503fea26ac688988d9d6b9c6a12abc8a8cf2e7dd8e5f6b6ea", 379 | "sha256:6fab771639332bd8ceb769aacf454a30d14d7a964f2012bf9c4e04c60f16e85b", 380 | "sha256:79ce4ed40966c4c1631d0131606b055a5a2f8e430e3f7bf8fd3744b09943e8e8", 381 | "sha256:7d9a05efc6973b5aaf076d779cf3a6bbb1199e059a17738a2aa9d27a53bcc888", 382 | "sha256:837e7bef95bdefba426ae38b9a94821ebdc5bea55627879cd48165c90b9e50ce", 383 | "sha256:887d54276cefbe7290a754424b077e41efa405a3e07122d8897de54709dbe522", 384 | "sha256:924464d87e7d905eb0d79fb70846558910e906d9ee309b60c4fe597a2e802590", 385 | "sha256:a0bd87557f97bced5513a74088af0b84982b6ccb2e254b9312e29e8a5c4270eb", 386 | "sha256:acf39a6e98630959763f9669feddee540745dfc45ad28dbc9bd1f9cd60639391", 387 | "sha256:cb56c2db69fb8df3168bc857d7b7d2494fed295dfdbde9a45f27b4b152f37520", 388 | "sha256:d4ba115cdd0e3a70942bd562adba9ec8c651fe69ddde2298a1be296fc331906f", 389 | "sha256:dc7b77f5d32674686a5f06955e4b18c0e41fb5a605f5b33cf225790f114cfeec", 390 | 
"sha256:ea869fe405880327497e6945c09365922c79d2a1eed4c3ae04d77ac7ae34b2b5", 391 | "sha256:ed93c3155fc1f888ab2b43626182174e457fc31b7781cd1845629303790b8ad1", 392 | "sha256:eee02b2ed52a563126c97bf04194af48f2fe1f68bb522a312b05935798e922ff", 393 | "sha256:f14d2c4efa1809da125ca62df41050d9c7cd9cb9e380a2685d1e453c4d450ccb" 394 | ], 395 | "markers": "python_version >= '3.8'", 396 | "version": "==1.8.13" 397 | }, 398 | "decorator": { 399 | "hashes": [ 400 | "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", 401 | "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a" 402 | ], 403 | "markers": "python_version >= '3.8'", 404 | "version": "==5.2.1" 405 | }, 406 | "exceptiongroup": { 407 | "hashes": [ 408 | "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", 409 | "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" 410 | ], 411 | "markers": "python_version >= '3.7'", 412 | "version": "==1.2.2" 413 | }, 414 | "executing": { 415 | "hashes": [ 416 | "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", 417 | "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755" 418 | ], 419 | "markers": "python_version >= '3.8'", 420 | "version": "==2.2.0" 421 | }, 422 | "ipykernel": { 423 | "hashes": [ 424 | "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", 425 | "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215" 426 | ], 427 | "index": "pypi", 428 | "markers": "python_version >= '3.8'", 429 | "version": "==6.29.5" 430 | }, 431 | "ipython": { 432 | "hashes": [ 433 | "sha256:0419883fa46e0baa182c5d50ebb8d6b49df1889fdb70750ad6d8cfe678eda6e3", 434 | "sha256:c31d658e754673ecc6514583e7dda8069e47136eb62458816b7d1e6625948b5a" 435 | ], 436 | "markers": "python_version >= '3.10'", 437 | "version": "==8.34.0" 438 | }, 439 | "jedi": { 440 | "hashes": [ 441 | "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", 442 | "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9" 443 | ], 444 | "markers": "python_version >= '3.6'", 445 | "version": "==0.19.2" 446 | }, 447 | "jupyter-client": { 448 | "hashes": [ 449 | "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", 450 | "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f" 451 | ], 452 | "markers": "python_version >= '3.8'", 453 | "version": "==8.6.3" 454 | }, 455 | "jupyter-core": { 456 | "hashes": [ 457 | "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", 458 | "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9" 459 | ], 460 | "markers": "python_version >= '3.8'", 461 | "version": "==5.7.2" 462 | }, 463 | "matplotlib-inline": { 464 | "hashes": [ 465 | "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", 466 | "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca" 467 | ], 468 | "markers": "python_version >= '3.8'", 469 | "version": "==0.1.7" 470 | }, 471 | "nest-asyncio": { 472 | "hashes": [ 473 | "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", 474 | "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c" 475 | ], 476 | "markers": "python_version >= '3.5'", 477 | "version": "==1.6.0" 478 | }, 479 | "packaging": { 480 | "hashes": [ 481 | "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", 482 | "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" 483 | ], 484 
| "markers": "python_version >= '3.8'", 485 | "version": "==24.2" 486 | }, 487 | "parso": { 488 | "hashes": [ 489 | "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", 490 | "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d" 491 | ], 492 | "markers": "python_version >= '3.6'", 493 | "version": "==0.8.4" 494 | }, 495 | "pexpect": { 496 | "hashes": [ 497 | "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", 498 | "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f" 499 | ], 500 | "markers": "sys_platform != 'win32' and sys_platform != 'emscripten'", 501 | "version": "==4.9.0" 502 | }, 503 | "platformdirs": { 504 | "hashes": [ 505 | "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94", 506 | "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351" 507 | ], 508 | "markers": "python_version >= '3.9'", 509 | "version": "==4.3.7" 510 | }, 511 | "prompt-toolkit": { 512 | "hashes": [ 513 | "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab", 514 | "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198" 515 | ], 516 | "markers": "python_full_version >= '3.8.0'", 517 | "version": "==3.0.50" 518 | }, 519 | "psutil": { 520 | "hashes": [ 521 | "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", 522 | "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e", 523 | "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", 524 | "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", 525 | "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", 526 | "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", 527 | "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", 528 | "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17", 529 | "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", 530 | "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99" 531 | ], 532 | "markers": "python_version >= '3.6'", 533 | "version": "==7.0.0" 534 | }, 535 | "ptyprocess": { 536 | "hashes": [ 537 | "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", 538 | "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220" 539 | ], 540 | "version": "==0.7.0" 541 | }, 542 | "pure-eval": { 543 | "hashes": [ 544 | "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", 545 | "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42" 546 | ], 547 | "version": "==0.2.3" 548 | }, 549 | "pygments": { 550 | "hashes": [ 551 | "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", 552 | "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c" 553 | ], 554 | "markers": "python_version >= '3.8'", 555 | "version": "==2.19.1" 556 | }, 557 | "python-dateutil": { 558 | "hashes": [ 559 | "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", 560 | "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" 561 | ], 562 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", 563 | "version": "==2.9.0.post0" 564 | }, 565 | "pyzmq": { 566 | "hashes": [ 567 | "sha256:009a38241c76184cb004c869e82a99f0aee32eda412c1eb44df5820324a01d25", 568 | "sha256:016d89bee8c7d566fad75516b4e53ec7c81018c062d4c51cd061badf9539be52", 569 | 
"sha256:0240289e33e3fbae44a5db73e54e955399179332a6b1d47c764a4983ec1524c3", 570 | "sha256:03caa2ffd64252122139d50ec92987f89616b9b92c9ba72920b40e92709d5e26", 571 | "sha256:04bfe59852d76d56736bfd10ac1d49d421ab8ed11030b4a0332900691507f557", 572 | "sha256:0b42403ad7d1194dca9574cd3c56691c345f4601fa2d0a33434f35142baec7ac", 573 | "sha256:0f3dfb68cf7bf4cfdf34283a75848e077c5defa4907506327282afe92780084d", 574 | "sha256:14e0b81753424bd374075df6cc30b87f2c99e5f022501d97eff66544ca578941", 575 | "sha256:1586944f4736515af5c6d3a5b150c7e8ca2a2d6e46b23057320584d6f2438f4a", 576 | "sha256:1595533de3a80bf8363372c20bafa963ec4bf9f2b8f539b1d9a5017f430b84c9", 577 | "sha256:1614fcd116275d24f2346ffca4047a741c546ad9d561cbf7813f11226ca4ed2c", 578 | "sha256:18183cc3851b995fdc7e5f03d03b8a4e1b12b0f79dff1ec1da75069af6357a05", 579 | "sha256:1fc81d5d60c9d40e692de14b8d884d43cf67562402b931681f0ccb3ce6b19875", 580 | "sha256:1fe05bd0d633a0f672bb28cb8b4743358d196792e1caf04973b7898a0d70b046", 581 | "sha256:209d09f0ab6ddbcebe64630d1e6ca940687e736f443c265ae15bc4bfad833597", 582 | "sha256:21399b31753bf321043ea60c360ed5052cc7be20739785b1dff1820f819e35b3", 583 | "sha256:214038aaa88e801e54c2ef0cfdb2e6df27eb05f67b477380a452b595c5ecfa37", 584 | "sha256:21c6ddb98557a77cfe3366af0c5600fb222a1b2de5f90d9cd052b324e0c295e8", 585 | "sha256:240b1634b9e530ef6a277d95cbca1a6922f44dfddc5f0a3cd6c722a8de867f14", 586 | "sha256:2833602d9d42c94b9d0d2a44d2b382d3d3a4485be018ba19dddc401a464c617a", 587 | "sha256:2aa1a9f236d5b835fb8642f27de95f9edcfd276c4bc1b6ffc84f27c6fb2e2981", 588 | "sha256:2cf9ab0dff4dbaa2e893eb608373c97eb908e53b7d9793ad00ccbd082c0ee12f", 589 | "sha256:2d3459b6311463c96abcb97808ee0a1abb0d932833edb6aa81c30d622fd4a12d", 590 | "sha256:2dc46ec09f5d36f606ac8393303149e69d17121beee13c8dac25e2a2078e31c4", 591 | "sha256:2eaed0d911fb3280981d5495978152fab6afd9fe217fd16f411523665089cef1", 592 | "sha256:356ec0e39c5a9cda872b65aca1fd8a5d296ffdadf8e2442b70ff32e73ef597b1", 593 | "sha256:3ec332675f6a138db57aad93ae6387953763f85419bdbd18e914cb279ee1c451", 594 | "sha256:3fc9e71490d989144981ea21ef4fdfaa7b6aa84aff9632d91c736441ce2f6b00", 595 | "sha256:49334faa749d55b77f084389a80654bf2e68ab5191c0235066f0140c1b670d64", 596 | "sha256:4b127cfe10b4c56e4285b69fd4b38ea1d368099ea4273d8fb349163fce3cd598", 597 | "sha256:4c22a12713707467abedc6d75529dd365180c4c2a1511268972c6e1d472bd63e", 598 | "sha256:4e2cafe7e9c7fed690e8ecf65af119f9c482923b5075a78f6f7629c63e1b4b1d", 599 | "sha256:52b064fafef772d0f5dbf52d4c39f092be7bc62d9a602fe6e82082e001326de3", 600 | "sha256:5e17cc198dc50a25a0f245e6b1e56f692df2acec3ccae82d1f60c34bfb72bbec", 601 | "sha256:6d64e74143587efe7c9522bb74d1448128fdf9897cc9b6d8b9927490922fd558", 602 | "sha256:6e317ee1d4528a03506cb1c282cd9db73660a35b3564096de37de7350e7d87a7", 603 | "sha256:6fb539a6382a048308b409d8c66d79bf636eda1b24f70c78f2a1fd16e92b037b", 604 | "sha256:70b3acb9ad729a53d4e751dace35404a024f188aad406013454216aba5485b4e", 605 | "sha256:73ca9ae9a9011b714cf7650450cd9c8b61a135180b708904f1f0a05004543dce", 606 | "sha256:749d671b0eec8e738bbf0b361168369d8c682b94fcd458c20741dc4d69ef5278", 607 | "sha256:7897b8c8bbbb2bd8cad887bffcb07aede71ef1e45383bd4d6ac049bf0af312a4", 608 | "sha256:7998b60ef1c105846fb3bfca494769fde3bba6160902e7cd27a8df8257890ee9", 609 | "sha256:7a4ac2ffa34f1212dd586af90f4ba894e424f0cabb3a49cdcff944925640f6ac", 610 | "sha256:8ab78dc21c7b1e13053086bcf0b4246440b43b5409904b73bfd1156654ece8a1", 611 | "sha256:8c088e009a6d6b9f563336adb906e3a8d3fd64db129acc8d8fd0e9fe22b2dac8", 612 | 
"sha256:9176856f36c34a8aa5c0b35ddf52a5d5cd8abeece57c2cd904cfddae3fd9acd3", 613 | "sha256:91dead2daca698ae52ce70ee2adbb94ddd9b5f96877565fd40aa4efd18ecc6a3", 614 | "sha256:92a30840f4f2a31f7049d0a7de5fc69dd03b19bd5d8e7fed8d0bde49ce49b589", 615 | "sha256:943a22ebb3daacb45f76a9bcca9a7b74e7d94608c0c0505da30af900b998ca8d", 616 | "sha256:952085a09ff32115794629ba47f8940896d7842afdef1283332109d38222479d", 617 | "sha256:96c0006a8d1d00e46cb44c8e8d7316d4a232f3d8f2ed43179d4578dbcb0829b6", 618 | "sha256:979486d444ca3c469cd1c7f6a619ce48ff08b3b595d451937db543754bfacb65", 619 | "sha256:9b0137a1c40da3b7989839f9b78a44de642cdd1ce20dcef341de174c8d04aa53", 620 | "sha256:a995404bd3982c089e57b428c74edd5bfc3b0616b3dbcd6a8e270f1ee2110f36", 621 | "sha256:aa7efc695d1fc9f72d91bf9b6c6fe2d7e1b4193836ec530a98faf7d7a7577a58", 622 | "sha256:ad03f4252d9041b0635c37528dfa3f44b39f46024ae28c8567f7423676ee409b", 623 | "sha256:b2db7c82f08b8ce44c0b9d1153ce63907491972a7581e8b6adea71817f119df8", 624 | "sha256:b2eddfbbfb473a62c3a251bb737a6d58d91907f6e1d95791431ebe556f47d916", 625 | "sha256:b380e9087078ba91e45fb18cdd0c25275ffaa045cf63c947be0ddae6186bc9d9", 626 | "sha256:b4fc9903a73c25be9d5fe45c87faababcf3879445efa16140146b08fccfac017", 627 | "sha256:b72206eb041f780451c61e1e89dbc3705f3d66aaaa14ee320d4f55864b13358a", 628 | "sha256:b7b578d604e79e99aa39495becea013fd043fa9f36e4b490efa951f3d847a24d", 629 | "sha256:ba698c7c252af83b6bba9775035263f0df5f807f0404019916d4b71af8161f66", 630 | "sha256:bad7fe0372e505442482ca3ccbc0d6f38dae81b1650f57a0aa6bbee18e7df495", 631 | "sha256:bbef99eb8d18ba9a40f00e8836b8040cdcf0f2fa649684cf7a66339599919d21", 632 | "sha256:bd84441e4021cec6e4dd040550386cd9c9ea1d9418ea1a8002dbb7b576026b2b", 633 | "sha256:be77efd735bb1064605be8dec6e721141c1421ef0b115ef54e493a64e50e9a52", 634 | "sha256:c0b519fa2159c42272f8a244354a0e110d65175647e5185b04008ec00df9f079", 635 | "sha256:c15b69af22030960ac63567e98ad8221cddf5d720d9cf03d85021dfd452324ef", 636 | "sha256:c1bd75d692cd7c6d862a98013bfdf06702783b75cffbf5dae06d718fecefe8f2", 637 | "sha256:c208a977843d18d3bd185f323e4eaa912eb4869cb230947dc6edd8a27a4e558a", 638 | "sha256:c4430c7cba23bb0e2ee203eee7851c1654167d956fc6d4b3a87909ccaf3c5825", 639 | "sha256:c80653332c6136da7f4d4e143975e74ac0fa14f851f716d90583bc19e8945cea", 640 | "sha256:cf6db401f4957afbf372a4730c6d5b2a234393af723983cbf4bcd13d54c71e1a", 641 | "sha256:cf736cc1298ef15280d9fcf7a25c09b05af016656856dc6fe5626fd8912658dd", 642 | "sha256:d015efcd96aca8882057e7e6f06224f79eecd22cad193d3e6a0a91ec67590d1f", 643 | "sha256:d35cc1086f1d4f907df85c6cceb2245cb39a04f69c3f375993363216134d76d4", 644 | "sha256:d7165bcda0dbf203e5ad04d79955d223d84b2263df4db92f525ba370b03a12ab", 645 | "sha256:d8270d104ec7caa0bdac246d31d48d94472033ceab5ba142881704350b28159c", 646 | "sha256:da87e977f92d930a3683e10ba2b38bcc59adfc25896827e0b9d78b208b7757a6", 647 | "sha256:e281a8071a06888575a4eb523c4deeefdcd2f5fe4a2d47e02ac8bf3a5b49f695", 648 | "sha256:e34a63f71d2ecffb3c643909ad2d488251afeb5ef3635602b3448e609611a7ed", 649 | "sha256:eb96568a22fe070590942cd4780950e2172e00fb033a8b76e47692583b1bd97c", 650 | "sha256:eddc2be28a379c218e0d92e4a432805dcb0ca5870156a90b54c03cd9799f9f8a", 651 | "sha256:efba4f53ac7752eea6d8ca38a4ddac579e6e742fba78d1e99c12c95cd2acfc64", 652 | "sha256:f1cd68b8236faab78138a8fc703f7ca0ad431b17a3fcac696358600d4e6243b3", 653 | "sha256:f52eba83272a26b444f4b8fc79f2e2c83f91d706d693836c9f7ccb16e6713c31", 654 | "sha256:f950f17ae608e0786298340163cac25a4c5543ef25362dd5ddb6dcb10b547be9", 655 | 
"sha256:fa85953df84beb7b8b73cb3ec3f5d92b62687a09a8e71525c6734e020edf56fd", 656 | "sha256:fbf206e5329e20937fa19bd41cf3af06d5967f8f7e86b59d783b26b40ced755c", 657 | "sha256:fd30fc80fe96efb06bea21667c5793bbd65c0dc793187feb39b8f96990680b00", 658 | "sha256:fe67291775ea4c2883764ba467eb389c29c308c56b86c1e19e49c9e1ed0cbeca", 659 | "sha256:fea7efbd7e49af9d7e5ed6c506dfc7de3d1a628790bd3a35fd0e3c904dc7d464" 660 | ], 661 | "markers": "python_version >= '3.8'", 662 | "version": "==26.3.0" 663 | }, 664 | "six": { 665 | "hashes": [ 666 | "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", 667 | "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" 668 | ], 669 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", 670 | "version": "==1.17.0" 671 | }, 672 | "stack-data": { 673 | "hashes": [ 674 | "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", 675 | "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695" 676 | ], 677 | "version": "==0.6.3" 678 | }, 679 | "tornado": { 680 | "hashes": [ 681 | "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803", 682 | "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec", 683 | "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482", 684 | "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634", 685 | "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38", 686 | "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b", 687 | "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c", 688 | "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf", 689 | "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946", 690 | "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73", 691 | "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1" 692 | ], 693 | "markers": "python_version >= '3.8'", 694 | "version": "==6.4.2" 695 | }, 696 | "traitlets": { 697 | "hashes": [ 698 | "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", 699 | "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f" 700 | ], 701 | "markers": "python_version >= '3.8'", 702 | "version": "==5.14.3" 703 | }, 704 | "typing-extensions": { 705 | "hashes": [ 706 | "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", 707 | "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" 708 | ], 709 | "markers": "python_version >= '3.8'", 710 | "version": "==4.12.2" 711 | }, 712 | "wcwidth": { 713 | "hashes": [ 714 | "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", 715 | "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5" 716 | ], 717 | "version": "==0.2.13" 718 | } 719 | } 720 | } 721 | -------------------------------------------------------------------------------- /PyPi.md: -------------------------------------------------------------------------------- 1 | # Split Raster 2 | 3 | Provide good support for deep learning and computer vision tasks by creating a tiled output from an input raster dataset. 
4 | 5 | Find more details: 6 | 7 | - Document: [split_raster](https://cuicaihao.github.io/split_raster/) 8 | - Pypi: [splitraster](https://pypi.org/project/splitraster/) 9 | 10 | 11 | ## Use the packages 12 | 13 | ```bash 14 | pip install splitraster 15 | ``` 16 | 17 | ## Try Sample code 18 | 19 | The sample image can be found in the GitHub repo. 20 | 21 | ```python 22 | 23 | from splitraster import io 24 | 25 | input_image_path = "./data/raw/RGB.png" 26 | gt_image_path = "./data/raw/GT.png" 27 | 28 | save_path = "./data/processed/RGB" 29 | crop_size = 256 30 | repetition_rate = 0.5 31 | overwrite = False 32 | 33 | n = io.split_image(input_image_path, save_path, crop_size, 34 | repetition_rate=repetition_rate, overwrite=overwrite) 35 | print(f"{n} tiles sample of {input_image_path} are added at {save_path}") 36 | 37 | save_path_gt = "./data/processed/GT" 38 | n = io.split_image(gt_image_path, save_path_gt, crop_size, 39 | repetition_rate=repetition_rate, overwrite=overwrite) 40 | print(f"{n} tiles sample of {gt_image_path} are added at {save_path_gt}") 41 | 42 | 43 | ``` 44 | 45 | Possible results: 46 | 47 | ```bash 48 | Successfully installed splitraster-0.1.0 49 | ❯ python test.py 50 | Input Image File Shape (H, W, D):(1000, 1000, 3) 51 | crop_size=256, stride=128 52 | Padding Image File Shape (H, W, D):(1024, 1024, 3) 53 | There are 49 files in the ./data/processed/RGB 54 | New image name will start with 50 55 | Generating: 100%|█████████████████████████████████████████████████████████████| 49/49 [00:00<00:00, 50.65img/s] 56 | 49 tiles sample of ./data/raw/RGB.png are added at ./data/processed/RGB 57 | Input Image File Shape (H, W, D):(1000, 1000) 58 | crop_size=256, stride=128 59 | Padding Image File Shape (H, W, D):(1024, 1024) 60 | There are 49 files in the ./data/processed/GT 61 | New image name will start with 50 62 | Generating: 100%|████████████████████████████████████████████████████████████| 49/49 [00:00<00:00, 139.72img/s] 63 | 49 tiles sample of ./data/raw/GT.png are added at ./data/processed/GT 64 | ``` 65 | 66 | You can also work with Remote Sensing (GeoTIFF) satellite images, such as multispectral images that have more bands or channels. The code stays the same, with one small difference: replace the `io` module with the `geo` module. 67 | 68 | This feature also requires the `gdal` package, which you can install with the following command in your Python environment. 69 | This package is not listed as a required dependency because many users may not need this function. 70 | 71 | ```bash 72 | conda install -c conda-forge gdal 73 | ``` 74 | 75 | Sample Code: 76 | 77 | ```Python 78 | from splitraster import geo 79 | input_image_path = "./data/raw/Input.tif" 80 | gt_image_path = "./data/raw/GT.tif" 81 | 82 | save_path = "./data/processed/Input" 83 | crop_size = 256 84 | repetition_rate = 0.5 85 | overwrite = False 86 | 87 | n = geo.split_image(input_image_path, save_path, crop_size, 88 | repetition_rate=repetition_rate, overwrite=overwrite) 89 | print(f"{n} tiles sample of {input_image_path} are added at {save_path}") 90 | ``` 91 | 92 | ## Random Sampling Code 93 | 94 | The basic usage is the same as above. Just replace the `split_image` method with `random_crop_image`.
95 | 96 | ```python 97 | from splitraster import io 98 | input_image_path = "./data/raw/RGB.png" 99 | gt_image_path = "./data/raw/GT.png" 100 | 101 | input_save_path = "./data/processed/Rand/RGB" 102 | gt_save_path = "./data/processed/Rand/GT" 103 | 104 | n = io.random_crop_image(input_image_path, input_save_path, gt_image_path, gt_save_path, crop_size=256, crop_number=20, img_ext='.png', label_ext='.png', overwrite=True) 105 | 106 | print(f"{n} sample pairs of {input_image_path, gt_image_path} are added at {input_save_path, gt_save_path}.") 107 | 108 | ``` 109 | 110 | ```python 111 | from splitraster import geo 112 | input_tif_image_path = "./data/raw/TIF/RGB5k.tif" 113 | gt_tif_image_path = "./data/raw/TIF/GT5k.tif" 114 | 115 | input_save_image_path = "./data/processed/Rand/RGB_TIF" 116 | gt_save_image_path = "./data/processed/Rand/GT_TIF" 117 | 118 | n = geo.random_crop_image(input_tif_image_path, input_save_image_path, gt_tif_image_path, gt_save_image_path, crop_size=500, crop_number=20, overwrite=True) 119 | 120 | print(f"{n} sample pairs of {input_tif_image_path, gt_tif_image_path} are added at {input_save_image_path, gt_save_image_path}.") 121 | 122 | ``` 123 | 124 | Future Updates: 125 | 126 | - [x] Add Random Sampling feature. 127 | - [ ] Create a GUI with Qt and generate an executable file. 128 | - [ ] Add Sample Balancing feature. 129 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Split Raster 2 | 3 | [![image](https://img.shields.io/pypi/pyversions/splitraster)](https://python.org/pypi/splitraster) 4 | [![image](https://img.shields.io/pypi/v/splitraster?color=g)](https://python.org/pypi/splitraster) 5 | [![SplitRaster Development Build Install and Test](https://github.com/cuicaihao/split_raster/actions/workflows/python-CI.yml/badge.svg?branch=develop)](https://github.com/cuicaihao/split_raster/actions/workflows/python-CI.yml) 6 | [![SplitRaster Package Release and Publish](https://github.com/cuicaihao/split_raster/actions/workflows/python-CD.yml/badge.svg)](https://github.com/cuicaihao/split_raster/actions/workflows/python-CD.yml) 7 | [![DMonth](https://pepy.tech/badge/splitraster/month)](https://pepy.tech/project/splitraster) 8 | [![DTotal](https://pepy.tech/badge/splitraster?)](https://pepy.tech/project/splitraster) 9 | [![image](https://img.shields.io/github/license/cuicaihao/split_raster?color=blue)](https://python.org/pypi/splitraster) 10 | 11 | 12 | - Document: [splitraster](https://cuicaihao.github.io/split_raster/) 13 | - Pypi: [splitraster](https://pypi.org/project/splitraster/) 14 | 15 | 16 | ## Introduction 17 | 18 | Split Raster is an open-source and highly versatile Python package designed to easily break down large images into smaller, more manageable tiles. While the package is particularly useful for deep learning and computer vision tasks, it can be applied to a wide range of applications. 19 | 20 | Initially developed by the author to provide optimal support for deep learning and computer vision tasks, Split Raster was specifically designed for image segmentation tasks on satellite images as well as remote sensing methods. By generating tiled output image samples from an input raster dataset, Split Raster enables more efficient and effective analysis of large images. The package also includes a random sampling function that generates a fixed number of tiles for early experimentation.
21 | 22 | For example, let's say you have a set of RGB and GT images, each with dimensions of 1000-by-1000 pixels. Utilizing Split Raster, you can easily generate 16 tiles, each with dimensions of 256x256 pixels and automatic padding on the edges. The package also allows for customization of tile size and overlap to better suit individual project needs. Furthermore, Split Raster handles the padding and naming of output images automatically, saving time and effort (e.g., 0001.png, 0002.png, ..., 9999.png). 23 | 24 | ![Sample Image](docs/img/split_raster_sample.png) 25 | 26 | ## Tutorial to generate the above sample image 27 | 28 | Open the notebook [SplitRaster Tutorial](notebooks/Tutorial.ipynb). 29 | 30 | This tutorial shows you how to use the package to split a large image into small tiles that you can then use for your deep learning and computer vision tasks. 31 | 32 | [`pytorch`](https://pytorch.org/) is used in this tutorial. You can also use `tensorflow` or other deep learning frameworks. 33 | 34 | ## Install the packages 35 | 36 | ```bash 37 | pip install splitraster 38 | ``` 39 | 40 | ## Try Sample code 41 | 42 | The sample image can be found in the GitHub repo. 43 | 44 | ```python 45 | 46 | from splitraster import io 47 | 48 | input_image_path = "./data/raw/RGB.png" 49 | gt_image_path = "./data/raw/GT.png" 50 | 51 | save_path = "./data/processed/RGB" 52 | save_path_gt = "./data/processed/GT" 53 | 54 | crop_size = 256 55 | repetition_rate = 0.5 56 | overwrite = False 57 | 58 | n = io.split_image(input_image_path, save_path, crop_size, 59 | repetition_rate=repetition_rate, overwrite=overwrite) 60 | print(f"{n} tiles sample of {input_image_path} are added at {save_path}") 61 | 62 | 63 | n = io.split_image(gt_image_path, save_path_gt, crop_size, 64 | repetition_rate=repetition_rate, overwrite=overwrite) 65 | print(f"{n} tiles sample of {gt_image_path} are added at {save_path_gt}") 66 | 67 | 68 | ``` 69 | 70 | Possible results: 71 | 72 | ```bash 73 | Successfully installed splitraster-0.*.* 74 | ❯ python test.py 75 | Input Image File Shape (H, W, D):(1000, 1000, 3) 76 | crop_size=256, stride=128 77 | Padding Image File Shape (H, W, D):(1024, 1024, 3) 78 | There are 49 files in the ./data/processed/RGB 79 | New image name will start with 50 80 | Generating: 100%|█████████████████████████████████████████████████████████████| 49/49 [00:00<00:00, 50.65img/s] 81 | 49 tiles sample of ./data/raw/RGB.png are added at ./data/processed/RGB 82 | Input Image File Shape (H, W, D):(1000, 1000) 83 | crop_size=256, stride=128 84 | Padding Image File Shape (H, W, D):(1024, 1024) 85 | There are 49 files in the ./data/processed/GT 86 | New image name will start with 50 87 | Generating: 100%|████████████████████████████████████████████████████████████| 49/49 [00:00<00:00, 139.72img/s] 88 | 49 tiles sample of ./data/raw/GT.png are added at ./data/processed/GT 89 | ``` 90 | 91 | Check Notebook for Details: [Tutorial](notebooks/Tutorial.ipynb) 92 | 93 | 94 | ## GIS TIFF Image 95 | 96 | You can also work with Remote Sensing (GeoTIFF) satellite images, such as multispectral images that have more bands or channels. The code stays the same, with one small difference: replace the `io` module with the `geo` module. 97 | 98 | This feature also requires the `gdal` package, which you can install with the following command in your Python environment. 99 | 100 | This package is not listed as a required dependency because many users may not need this function.
101 | 102 | However, if you do, consider creating the conda environment as follows for your application. 103 | 104 | ```bash 105 | conda create -n split_raster_py310 python=3.10 -y 106 | conda activate split_raster_py310 107 | conda install gdal -y 108 | conda install ipykernel -y 109 | pip install --upgrade pip 110 | pip install splitraster 111 | ``` 112 | 113 | On a Mac, you can install these using Homebrew: 114 | ```bash 115 | brew install gdal 116 | ``` 117 | Then, you can install the Python GDAL package: 118 | 119 | ```bash 120 | pip install GDAL 121 | ``` 122 | Please note that installing GDAL can be complex due to its system dependencies. If you encounter issues, you may need to consult the GDAL documentation or seek help from the community. 123 | 124 | 125 | Sample Code: 126 | 127 | ```Python 128 | from splitraster import geo 129 | input_image_path = "./data/raw/Input.tif" 130 | gt_image_path = "./data/raw/GT.tif" 131 | 132 | save_path = "./data/processed/Input" 133 | crop_size = 256 134 | repetition_rate = 0.5 135 | overwrite = False 136 | 137 | n = geo.split_image(input_image_path, save_path, crop_size, 138 | repetition_rate=repetition_rate, overwrite=overwrite) 139 | print(f"{n} tiles sample of {input_image_path} are added at {save_path}") 140 | ``` 141 | 142 | Check Notebook for Details: [Tutorial_II](notebooks/Tutorial_II.ipynb) 143 | 144 | 145 | ## Random Sampling Code 146 | 147 | The basic usage is the same as above. Just replace the `split_image` method with `random_crop_image`. 148 | 149 | ```python 150 | from splitraster import io 151 | input_image_path = "./data/raw/RGB.png" 152 | gt_image_path = "./data/raw/GT.png" 153 | 154 | input_save_path = "./data/processed/Rand/RGB" 155 | gt_save_path = "./data/processed/Rand/GT" 156 | 157 | n = io.random_crop_image(input_image_path, input_save_path, gt_image_path, gt_save_path, crop_size=256, crop_number=20, img_ext='.png', label_ext='.png', overwrite=True) 158 | 159 | print(f"{n} sample pairs of {input_image_path, gt_image_path} are added at {input_save_path, gt_save_path}.") 160 | 161 | ``` 162 | 163 | ```python 164 | from splitraster import geo 165 | input_tif_image_path = "./data/raw/TIF/RGB5k.tif" 166 | gt_tif_image_path = "./data/raw/TIF/GT5k.tif" 167 | 168 | input_save_image_path = "./data/processed/Rand/RGB_TIF" 169 | gt_save_image_path = "./data/processed/Rand/GT_TIF" 170 | 171 | n = geo.random_crop_image(input_tif_image_path, input_save_image_path, gt_tif_image_path, gt_save_image_path, crop_size=500, crop_number=20, overwrite=True) 172 | 173 | print(f"{n} sample pairs of {input_tif_image_path, gt_tif_image_path} are added at {input_save_image_path, gt_save_image_path}.") 174 | 175 | ``` 176 | 177 | ## Contribution Guidelines 178 | 179 | If you run into issues or have questions, please [open an issue](https://github.com/cuicaihao/split_raster/issues) or [submit a pull request](https://github.com/cuicaihao/split_raster/pulls). 180 | 181 | If you are interested in contributing to `splitraster`, please see our [contributing guidelines](CONTRIBUTING.md). 182 | 183 |
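## Appendix: Estimating the Tile Count

The tile counts and padded shapes printed in the examples above follow directly from `crop_size` and `repetition_rate`. The sketch below is plain arithmetic, not part of the `splitraster` API; the padding rule (stride = crop_size * (1 - repetition_rate), with each axis padded up so the last tile ends exactly on the border) is inferred from the printed outputs and may differ from the package internals in edge cases.

```python
import math


def expected_tiles(height, width, crop_size, repetition_rate=0.0):
    """Hypothetical helper: estimate how many tiles split_image should produce."""
    stride = int(crop_size * (1 - repetition_rate))  # more overlap -> smaller stride

    def padded(dim):
        # pad the axis so the final tile ends exactly on the padded border
        if dim <= crop_size:
            return crop_size
        return math.ceil((dim - crop_size) / stride) * stride + crop_size

    ph, pw = padded(height), padded(width)
    rows = (ph - crop_size) // stride + 1
    cols = (pw - crop_size) // stride + 1
    return rows * cols, (ph, pw)


print(expected_tiles(1000, 1000, 256, 0.5))  # (49, (1024, 1024)), as in the 50% overlap example
print(expected_tiles(1000, 1000, 256, 0.0))  # (16, (1024, 1024)), as in the no-overlap example
```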
Project based on the cookiecutter data science project template. #cookiecutterdatascience
184 | -------------------------------------------------------------------------------- /data/external/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/data/external/.gitkeep -------------------------------------------------------------------------------- /data/interim/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/data/interim/.gitkeep -------------------------------------------------------------------------------- /data/raw/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/data/raw/.gitkeep -------------------------------------------------------------------------------- /data/raw/GT.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/data/raw/GT.png -------------------------------------------------------------------------------- /data/raw/RGB.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/data/raw/RGB.png -------------------------------------------------------------------------------- /data/raw/TIF/GT5k.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/data/raw/TIF/GT5k.tif -------------------------------------------------------------------------------- /data/raw/TIF/RGB5k.tif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/data/raw/TIF/RGB5k.tif -------------------------------------------------------------------------------- /data/raw/readme.txt: -------------------------------------------------------------------------------- 1 | Sample Image from AerialImageData 2 | 3 | kitsap11.tif -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/docs/.nojekyll -------------------------------------------------------------------------------- /docs/about.md: -------------------------------------------------------------------------------- 1 | # About this Package 2 | 3 | Split Raster is a Python package that splits a large image into small tiles. It is useful for deep learning and computer vision tasks, and it can also be used to tile large images for other applications. 4 | 5 | The initial version of the package was developed by Chris to provide good support for deep learning and computer vision tasks on satellite images by creating tiled output image samples from an input raster dataset. 6 | 7 | Visit [Author's Blog](https://cuicaihao.com) for more information.
8 | 9 | Related Projects: 10 | 11 | - [Aerial Image Segmentation with Deep Learning on PyTorch](https://cuicaihao.com/2021/08/12/aerial-image-segmentation-with-deep-learning-on-pytorch/) 12 | 13 | 14 | - [Roads from Above: Augmenting Civil Engineering & Geospatial Workflows with Machine Learning](https://cuicaihao.com/2018/10/21/roads-from-above/) 15 | 16 | 17 | -------------------------------------------------------------------------------- /docs/gis.md: -------------------------------------------------------------------------------- 1 | 2 | # Split Raster Images for Remote Sensing (GeoTIFF) and GIS 3 | 4 | If you are working with Remote Sensing images, you can use this package to split the images into small tiles. 5 | 6 | You can also work with Remote Sensing (GeoTIFF) satellite images, such as multispectral images that have more bands or channels. The code stays the same, with one small difference: replace the `io` module with the `geo` module. 7 | 8 | This feature also requires the [`gdal` package](https://gdal.org/), which you can install with the following command in your Python environment. 9 | 10 | ![gdal](img/gdalicon.png) 11 | 12 | This package is not listed as a required dependency because many users may not need this function. 13 | 14 | ```bash 15 | conda install -c conda-forge gdal 16 | ``` 17 | 18 | ## Try Sample code 19 | 20 | Sample Code: 21 | 22 | ```Python 23 | from splitraster import geo 24 | input_image_path = "./data/raw/Input.tif" 25 | gt_image_path = "./data/raw/GT.tif" 26 | 27 | save_path = "./data/processed/Input" 28 | crop_size = 256 29 | repetition_rate = 0.5 30 | overwrite = False 31 | 32 | n = geo.split_image(input_image_path, save_path, crop_size, 33 | repetition_rate=repetition_rate, overwrite=overwrite) 34 | print(f"{n} tiles sample of {input_image_path} are added at {save_path}") 35 | ``` 36 | 37 | ## Random Sampling Code 38 | ```python 39 | from splitraster import geo 40 | input_tif_image_path = "./data/raw/TIF/RGB5k.tif" 41 | gt_tif_image_path = "./data/raw/TIF/GT5k.tif" 42 | 43 | input_save_image_path = "./data/processed/Rand/RGB_TIF" 44 | gt_save_image_path = "./data/processed/Rand/GT_TIF" 45 | 46 | n = geo.random_crop_image(input_tif_image_path, input_save_image_path, gt_tif_image_path, gt_save_image_path, crop_size=500, crop_number=20, overwrite=True) 47 | 48 | print(f"{n} sample pairs of {input_tif_image_path, gt_tif_image_path} are added at {input_save_image_path, gt_save_image_path}.") 49 | 50 | ``` -------------------------------------------------------------------------------- /docs/img/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/docs/img/favicon.ico -------------------------------------------------------------------------------- /docs/img/gdalicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/docs/img/gdalicon.png -------------------------------------------------------------------------------- /docs/img/output-grid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/docs/img/output-grid.png -------------------------------------------------------------------------------- /docs/img/output_img_gt.png:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/docs/img/output_img_gt.png -------------------------------------------------------------------------------- /docs/img/split_raster_sample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/docs/img/split_raster_sample.png -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Split Raster 2 | 3 | [![image](https://img.shields.io/pypi/pyversions/splitraster)](https://python.org/pypi/splitraster) 4 | [![image](https://img.shields.io/pypi/v/splitraster?color=g)](https://python.org/pypi/splitraster) 5 | [![SplitRaster Development Build Install and Test](https://github.com/cuicaihao/split_raster/actions/workflows/python-CI.yml/badge.svg?branch=develop)](https://github.com/cuicaihao/split_raster/actions/workflows/python-CI.yml) 6 | [![SplitRaster Package Release and Publish](https://github.com/cuicaihao/split_raster/actions/workflows/python-CD.yml/badge.svg)](https://github.com/cuicaihao/split_raster/actions/workflows/python-CD.yml) 7 | [![DMonth](https://pepy.tech/badge/splitraster/month)](https://pepy.tech/project/splitraster) 8 | [![DTotal](https://pepy.tech/badge/splitraster?)](https://pepy.tech/project/splitraster) 9 | [![image](https://img.shields.io/github/license/cuicaihao/split_raster?color=blue)](https://python.org/pypi/splitraster) 10 | 11 | 12 | ## Quick Links 13 | 14 | - PyPI: [split-raster](https://pypi.org/project/splitraster/) 15 | - GitHub: [split-raster](https://github.com/cuicaihao/split_raster) 16 | 17 | ## Introduction 18 | 19 | ![Sample Image](img/split_raster_sample.png) 20 | 21 | Split Raster is an open-source and highly versatile Python package designed to easily break down large images into smaller, more manageable tiles. While the package is particularly useful for deep learning and computer vision tasks, it can be applied to a wide range of applications. 22 | 23 | Initially developed by [Caihao Cui](https://github.com/cuicaihao) to provide optimal support for deep learning and computer vision tasks, Split Raster was specifically designed for image segmentation tasks on satellite images as well as remote sensing methods. By generating tiled output image samples from an input raster dataset, Split Raster enables more efficient and effective analysis of large images. The package also includes a random sampling function that generates a fixed number of tiles for early experimentation. 24 | 25 | For example, let's say you have a set of RGB and GT images, each with dimensions of 1000-by-1000 pixels. Utilizing `Split Raster`, you can easily generate 16 tiles, each with dimensions of 256x256 pixels and automatic padding on the edges. The package also allows for customization of tile size and overlap to better suit individual project needs. Furthermore, Split Raster handles the padding and naming of output images automatically, saving time and effort (e.g., 0001.png, 0002.png, ..., 9999.png). 26 | 27 | ## Install the packages 28 | 29 | ```bash 30 | pip install splitraster 31 | ``` 32 | 33 | ## Try Sample code 34 | 35 | The sample image can be found in the GitHub repo.
36 | 37 | ```python 38 | 39 | from splitraster import io 40 | 41 | input_image_path = "./data/raw/RGB.png" 42 | gt_image_path = "./data/raw/GT.png" 43 | 44 | save_path = "./data/processed/RGB" 45 | save_path_gt = "./data/processed/GT" 46 | 47 | crop_size = 256 # 256x256 pixels of the output tiles 48 | repetition_rate = 0.0 # 0.0 means no overlap 49 | overwrite = False # if True, overwrite the existing files in the save_path 50 | 51 | n = io.split_image(input_image_path, save_path, crop_size, 52 | repetition_rate=repetition_rate, overwrite=overwrite) 53 | print(f"{n} tiles sample of {input_image_path} are added at {save_path}") 54 | 55 | 56 | n = io.split_image(gt_image_path, save_path_gt, crop_size, 57 | repetition_rate=repetition_rate, overwrite=overwrite) 58 | print(f"{n} tiles sample of {gt_image_path} are added at {save_path_gt}") 59 | 60 | 61 | ``` 62 | 63 | Possible results. Note that if you do not set the overwrite parameter to True, existing files will not be overwritten; the names of new files will continue from the next number after the existing files. 64 | 65 | ```bash 66 | Input Image File Shape (H, W, D):(1000, 1000, 3) 67 | crop_size = 256, stride = 256 68 | Padding Image File Shape (H, W, D):(1024, 1024, 3) 69 | Generating: 100%|██████████| 16/16 [00:00<00:00, 27.63img/s] 70 | 16 tiles sample of ../data/raw/RGB.png are added at ../data/processed/RGB 71 | Input Image File Shape (H, W, D):(1000, 1000) 72 | crop_size = 256, stride = 256 73 | Padding Image File Shape (H, W, D):(1024, 1024) 74 | Generating: 100%|██████████| 16/16 [00:00<00:00, 48.39img/s]16 tiles sample of ../data/raw/GT.png are added at ../data/processed/GT 75 | ``` 76 | 77 | 78 | ## Random Sampling Code 79 | 80 | The basic usage is the same as above. Just replace the `split_image` method with `random_crop_image`. 81 | 82 | ```python 83 | from splitraster import io 84 | input_image_path = "./data/raw/RGB.png" 85 | gt_image_path = "./data/raw/GT.png" 86 | 87 | input_save_path = "./data/processed/Rand/RGB" 88 | gt_save_path = "./data/processed/Rand/GT" 89 | 90 | n = io.random_crop_image(input_image_path, input_save_path, gt_image_path, gt_save_path, crop_size=256, crop_number=20, img_ext='.png', label_ext='.png', overwrite=True) 91 | 92 | print(f"{n} sample pairs of {input_image_path, gt_image_path} are added at {input_save_path, gt_save_path}.") 93 | 94 | ``` 95 | Results: 96 | 97 | ```bash 98 | Generating: 100%|██████████| 20/20 [00:01<00:00, 19.27img/s]20 sample pairs of ('../data/raw/RGB.png', '../data/raw/GT.png') are added at ('../data/processed/Rand/RGB', '../data/processed/Rand/GT'). 99 | ``` 100 | 101 | 102 | ## Update Log 103 | - 2023-Mar-19 Update GitHub Actions and add a tutorial for the package. 104 | - 2022-Dec-16 Upgrade the package to support Python 3.8, 3.9, 3.10, 3.11 (https://pypi.org/project/splitraster/0.3.3). 105 | - 2022-Jan-16 Fix bugs to make the package suitable for Python 3.7. Publish new version at (https://pypi.org/project/splitraster/0.3.2/). 106 | 107 | ## Contribution Guidelines 108 | 109 | If you run into issues or have questions, please [open an issue](https://github.com/cuicaihao/split_raster/issues) or [submit a pull request](https://github.com/cuicaihao/split_raster/pulls). 110 | 111 | If you are interested in contributing to `splitraster`, please see our [contributing guidelines](../CONTRIBUTING.md).
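## Sanity-Check the Output Tiles

After running either example above, you can quickly verify the generated tiles before training. This is a minimal sketch, assuming the default `.png` output of the `io` example and the `./data/processed/RGB` folder used above; it reads tiles with scikit-image (the same reader the tutorial uses) and is not part of the `splitraster` API.

```python
from pathlib import Path

from skimage.io import imread

save_path = Path("./data/processed/RGB")  # folder written by io.split_image above
tiles = sorted(save_path.glob("*.png"))
print(f"{len(tiles)} tiles found")  # expect 16 for a 1000x1000 image, crop_size=256, no overlap

first = imread(str(tiles[0]))  # read one tile back as a NumPy array
print(first.shape)  # expect (256, 256, 3) for the RGB input
```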
112 | -------------------------------------------------------------------------------- /docs/sphinx/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make <target>' where <target> is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -rm -rf $(BUILDDIR)/* 43 | 44 | html: 45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 48 | 49 | dirhtml: 50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 51 | @echo 52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 53 | 54 | singlehtml: 55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 56 | @echo 57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 58 | 59 | pickle: 60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 61 | @echo 62 | @echo "Build finished; now you can process the pickle files." 63 | 64 | json: 65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 66 | @echo 67 | @echo "Build finished; now you can process the JSON files." 68 | 69 | htmlhelp: 70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 71 | @echo 72 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 73 | ".hhp project file in $(BUILDDIR)/htmlhelp."
74 | 75 | qthelp: 76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 77 | @echo 78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/split_raster.qhcp" 81 | @echo "To view the help file:" 82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/split_raster.qhc" 83 | 84 | devhelp: 85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 86 | @echo 87 | @echo "Build finished." 88 | @echo "To view the help file:" 89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/split_raster" 90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/split_raster" 91 | @echo "# devhelp" 92 | 93 | epub: 94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 95 | @echo 96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 97 | 98 | latex: 99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 100 | @echo 101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 103 | "(use \`make latexpdf' here to do that automatically)." 104 | 105 | latexpdf: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo "Running LaTeX files through pdflatex..." 108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 110 | 111 | text: 112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 113 | @echo 114 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 115 | 116 | man: 117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 118 | @echo 119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 120 | 121 | texinfo: 122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 123 | @echo 124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 125 | @echo "Run \`make' in that directory to run these through makeinfo" \ 126 | "(use \`make info' here to do that automatically)." 127 | 128 | info: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo "Running Texinfo files through makeinfo..." 131 | make -C $(BUILDDIR)/texinfo info 132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 133 | 134 | gettext: 135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 136 | @echo 137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 138 | 139 | changes: 140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 141 | @echo 142 | @echo "The overview file is in $(BUILDDIR)/changes." 143 | 144 | linkcheck: 145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 146 | @echo 147 | @echo "Link check complete; look for any errors in the above output " \ 148 | "or in $(BUILDDIR)/linkcheck/output.txt." 149 | 150 | doctest: 151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 152 | @echo "Testing of doctests in the sources finished, look at the " \ 153 | "results in $(BUILDDIR)/doctest/output.txt." 154 | -------------------------------------------------------------------------------- /docs/sphinx/commands.rst: -------------------------------------------------------------------------------- 1 | Commands 2 | ======== 3 | 4 | The Makefile contains the central entry points for common tasks related to this project. 
5 | 6 | Syncing data to S3 7 | ^^^^^^^^^^^^^^^^^^ 8 | 9 | * `make sync_data_to_s3` will use `aws s3 sync` to recursively sync files in `data/` up to `s3://[OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')/data/`. 10 | * `make sync_data_from_s3` will use `aws s3 sync` to recursively sync files from `s3://[OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')/data/` to `data/`. 11 | -------------------------------------------------------------------------------- /docs/sphinx/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Split Raster documentation build configuration file, created by 4 | # sphinx-quickstart. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import os 15 | import sys 16 | 17 | # If extensions (or modules to document with autodoc) are in another directory, 18 | # add these directories to sys.path here. If the directory is relative to the 19 | # documentation root, use os.path.abspath to make it absolute, like shown here. 20 | # sys.path.insert(0, os.path.abspath('.')) 21 | 22 | # -- General configuration ----------------------------------------------------- 23 | 24 | # If your documentation needs a minimal Sphinx version, state it here. 25 | # needs_sphinx = '1.0' 26 | 27 | # Add any Sphinx extension module names here, as strings. They can be extensions 28 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 29 | extensions = [] 30 | 31 | # Add any paths that contain templates here, relative to this directory. 32 | templates_path = ["_templates"] 33 | 34 | # The suffix of source filenames. 35 | source_suffix = ".rst" 36 | 37 | # The encoding of source files. 38 | # source_encoding = 'utf-8-sig' 39 | 40 | # The master toctree document. 41 | master_doc = "index" 42 | 43 | # General information about the project. 44 | project = "Split Raster" 45 | 46 | # The version info for the project you're documenting, acts as replacement for 47 | # |version| and |release|, also used in various other places throughout the 48 | # built documents. 49 | # 50 | # The short X.Y version. 51 | version = "0.1" 52 | # The full version, including alpha/beta/rc tags. 53 | release = "0.1" 54 | 55 | # The language for content autogenerated by Sphinx. Refer to documentation 56 | # for a list of supported languages. 57 | # language = None 58 | 59 | # There are two options for replacing |today|: either, you set today to some 60 | # non-false value, then it is used: 61 | # today = '' 62 | # Else, today_fmt is used as the format for a strftime call. 63 | # today_fmt = '%B %d, %Y' 64 | 65 | # List of patterns, relative to source directory, that match files and 66 | # directories to ignore when looking for source files. 67 | exclude_patterns = ["_build"] 68 | 69 | # The reST default role (used for this markup: `text`) to use for all documents. 70 | # default_role = None 71 | 72 | # If true, '()' will be appended to :func: etc. cross-reference text. 73 | # add_function_parentheses = True 74 | 75 | # If true, the current module name will be prepended to all description 76 | # unit titles (such as .. function::). 
77 | # add_module_names = True 78 | 79 | # If true, sectionauthor and moduleauthor directives will be shown in the 80 | # output. They are ignored by default. 81 | # show_authors = False 82 | 83 | # The name of the Pygments (syntax highlighting) style to use. 84 | pygments_style = "sphinx" 85 | 86 | # A list of ignored prefixes for module index sorting. 87 | # modindex_common_prefix = [] 88 | 89 | 90 | # -- Options for HTML output --------------------------------------------------- 91 | 92 | # The theme to use for HTML and HTML Help pages. See the documentation for 93 | # a list of builtin themes. 94 | html_theme = "default" 95 | 96 | # Theme options are theme-specific and customize the look and feel of a theme 97 | # further. For a list of options available for each theme, see the 98 | # documentation. 99 | # html_theme_options = {} 100 | 101 | # Add any paths that contain custom themes here, relative to this directory. 102 | # html_theme_path = [] 103 | 104 | # The name for this set of Sphinx documents. If None, it defaults to 105 | # "<project> v<release> documentation". 106 | # html_title = None 107 | 108 | # A shorter title for the navigation bar. Default is the same as html_title. 109 | # html_short_title = None 110 | 111 | # The name of an image file (relative to this directory) to place at the top 112 | # of the sidebar. 113 | # html_logo = None 114 | 115 | # The name of an image file (within the static path) to use as favicon of the 116 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 117 | # pixels large. 118 | # html_favicon = None 119 | 120 | # Add any paths that contain custom static files (such as style sheets) here, 121 | # relative to this directory. They are copied after the builtin static files, 122 | # so a file named "default.css" will overwrite the builtin "default.css". 123 | html_static_path = ["_static"] 124 | 125 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 126 | # using the given strftime format. 127 | # html_last_updated_fmt = '%b %d, %Y' 128 | 129 | # If true, SmartyPants will be used to convert quotes and dashes to 130 | # typographically correct entities. 131 | # html_use_smartypants = True 132 | 133 | # Custom sidebar templates, maps document names to template names. 134 | # html_sidebars = {} 135 | 136 | # Additional templates that should be rendered to pages, maps page names to 137 | # template names. 138 | # html_additional_pages = {} 139 | 140 | # If false, no module index is generated. 141 | # html_domain_indices = True 142 | 143 | # If false, no index is generated. 144 | # html_use_index = True 145 | 146 | # If true, the index is split into individual pages for each letter. 147 | # html_split_index = False 148 | 149 | # If true, links to the reST sources are added to the pages. 150 | # html_show_sourcelink = True 151 | 152 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 153 | # html_show_sphinx = True 154 | 155 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 156 | # html_show_copyright = True 157 | 158 | # If true, an OpenSearch description file will be output, and all pages will 159 | # contain a <link> tag referring to it. The value of this option must be the 160 | # base URL from which the finished HTML is served. 161 | # html_use_opensearch = '' 162 | 163 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 164 | # html_file_suffix = None 165 | 166 | # Output file base name for HTML help builder.
167 | htmlhelp_basename = "split_rasterdoc" 168 | 169 | 170 | # -- Options for LaTeX output -------------------------------------------------- 171 | 172 | latex_elements = { 173 | # The paper size ('letterpaper' or 'a4paper'). 174 | # 'papersize': 'letterpaper', 175 | # The font size ('10pt', '11pt' or '12pt'). 176 | # 'pointsize': '10pt', 177 | # Additional stuff for the LaTeX preamble. 178 | # 'preamble': '', 179 | } 180 | 181 | # Grouping the document tree into LaTeX files. List of tuples 182 | # (source start file, target name, title, author, documentclass [howto/manual]). 183 | latex_documents = [ 184 | ( 185 | "index", 186 | "split_raster.tex", 187 | "Split Raster Documentation", 188 | "Your name (or your organization/company/team)", 189 | "manual", 190 | ), 191 | ] 192 | 193 | # The name of an image file (relative to this directory) to place at the top of 194 | # the title page. 195 | # latex_logo = None 196 | 197 | # For "manual" documents, if this is true, then toplevel headings are parts, 198 | # not chapters. 199 | # latex_use_parts = False 200 | 201 | # If true, show page references after internal links. 202 | # latex_show_pagerefs = False 203 | 204 | # If true, show URL addresses after external links. 205 | # latex_show_urls = False 206 | 207 | # Documents to append as an appendix to all manuals. 208 | # latex_appendices = [] 209 | 210 | # If false, no module index is generated. 211 | # latex_domain_indices = True 212 | 213 | 214 | # -- Options for manual page output -------------------------------------------- 215 | 216 | # One entry per manual page. List of tuples 217 | # (source start file, name, description, authors, manual section). 218 | man_pages = [ 219 | ( 220 | "index", 221 | "split_raster", 222 | "Split Raster Documentation", 223 | ["Your name (or your organization/company/team)"], 224 | 1, 225 | ) 226 | ] 227 | 228 | # If true, show URL addresses after external links. 229 | # man_show_urls = False 230 | 231 | 232 | # -- Options for Texinfo output ------------------------------------------------ 233 | 234 | # Grouping the document tree into Texinfo files. List of tuples 235 | # (source start file, target name, title, author, 236 | # dir menu entry, description, category) 237 | texinfo_documents = [ 238 | ( 239 | "index", 240 | "split_raster", 241 | "Split Raster Documentation", 242 | "Your name (or your organization/company/team)", 243 | "Split Raster", 244 | "Creates a tiled output from an input raster dataset.", 245 | "Miscellaneous", 246 | ), 247 | ] 248 | 249 | # Documents to append as an appendix to all manuals. 250 | # texinfo_appendices = [] 251 | 252 | # If false, no module index is generated. 253 | # texinfo_domain_indices = True 254 | 255 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 256 | # texinfo_show_urls = 'footnote' 257 | -------------------------------------------------------------------------------- /docs/sphinx/getting-started.rst: -------------------------------------------------------------------------------- 1 | Getting started 2 | =============== 3 | 4 | This is where you describe how to get set up on a clean install, including the 5 | commands necessary to get the raw data (using the `sync_data_from_s3` command, 6 | for example), and then how to make the cleaned, final data sets. 7 | -------------------------------------------------------------------------------- /docs/sphinx/index.rst: -------------------------------------------------------------------------------- 1 | .. 
Split Raster documentation master file, created by 2 | sphinx-quickstart. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Split Raster documentation! 7 | ============================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | getting-started 15 | commands 16 | 17 | 18 | 19 | Indices and tables 20 | ================== 21 | 22 | * :ref:`genindex` 23 | * :ref:`modindex` 24 | * :ref:`search` 25 | -------------------------------------------------------------------------------- /docs/sphinx/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^<target^>` where ^<target^> is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. linkcheck to check all external links for integrity 37 | echo. doctest to run all doctests embedded in the documentation if enabled 38 | goto end 39 | ) 40 | 41 | if "%1" == "clean" ( 42 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 43 | del /q /s %BUILDDIR%\* 44 | goto end 45 | ) 46 | 47 | if "%1" == "html" ( 48 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 49 | if errorlevel 1 exit /b 1 50 | echo. 51 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 52 | goto end 53 | ) 54 | 55 | if "%1" == "dirhtml" ( 56 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 57 | if errorlevel 1 exit /b 1 58 | echo. 59 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 60 | goto end 61 | ) 62 | 63 | if "%1" == "singlehtml" ( 64 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 68 | goto end 69 | ) 70 | 71 | if "%1" == "pickle" ( 72 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished; now you can process the pickle files. 76 | goto end 77 | ) 78 | 79 | if "%1" == "json" ( 80 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished; now you can process the JSON files.
84 | goto end 85 | ) 86 | 87 | if "%1" == "htmlhelp" ( 88 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can run HTML Help Workshop with the ^ 92 | .hhp project file in %BUILDDIR%/htmlhelp. 93 | goto end 94 | ) 95 | 96 | if "%1" == "qthelp" ( 97 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 98 | if errorlevel 1 exit /b 1 99 | echo. 100 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 101 | .qhcp project file in %BUILDDIR%/qthelp, like this: 102 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\split_raster.qhcp 103 | echo.To view the help file: 104 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\split_raster.qhc 105 | goto end 106 | ) 107 | 108 | if "%1" == "devhelp" ( 109 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 110 | if errorlevel 1 exit /b 1 111 | echo. 112 | echo.Build finished. 113 | goto end 114 | ) 115 | 116 | if "%1" == "epub" ( 117 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 118 | if errorlevel 1 exit /b 1 119 | echo. 120 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 121 | goto end 122 | ) 123 | 124 | if "%1" == "latex" ( 125 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 129 | goto end 130 | ) 131 | 132 | if "%1" == "text" ( 133 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The text files are in %BUILDDIR%/text. 137 | goto end 138 | ) 139 | 140 | if "%1" == "man" ( 141 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 145 | goto end 146 | ) 147 | 148 | if "%1" == "texinfo" ( 149 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 150 | if errorlevel 1 exit /b 1 151 | echo. 152 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 153 | goto end 154 | ) 155 | 156 | if "%1" == "gettext" ( 157 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 158 | if errorlevel 1 exit /b 1 159 | echo. 160 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 161 | goto end 162 | ) 163 | 164 | if "%1" == "changes" ( 165 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 166 | if errorlevel 1 exit /b 1 167 | echo. 168 | echo.The overview file is in %BUILDDIR%/changes. 169 | goto end 170 | ) 171 | 172 | if "%1" == "linkcheck" ( 173 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 174 | if errorlevel 1 exit /b 1 175 | echo. 176 | echo.Link check complete; look for any errors in the above output ^ 177 | or in %BUILDDIR%/linkcheck/output.txt. 178 | goto end 179 | ) 180 | 181 | if "%1" == "doctest" ( 182 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 183 | if errorlevel 1 exit /b 1 184 | echo. 185 | echo.Testing of doctests in the sources finished, look at the ^ 186 | results in %BUILDDIR%/doctest/output.txt. 187 | goto end 188 | ) 189 | 190 | :end 191 | -------------------------------------------------------------------------------- /docs/tutorial.md: -------------------------------------------------------------------------------- 1 | # Tutorial for Using Split-Raster for Deep Learning 2 | 3 | In this demo, we will split a large image into small tiles, which is useful for deep learning and computer vision tasks.
It can also be used to tile large images for other applications. 4 | 5 | For example, we have a large image of size 1000-by-1000, and we want to split it into 256-by-256 tiles. The `SplitRaster` package successfully generates 16 256x256 image tiles with automatic padding on the edges. You can adjust the tile size and the overlap of the tiles for your own applications. 6 | 7 | Set up your local or cloud environment for this demo. 8 | 9 | This demo uses Python 3.10, but the package is compatible with Python 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12. 10 | 11 | ```bash 12 | pip install -q splitraster 13 | ``` 14 | 15 | ## Create Image Sample Pairs 16 | 17 | ```python 18 | from splitraster import io 19 | 20 | input_image_path = "../data/raw/RGB.png" 21 | gt_image_path = "../data/raw/GT.png" 22 | 23 | save_path = "../data/processed/RGB" 24 | save_path_gt = "../data/processed/GT" 25 | 26 | crop_size = 256 27 | repetition_rate = 0 # <----- change this value to 0.5 for 50% overlap 28 | overwrite = True # <----- change this value to False for no overwrite demo 29 | 30 | n = io.split_image(input_image_path, save_path, crop_size, 31 | repetition_rate=repetition_rate, overwrite=overwrite) 32 | print(f"{n} tiles sample of {input_image_path} are added at {save_path}") 33 | 34 | 35 | n = io.split_image(gt_image_path, save_path_gt, crop_size, 36 | repetition_rate=repetition_rate, overwrite=overwrite) 37 | print(f"{n} tiles sample of {gt_image_path} are added at {save_path_gt}") 38 | ``` 39 | Output: 40 | ```bash 41 | Input Image File Shape (H, W, D):(1000, 1000, 3) 42 | crop_size = 256, stride = 256 43 | Padding Image File Shape (H, W, D):(1024, 1024, 3) 44 | ... 45 | 46 | 16 tiles sample of ../data/raw/GT.png are added at ../data/processed/GT 47 | ``` 48 | 49 | ## Random Sampling Code 50 | 51 | If you want to create a small dataset for early-stage exploration, use the random sampling code below. The following code generates 20 tiles (256x256) from the 1000x1000 image. 52 | 53 | ```python 54 | from splitraster import io 55 | input_image_path = "../data/raw/RGB.png" 56 | gt_image_path = "../data/raw/GT.png" 57 | 58 | input_save_path = "../data/processed/Rand/RGB" 59 | gt_save_path = "../data/processed/Rand/GT" 60 | 61 | n = io.random_crop_image(input_image_path, input_save_path, gt_image_path, gt_save_path, crop_size=256, crop_number=20, img_ext='.png', label_ext='.png', overwrite=True) 62 | 63 | print(f"{n} sample pairs of {input_image_path, gt_image_path} are added at {input_save_path, gt_save_path}.") 64 | ``` 65 | 66 | Result: 67 | 68 | ```bash 69 | Generating: 100%|██████████| 20/20 [00:01<00:00, 19.27img/s]20 sample pairs of ('../data/raw/RGB.png', '../data/raw/GT.png') are added at ('../data/processed/Rand/RGB', '../data/processed/Rand/GT'). 70 | ``` 71 | 72 | 73 | 74 | 75 | ## Use the Split-Raster Output as Input to a Deep Learning Model 76 | 77 | We will use PyTorch as the deep learning framework for this demo.
78 | 79 | ```bash 80 | pip install -q torch torchvision 81 | ``` 82 | 83 | ## Create a DataLoader for the Split-Raster output 84 | 85 | 86 | ```python 87 | 88 | import torch 89 | from torch.utils.data import Dataset 90 | from torchvision import datasets 91 | from torchvision.transforms import ToTensor 92 | import matplotlib.pyplot as plt 93 | 94 | from skimage.io import imread, imsave 95 | import os 96 | import numpy as np 97 | ``` 98 | Create the `DatasetSegmentation` class, a custom dataset class for the deep learning model. 99 | ```python 100 | # Create a custom dataset class 101 | class DatasetSegmentation(torch.utils.data.Dataset): 102 | def __init__(self, image_path, label_path): 103 | self.imgfolder = image_path 104 | self.maskfolder = label_path 105 | self.imgs = list(sorted(os.listdir(image_path))) 106 | self.masks = list(sorted(os.listdir(label_path))) 107 | 108 | def __getitem__(self, idx): 109 | img_path = os.path.join(self.imgfolder, self.imgs[idx]) 110 | mask_path = os.path.join(self.maskfolder, self.masks[idx]) 111 | data = imread(img_path) 112 | data = np.moveaxis(data, -1, 0) # HWC -> CHW, the channel order PyTorch expects 113 | label = imread(mask_path) 114 | label = label/255 # scale the mask from {0, 255} to {0, 1} 115 | return torch.from_numpy(data).float(), torch.from_numpy(label).long() 116 | 117 | def __len__(self): 118 | return len(self.imgs) 119 | AerialDataset = DatasetSegmentation("../data/processed/RGB", "../data/processed/GT") 120 | ``` 121 | 122 | Create a DataLoader and read a batch of images from the Split-Raster output. 123 | 124 | ```python 125 | from torch.utils.data import DataLoader 126 | train_dataloader = DataLoader(AerialDataset, batch_size=16, shuffle=False) 127 | train_features, train_labels = next(iter(train_dataloader)) 128 | print(f"Feature batch shape: {train_features.size()}") 129 | print(f"Labels batch shape: {train_labels.size()}") 130 | ``` 131 | 132 | Output: 133 | 134 | ```bash 135 | Feature batch shape: torch.Size([16, 3, 256, 256]) 136 | Labels batch shape: torch.Size([16, 256, 256]) 137 | ``` 138 | ## Visualize the images and labels
139 | 140 | ```python 141 | # Select a random image from the batch of 16 142 | import random 143 | idx = random.randint(0, 15) 144 | img = train_features[idx].squeeze().numpy() 145 | label = train_labels[idx].squeeze().numpy() 146 | 147 | print(f"Feature batch shape: {img.shape, img.max(), img.min()}") 148 | print(f"Labels batch shape: {label.shape, label.max(), label.min()}") 149 | 150 | 151 | 152 | from matplotlib.pyplot import figure 153 | 154 | figure(figsize=(12, 5), dpi=80) 155 | plt.subplot(1,2,1) 156 | img = np.moveaxis(img, 0, -1) # adjust the channel dimension (CHW -> HWC for matplotlib) 157 | plt.imshow(img.astype(np.uint8)) 158 | plt.subplot(1,2,2) 159 | 160 | plt.imshow(label.astype(np.uint8), cmap="gray") 161 | plt.show() 162 | 163 | ``` 164 | 165 | Feature batch shape: ((3, 256, 256), 221.0, 1.0) 166 | Labels batch shape: ((256, 256), 1, 0) 167 | 168 | ![output_img_gt.png](img/output_img_gt.png) 169 | 170 | ## Use torchvision to visualize the images and labels 171 | 172 | ```python 173 | import torchvision 174 | grid_img = torchvision.utils.make_grid(train_features/255, nrow=4) 175 | grid_label = torchvision.utils.make_grid(train_labels.unsqueeze_(1), nrow=4) 176 | print(grid_img.shape) 177 | print(grid_label.shape) 178 | figure(figsize=(12, 18), dpi=80) 179 | plt.subplot(1,2,1) 180 | plt.imshow(grid_img.permute(1, 2, 0)) 181 | plt.subplot(1,2,2) 182 | plt.imshow(grid_label[0,:,:], cmap='gray') 183 | plt.show() 184 | ``` 185 | 186 | Output: 187 | ``` 188 | (torch.Size([3, 1034, 1034]), torch.Size([3, 1034, 1034])) 189 | ``` 190 | ![output-grid.png](img/output-grid.png) 191 | 192 | 193 | ## Download the Notebook 194 | 195 | Find the full code in this Notebook Tutorial: [SplitRaster Tutorial](https://github.com/cuicaihao/split_raster/blob/master/notebooks/Tutorial.ipynb). 196 | 197 | --- 198 | 199 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Split Raster 2 | # site_url: https://example.com/ # TODO: edit it before you deploy your site to a production server. 3 | nav: 4 | - Home: index.md 5 | - Tutorial: tutorial.md 6 | - GIS-RS: gis.md 7 | - About: about.md 8 | theme: readthedocs 9 | -------------------------------------------------------------------------------- /notebooks/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/notebooks/.gitkeep -------------------------------------------------------------------------------- /notebooks/Tutorial_II.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": {}, 5 | "cell_type": "markdown", 6 | "metadata": {}, 7 | "source": [ 8 | "# Tutorial for Using Split-Raster for Deep Learning" 9 | ] 10 | }, 11 | { 12 | "attachments": {}, 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "In this demo, we will split a large image into small tiles. This is useful for deep learning and computer vision tasks, and the package can also be used to tile large images for other applications.\n", 17 | "\n", 18 | "For example, we have a large image of size 1000-by-1000, and we want to split it into 256-by-256 tiles. The `SplitRaster` package successfully generates 16 256x256 image tiles with automatic padding on the edges. 
You can adjust the tile size and the overlap of the tiles for your own applications."
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "## Known Issues: OSGEO / GDAL\n",
26 | "\n",
27 | "The osgeo module is part of the GDAL library, which is a translator library for raster and vector geospatial data formats.\n",
28 | "You can install GDAL using pip, but it has some system dependencies. On a Mac, you can install these using Homebrew:\n",
29 | "```bash\n",
30 | "brew install gdal\n",
31 | "```\n",
32 | "\n",
33 | "Then you can install the Python GDAL package:\n",
34 | "\n",
35 | "```bash\n",
36 | "pip install GDAL\n",
37 | "```\n",
38 | "Please note that installing GDAL can be complex due to its system dependencies. If you encounter issues, you may need to consult the GDAL documentation or seek help from the community."
39 | ]
40 | },
41 | {
42 | "attachments": {},
43 | "cell_type": "markdown",
44 | "metadata": {},
45 | "source": [
46 | "## Setup Env with Conda/MiniConda\n",
47 | "\n",
48 | "Set up your local or cloud environment for this demo.\n",
49 | "\n",
50 | "```Bash\n",
51 | "conda create -n split_raster_py310 python=3.10 -y\n",
52 | "conda activate split_raster_py310\n",
53 | "conda install gdal -y\n",
54 | "conda install ipykernel -y\n",
55 | "pip install --upgrade pip\n",
56 | "pip install splitraster\n",
57 | "``` "
58 | ]
59 | },
60 | {
61 | "attachments": {},
62 | "cell_type": "markdown",
63 | "metadata": {},
64 | "source": [
65 | "This demo uses Python 3.10, but the package is compatible with Python 3.7 through 3.13. "
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": 1,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "# Clean the output folder\n",
75 | "!rm -rf ../data/processed/RGB_TIF\n",
76 | "!rm -rf ../data/processed/GT_TIF\n"
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": 2,
82 | "metadata": {},
83 | "outputs": [
84 | {
85 | "name": "stderr",
86 | "output_type": "stream",
87 | "text": [
88 | "/Users/caihaocui/.local/share/virtualenvs/split_raster-co_bDcoB/lib/python3.10/site-packages/osgeo/gdal.py:314: FutureWarning: Neither gdal.UseExceptions() nor gdal.DontUseExceptions() has been explicitly called.
In GDAL 4.0, exceptions will be enabled by default.\n",
89 | " warnings.warn(\n"
90 | ]
91 | },
92 | {
93 | "name": "stdout",
94 | "output_type": "stream",
95 | "text": [
96 | "Input Image File Shape (D, H, W):(3, 5000, 5000)\n",
97 | "crop_size=256, stride=256\n",
98 | "Padding Image File Shape (D, H, W):(3, 5120, 5120)\n"
99 | ]
100 | },
101 | {
102 | "name": "stderr",
103 | "output_type": "stream",
104 | "text": [
105 | "Generating: 100%|\u001b[32m██████████\u001b[0m| 400/400 [00:00<00:00, 1114.16img/s]\n"
106 | ]
107 | },
108 | {
109 | "name": "stdout",
110 | "output_type": "stream",
111 | "text": [
112 | "400 tile samples of ../data/raw/TIF/RGB5k.tif are added at ../data/processed/RGB_TIF\n",
113 | "Input Image File Shape (D, H, W):(1, 5000, 5000)\n",
114 | "crop_size=256, stride=256\n",
115 | "Padding Image File Shape (D, H, W):(1, 5120, 5120)\n"
116 | ]
117 | },
118 | {
119 | "name": "stderr",
120 | "output_type": "stream",
121 | "text": [
122 | "Generating: 100%|\u001b[32m██████████\u001b[0m| 400/400 [00:00<00:00, 2581.73img/s]"
123 | ]
124 | },
125 | {
126 | "name": "stdout",
127 | "output_type": "stream",
128 | "text": [
129 | "400 tile samples of ../data/raw/TIF/GT5k.tif are added at ../data/processed/GT_TIF\n"
130 | ]
131 | },
132 | {
133 | "name": "stderr",
134 | "output_type": "stream",
135 | "text": [
136 | "\n"
137 | ]
138 | }
139 | ],
140 | "source": [
141 | "from splitraster import geo\n",
142 | "\n",
143 | "input_image_path = \"../data/raw/TIF/RGB5k.tif\"\n",
144 | "gt_image_path = \"../data/raw/TIF/GT5k.tif\"\n",
145 | "\n",
146 | "save_path = \"../data/processed/RGB_TIF\"\n",
147 | "save_path_gt = \"../data/processed/GT_TIF\"\n",
148 | "\n",
149 | "crop_size = 256\n",
150 | "repetition_rate = 0 # <----- change this value to 0.5 for 50% overlap\n",
151 | "overwrite = True # <----- change this value to False for no overwrite demo\n",
152 | "\n",
153 | "n = geo.split_image(input_image_path, save_path, crop_size,\n",
154 | " repetition_rate=repetition_rate, overwrite=overwrite)\n",
155 | "print(f\"{n} tile samples of {input_image_path} are added at {save_path}\")\n",
156 | "\n",
157 | "\n",
158 | "n = geo.split_image(gt_image_path, save_path_gt, crop_size,\n",
159 | " repetition_rate=repetition_rate, overwrite=overwrite)\n",
160 | "print(f\"{n} tile samples of {gt_image_path} are added at {save_path_gt}\")"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": 3,
166 | "metadata": {},
167 | "outputs": [
168 | {
169 | "name": "stdout",
170 | "output_type": "stream",
171 | "text": [
172 | "0001.tif 0051.tif 0101.tif 0151.tif 0201.tif 0251.tif 0301.tif 0351.tif\n",
173 | "0002.tif 0052.tif 0102.tif 0152.tif 0202.tif 0252.tif 0302.tif 0352.tif\n",
174 | "0003.tif 0053.tif 0103.tif 0153.tif 0203.tif 0253.tif 0303.tif 0353.tif\n",
175 | "0004.tif 0054.tif 0104.tif 0154.tif 0204.tif 0254.tif 0304.tif 0354.tif\n",
176 | "0005.tif 0055.tif 0105.tif 0155.tif 0205.tif 0255.tif 0305.tif 0355.tif\n",
177 | "0006.tif 0056.tif 0106.tif 0156.tif 0206.tif 0256.tif 0306.tif 0356.tif\n",
178 | "0007.tif 0057.tif 0107.tif 0157.tif 0207.tif 0257.tif 0307.tif 0357.tif\n",
179 | "0008.tif 0058.tif 0108.tif 0158.tif 0208.tif 0258.tif 0308.tif 0358.tif\n",
180 | "0009.tif 0059.tif 0109.tif 0159.tif 0209.tif 0259.tif 0309.tif 0359.tif\n",
181 | "0010.tif 0060.tif 0110.tif 0160.tif 0210.tif 0260.tif 0310.tif 0360.tif\n",
182 | "0011.tif 0061.tif 0111.tif 0161.tif 0211.tif 0261.tif 0311.tif 0361.tif\n",
183 | "0012.tif 0062.tif 0112.tif 0162.tif 0212.tif 0262.tif 0312.tif
0362.tif\n",
184 | "0013.tif 0063.tif 0113.tif 0163.tif 0213.tif 0263.tif 0313.tif 0363.tif\n",
185 | "0014.tif 0064.tif 0114.tif 0164.tif 0214.tif 0264.tif 0314.tif 0364.tif\n",
186 | "0015.tif 0065.tif 0115.tif 0165.tif 0215.tif 0265.tif 0315.tif 0365.tif\n",
187 | "0016.tif 0066.tif 0116.tif 0166.tif 0216.tif 0266.tif 0316.tif 0366.tif\n",
188 | "0017.tif 0067.tif 0117.tif 0167.tif 0217.tif 0267.tif 0317.tif 0367.tif\n",
189 | "0018.tif 0068.tif 0118.tif 0168.tif 0218.tif 0268.tif 0318.tif 0368.tif\n",
190 | "0019.tif 0069.tif 0119.tif 0169.tif 0219.tif 0269.tif 0319.tif 0369.tif\n",
191 | "0020.tif 0070.tif 0120.tif 0170.tif 0220.tif 0270.tif 0320.tif 0370.tif\n",
192 | "0021.tif 0071.tif 0121.tif 0171.tif 0221.tif 0271.tif 0321.tif 0371.tif\n",
193 | "0022.tif 0072.tif 0122.tif 0172.tif 0222.tif 0272.tif 0322.tif 0372.tif\n",
194 | "0023.tif 0073.tif 0123.tif 0173.tif 0223.tif 0273.tif 0323.tif 0373.tif\n",
195 | "0024.tif 0074.tif 0124.tif 0174.tif 0224.tif 0274.tif 0324.tif 0374.tif\n",
196 | "0025.tif 0075.tif 0125.tif 0175.tif 0225.tif 0275.tif 0325.tif 0375.tif\n",
197 | "0026.tif 0076.tif 0126.tif 0176.tif 0226.tif 0276.tif 0326.tif 0376.tif\n",
198 | "0027.tif 0077.tif 0127.tif 0177.tif 0227.tif 0277.tif 0327.tif 0377.tif\n",
199 | "0028.tif 0078.tif 0128.tif 0178.tif 0228.tif 0278.tif 0328.tif 0378.tif\n",
200 | "0029.tif 0079.tif 0129.tif 0179.tif 0229.tif 0279.tif 0329.tif 0379.tif\n",
201 | "0030.tif 0080.tif 0130.tif 0180.tif 0230.tif 0280.tif 0330.tif 0380.tif\n",
202 | "0031.tif 0081.tif 0131.tif 0181.tif 0231.tif 0281.tif 0331.tif 0381.tif\n",
203 | "0032.tif 0082.tif 0132.tif 0182.tif 0232.tif 0282.tif 0332.tif 0382.tif\n",
204 | "0033.tif 0083.tif 0133.tif 0183.tif 0233.tif 0283.tif 0333.tif 0383.tif\n",
205 | "0034.tif 0084.tif 0134.tif 0184.tif 0234.tif 0284.tif 0334.tif 0384.tif\n",
206 | "0035.tif 0085.tif 0135.tif 0185.tif 0235.tif 0285.tif 0335.tif 0385.tif\n",
207 | "0036.tif 0086.tif 0136.tif 0186.tif 0236.tif 0286.tif 0336.tif 0386.tif\n",
208 | "0037.tif 0087.tif 0137.tif 0187.tif 0237.tif 0287.tif 0337.tif 0387.tif\n",
209 | "0038.tif 0088.tif 0138.tif 0188.tif 0238.tif 0288.tif 0338.tif 0388.tif\n",
210 | "0039.tif 0089.tif 0139.tif 0189.tif 0239.tif 0289.tif 0339.tif 0389.tif\n",
211 | "0040.tif 0090.tif 0140.tif 0190.tif 0240.tif 0290.tif 0340.tif 0390.tif\n",
212 | "0041.tif 0091.tif 0141.tif 0191.tif 0241.tif 0291.tif 0341.tif 0391.tif\n",
213 | "0042.tif 0092.tif 0142.tif 0192.tif 0242.tif 0292.tif 0342.tif 0392.tif\n",
214 | "0043.tif 0093.tif 0143.tif 0193.tif 0243.tif 0293.tif 0343.tif 0393.tif\n",
215 | "0044.tif 0094.tif 0144.tif 0194.tif 0244.tif 0294.tif 0344.tif 0394.tif\n",
216 | "0045.tif 0095.tif 0145.tif 0195.tif 0245.tif 0295.tif 0345.tif 0395.tif\n",
217 | "0046.tif 0096.tif 0146.tif 0196.tif 0246.tif 0296.tif 0346.tif 0396.tif\n",
218 | "0047.tif 0097.tif 0147.tif 0197.tif 0247.tif 0297.tif 0347.tif 0397.tif\n",
219 | "0048.tif 0098.tif 0148.tif 0198.tif 0248.tif 0298.tif 0348.tif 0398.tif\n",
220 | "0049.tif 0099.tif 0149.tif 0199.tif 0249.tif 0299.tif 0349.tif 0399.tif\n",
221 | "0050.tif 0100.tif 0150.tif 0200.tif 0250.tif 0300.tif 0350.tif 0400.tif"
222 | ]
223 | }
224 | ],
225 | "source": [
226 | "!ls ../data/processed/RGB_TIF"
227 | ]
228 | },
229 | {
230 | "attachments": {},
231 | "cell_type": "markdown",
232 | "metadata": {},
233 | "source": [
234 | "## Random Sampling Code\n",
235 | "\n",
236 | "If you want to create a small data set for exploration at an early stage,
you can use the random sampling code below. The following code generates 20 random tiles (500x500) from the 5000x5000 image."
237 | ]
238 | },
239 | {
240 | "cell_type": "code",
241 | "execution_count": 4,
242 | "metadata": {},
243 | "outputs": [],
244 | "source": [
245 | "# Clean the output folder\n",
246 | "!rm -rf ../data/processed/Rand/RGB_TIF\n",
247 | "!rm -rf ../data/processed/Rand/GT_TIF\n"
248 | ]
249 | },
250 | {
251 | "cell_type": "code",
252 | "execution_count": 5,
253 | "metadata": {},
254 | "outputs": [
255 | {
256 | "name": "stderr",
257 | "output_type": "stream",
258 | "text": [
259 | "Generating: 100%|\u001b[32m██████████\u001b[0m| 20/20 [00:00<00:00, 227.96img/s]"
260 | ]
261 | },
262 | {
263 | "name": "stdout",
264 | "output_type": "stream",
265 | "text": [
266 | "20 sample pairs of ('../data/raw/TIF/RGB5k.tif', '../data/raw/TIF/GT5k.tif') are added at ('../data/processed/Rand/RGB_TIF', '../data/processed/Rand/GT_TIF').\n"
267 | ]
268 | },
269 | {
270 | "name": "stderr",
271 | "output_type": "stream",
272 | "text": [
273 | "\n"
274 | ]
275 | }
276 | ],
277 | "source": [
278 | "from splitraster import geo\n",
279 | "input_image_path = \"../data/raw/TIF/RGB5k.tif\"\n",
280 | "gt_image_path = \"../data/raw/TIF/GT5k.tif\"\n",
281 | "\n",
282 | "input_save_path = \"../data/processed/Rand/RGB_TIF\"\n",
283 | "gt_save_path = \"../data/processed/Rand/GT_TIF\"\n",
284 | "\n",
285 | "n = geo.random_crop_image(input_image_path, input_save_path, gt_image_path, gt_save_path, crop_size=500, crop_number=20, img_ext='.png', label_ext='.png', overwrite=True)\n",
286 | "\n",
287 | "print(f\"{n} sample pairs of {input_image_path, gt_image_path} are added at {input_save_path, gt_save_path}.\")"
288 | ]
289 | },
290 | {
291 | "cell_type": "code",
292 | "execution_count": 6,
293 | "metadata": {},
294 | "outputs": [
295 | {
296 | "name": "stdout",
297 | "output_type": "stream",
298 | "text": [
299 | "0001.png 0004.png 0007.png 0010.png 0013.png 0016.png 0019.png\n",
300 | "0002.png 0005.png 0008.png 0011.png 0014.png 0017.png 0020.png\n",
301 | "0003.png 0006.png 0009.png 0012.png 0015.png 0018.png\n"
302 | ]
303 | }
304 | ],
305 | "source": [
306 | "!ls ../data/processed/Rand/RGB_TIF"
307 | ]
308 | },
309 | {
310 | "cell_type": "code",
311 | "execution_count": 7,
312 | "metadata": {},
313 | "outputs": [
314 | {
315 | "name": "stdout",
316 | "output_type": "stream",
317 | "text": [
318 | "Latest run time 2025-03-23 20:37:15\n"
319 | ]
320 | }
321 | ],
322 | "source": [
323 | "# print the current time\n",
324 | "from datetime import datetime\n",
325 | "print(f\"Latest run time {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\")"
326 | ]
327 | },
328 | {
329 | "cell_type": "markdown",
330 | "metadata": {},
331 | "source": [
332 | "---"
333 | ]
334 | }
335 | ],
336 | "metadata": {
337 | "kernelspec": {
338 | "display_name": "split_raster-co_bDcoB",
339 | "language": "python",
340 | "name": "python3"
341 | },
342 | "language_info": {
343 | "codemirror_mode": {
344 | "name": "ipython",
345 | "version": 3
346 | },
347 | "file_extension": ".py",
348 | "mimetype": "text/x-python",
349 | "name": "python",
350 | "nbconvert_exporter": "python",
351 | "pygments_lexer": "ipython3",
352 | "version": "3.10.16"
353 | },
354 | "orig_nbformat": 4
355 | },
356 | "nbformat": 4,
357 | "nbformat_minor": 2
358 | }
359 |
-------------------------------------------------------------------------------- /requirements.txt: --------------------------------------------------------------------------------
1 | numpy>=1.19.0, <2.0.0 2 | tqdm>=4.40.0, <5.0.0 3 | scikit-image>=0.18.0, <1.0.0 4 | # GDAL==3.8.4 # For GIS only on MacOS `brew install gdal` and `pip install GDAL==3.8.4` 5 | # python_version >= "3.10" -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | from pathlib import Path 3 | 4 | 5 | def read_requirements(): 6 | with open("requirements.txt", "r") as req_file: 7 | requirements = req_file.read().splitlines() 8 | return requirements 9 | 10 | 11 | setup( 12 | name="splitraster", 13 | version="0.3.7", 14 | author="Chris Cui", 15 | license="MIT", 16 | platforms="any", 17 | author_email="", 18 | description="Provide good support for deep learning and computer vision tasks by creating a tiled output from an input raster dataset.", 19 | long_description=Path("PyPi.md").read_text(), 20 | long_description_content_type="text/markdown", 21 | url="https://github.com/cuicaihao/split_raster", 22 | package_dir={"": "src"}, 23 | project_urls={}, 24 | packages=find_packages(where="src", exclude=["data"]), 25 | python_requires=">=3.7, <3.14", 26 | keywords="split raster tiling ", 27 | install_requires=read_requirements(), 28 | classifiers=[ 29 | "License :: OSI Approved :: MIT License", 30 | "Operating System :: OS Independent", 31 | "Programming Language :: Python :: 3.7", 32 | "Programming Language :: Python :: 3.8", 33 | "Programming Language :: Python :: 3.9", 34 | "Programming Language :: Python :: 3.10", 35 | "Programming Language :: Python :: 3.11", 36 | "Programming Language :: Python :: 3.12", 37 | "Programming Language :: Python :: 3.13", 38 | ], 39 | ) 40 | # rm -rf build dist 41 | # python setup.py sdist bdist_wheel 42 | # twine upload dist/* 43 | -------------------------------------------------------------------------------- /src/data/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/src/data/.gitkeep -------------------------------------------------------------------------------- /src/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/src/data/__init__.py -------------------------------------------------------------------------------- /src/data/make_dataset.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import click 3 | import logging 4 | from pathlib import Path 5 | from dotenv import find_dotenv, load_dotenv 6 | 7 | 8 | @click.command() 9 | @click.argument("input_filepath", type=click.Path(exists=True)) 10 | @click.argument("output_filepath", type=click.Path()) 11 | def main(input_filepath, output_filepath): 12 | """Runs data processing scripts to turn raw data from (../raw) into 13 | cleaned data ready to be analyzed (saved in ../processed). 
14 | """ 15 | logger = logging.getLogger(__name__) 16 | logger.info("making final data set from raw data") 17 | 18 | 19 | if __name__ == "__main__": 20 | log_fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" 21 | logging.basicConfig(level=logging.INFO, format=log_fmt) 22 | 23 | # not used in this stub but often useful for finding various files 24 | project_dir = Path(__file__).resolve().parents[2] 25 | 26 | # find .env automagically by walking up directories until it's found, then 27 | # load up the .env entries as environment variables 28 | load_dotenv(find_dotenv()) 29 | 30 | main() 31 | -------------------------------------------------------------------------------- /src/splitraster/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cuicaihao/split_raster/d6eea3504e83523bc9f269c01203ecf076e482af/src/splitraster/__init__.py -------------------------------------------------------------------------------- /src/splitraster/geo.py: -------------------------------------------------------------------------------- 1 | from tqdm import tqdm 2 | from osgeo import gdal, gdal_array 3 | import numpy as np 4 | from pathlib import Path 5 | import random 6 | from typing import Tuple, Optional 7 | 8 | 9 | def read_rasterArray(image_path: str) -> Tuple[np.ndarray, Tuple[float, ...], str]: 10 | dataset = gdal.Open(image_path, gdal.GA_ReadOnly) 11 | image = dataset.ReadAsArray() # get the rasterArray 12 | # convert 2D raster to [1, H, W] format 13 | if len(image.shape) == 2: 14 | image = image[np.newaxis, :, :] 15 | proj = dataset.GetProjection() 16 | geotrans = dataset.GetGeoTransform() 17 | return image, geotrans, proj 18 | 19 | 20 | def save_rasterGeoTIF( 21 | im_data: np.ndarray, im_geotrans: Tuple[float, ...], im_proj: str, file_name: str 22 | ) -> None: 23 | if Path(file_name).is_file(): 24 | print(f"Overwrite existing file: {file_name}") 25 | 26 | if "int8" in im_data.dtype.name: 27 | datatype = gdal.GDT_Byte 28 | elif "int16" in im_data.dtype.name: 29 | datatype = gdal.GDT_UInt16 30 | else: 31 | datatype = gdal.GDT_Float32 32 | 33 | if len(im_data.shape) == 3: 34 | im_bands, im_height, im_width = im_data.shape 35 | elif len(im_data.shape) == 2: 36 | im_data = np.array([im_data]) 37 | im_bands, im_height, im_width = im_data.shape 38 | 39 | driver = gdal.GetDriverByName("GTiff") 40 | dataset = driver.Create( 41 | file_name, int(im_width), int(im_height), int(im_bands), datatype 42 | ) 43 | if dataset is not None: 44 | dataset.SetGeoTransform(im_geotrans) 45 | dataset.SetProjection(im_proj) 46 | for i in range(im_bands): 47 | dataset.GetRasterBand(i + 1).WriteArray(im_data[i]) 48 | del dataset 49 | 50 | 51 | def save_rasterArray(im_data: np.ndarray, file_name: str) -> bool: 52 | if Path(file_name).is_file(): 53 | print(f"Overwrite existing file: {file_name}") 54 | gdal_array.SaveArray(im_data, file_name, format="GTiff") 55 | return True 56 | 57 | 58 | def count_files(folder_path: str) -> int: 59 | return sum(1 for path in Path(folder_path).iterdir() if path.is_file()) 60 | 61 | 62 | def padding_mul_image(img: np.ndarray, stride: int) -> np.ndarray: 63 | D, height, width = img.shape # (D, H, W) format Channel First. 
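64 |     # Example: with stride 256, a 5000x5000 raster pads up to 5120x5120,
65 |     # since ceil(5000 / 256) * 256 = 5120; the added rows and columns are
66 |     # filled by reflecting the image edges ("reflect" mode below).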
67 |     # get the minimal padding image size
68 |     H = int(np.ceil(height / stride) * stride)
69 |     W = int(np.ceil(width / stride) * stride)
70 |
71 |     padded_img = np.zeros((D, H, W), dtype=img.dtype)
72 |     for d in range(D):  # padding every layer
73 |         onelayer = img[d, :, :]
74 |         padded_img[d, :, :] = np.pad(
75 |             onelayer, ((0, H - height), (0, W - width)), "reflect"
76 |         )
77 |     return padded_img
78 |
79 |
80 | def split_image(
81 |     img_path: str,
82 |     save_path: str,
83 |     crop_size: int,
84 |     repetition_rate: float = 0,
85 |     overwrite: bool = True,
86 |     ext: Optional[str] = None,
87 | ) -> Optional[int]:
88 |     # check input image
89 |     img, geotrans, proj = read_rasterArray(img_path)
90 |     if img is None:
91 |         print("Image not found")
92 |         return None
93 |     # default to the input file's suffix when no extension is given
94 |     ext = ext or Path(img_path).suffix
95 |     # check output folder, if not exists, create it.
96 |     Path(save_path).mkdir(parents=True, exist_ok=True)
97 |
98 |     print(f"Input Image File Shape (D, H, W):{img.shape}")
99 |
100 |     stride = int(crop_size * (1 - repetition_rate))
101 |     print(f"crop_size = {crop_size}, stride = {stride}")
102 |
103 |     padded_img = padding_mul_image(img, stride)
104 |
105 |     H = padded_img.shape[1]
106 |     W = padded_img.shape[2]
107 |
108 |     print(f"Padding Image File Shape (D, H, W):{padded_img.shape}")
109 |
110 |     if overwrite:
111 |         new_name = 1
112 |     else:
113 |         cnt = count_files(save_path)
114 |         new_name = cnt + 1
115 |         print(f"There are {cnt} files in the {save_path}")
116 |         print(f"New image name will start with {new_name}")
117 |
118 |     n_rows = int((H - crop_size) / stride + 1)
119 |     n_cols = int((W - crop_size) / stride + 1)
120 |
121 |     def tile_generator():
122 |         for idh in range(n_rows):
123 |             h = idh * stride
124 |             for idw in range(n_cols):
125 |                 w = idw * stride
126 |                 yield h, w
127 |
128 |     with tqdm(
129 |         total=n_rows * n_cols, desc="Generating", colour="green", leave=True, unit="img"
130 |     ) as pbar:
131 |         for n, (h, w) in enumerate(tile_generator()):
132 |             crop_img = padded_img[:, h : h + crop_size, w : w + crop_size]
133 |             crop_image_name = f"{new_name:04d}{ext}"
134 |             crop_image_path = Path(save_path) / crop_image_name
135 |             save_rasterGeoTIF(crop_img, geotrans, proj, str(crop_image_path))
136 |             new_name += 1
137 |             pbar.update(1)
138 |
139 |     return n + 1
140 |
141 |
142 | def random_crop_image(
143 |     img_path: str,
144 |     img_save_path: str,
145 |     label_path: str,
146 |     label_save_path: str,
147 |     crop_size: int = 256,
148 |     crop_number: int = 20,
149 |     img_ext: str = ".tif",
150 |     label_ext: str = ".tif",
151 |     overwrite: bool = True,
152 | ) -> Optional[int]:
153 |     """Generate random cropped image pairs from the input image pair.
154 |
155 |     Args:
156 |         img_path (str): path of input image
157 |         img_save_path (str): path to save cropped images
158 |         label_path (str): path of input label
159 |         label_save_path (str): path to save cropped labels
160 |         crop_size (int): image tile size (H, W), i.e., 256x256
161 |         crop_number (int): number of crops to generate
162 |         img_ext (str): extension for image files
163 |         label_ext (str): extension for label files
164 |         overwrite (bool): overwrite existing files
165 |     """
166 |     img, geotrans, proj = read_rasterArray(img_path)
167 |     if img is None:
168 |         print("Input image is missing")
169 |         return None
170 |
171 |     label, geotrans, proj = read_rasterArray(label_path)
172 |     if label is None:
173 |         print("Label image is missing")
174 |         return None
175 |
176 |     # check output folder, if not exists, create it.
177 |     Path(img_save_path).mkdir(parents=True, exist_ok=True)
178 |     Path(label_save_path).mkdir(parents=True, exist_ok=True)
179 |
180 |     # get the file formats, if none, use the same format as the source file.
181 |     if img_ext is None:
182 |         img_ext = Path(img_path).suffix
183 |     if label_ext is None:
184 |         label_ext = Path(label_path).suffix
185 |
186 |     # find the start name of the image pairs.
187 |     if overwrite:
188 |         new_name = 1
189 |     else:
190 |         img_cnt = count_files(img_save_path)
191 |         label_cnt = count_files(label_save_path)
192 |         new_name = img_cnt + 1
193 |         print(f"There are {img_cnt} files in the {img_save_path}")
194 |         print(f"There are {label_cnt} files in the {label_save_path}")
195 |         if not img_cnt == label_cnt:
196 |             print("Image pairs do not match in the output folders.")
197 |             return None
198 |         print(f"New image pairs' name will start with {new_name}")
199 |
200 |     crop_cnt = 0
201 |     H = img.shape[1]
202 |     W = img.shape[2]
203 |
204 |     with tqdm(
205 |         total=crop_number, desc="Generating", colour="green", leave=True, unit="img"
206 |     ) as pbar:
207 |         while crop_cnt < crop_number:
208 |             # Crop img_crop, label_crop pairs and save them to the output folders.
209 |             UpperLeftX = random.randint(0, H - crop_size)
210 |             UpperLeftY = random.randint(0, W - crop_size)
211 |
212 |             imgCrop = img[
213 |                 :,
214 |                 UpperLeftX : UpperLeftX + crop_size,
215 |                 UpperLeftY : UpperLeftY + crop_size,
216 |             ]
217 |
218 |             labelCrop = label[
219 |                 :,
220 |                 UpperLeftX : UpperLeftX + crop_size,
221 |                 UpperLeftY : UpperLeftY + crop_size,
222 |             ]
223 |             # save image pairs
224 |             crop_image_name = f"{new_name:04d}{img_ext}"
225 |             crop_image_path = Path(img_save_path) / crop_image_name
226 |             save_rasterGeoTIF(imgCrop, geotrans, proj, str(crop_image_path))
227 |
228 |             crop_image_name = f"{new_name:04d}{label_ext}"
229 |             crop_image_path = Path(label_save_path) / crop_image_name
230 |             save_rasterGeoTIF(labelCrop, geotrans, proj, str(crop_image_path))
231 |
232 |             new_name += 1  # update image name
233 |             crop_cnt += 1  # add crop count
234 |             pbar.update(1)
235 |
236 |     return crop_cnt  # return total crop sample pair number.
-------------------------------------------------------------------------------- /src/splitraster/io.py: --------------------------------------------------------------------------------
1 | # import time
2 | # from osgeo import gdal
3 | from tqdm import tqdm
4 | import numpy as np
5 | from skimage.io import imread, imsave
6 | from pathlib import Path
7 | import random
8 |
9 |
10 | def read_image(file_name) -> np.ndarray:
11 |     """
12 |     Read image from file_name
13 |     Args: file_name: image file name
14 |     Returns: image array
15 |     Note:
16 |         The different color bands/channels are stored in the third dimension,
17 |         such that a gray image is HxW, an RGB image HxWx3, and an RGBA image HxWx4.
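18 |     Example (illustrative): read_image("./data/raw/RGB.png") returns an (H, W, 3) array.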
19 |     """
20 |     try:
21 |         if not Path(file_name).is_file():
22 |             print(f"Cannot open file: {file_name}")
23 |             return None
24 |         img = imread(file_name)
25 |         return img
26 |     except Exception as e:
27 |         print("Error in read_image: " + str(e))
28 |         return None
29 |
30 |
31 | def save_image(img_arr, file_name) -> str:
32 |     """
33 |     Save image to file_name
34 |     Args: img_arr: image array
35 |     Output: file_name: image file name
36 |     """
37 |     if Path(file_name).is_file():
38 |         print(f"Overwrite existing file: {file_name}")
39 |     imsave(file_name, img_arr)
40 |     return file_name
41 |
42 |
43 | def count_files(folder_path):
44 |     """
45 |     Count the number of files in the folder
46 |     Args: folder_path: folder path
47 |     Returns: number of files
48 |     """
49 |     count = 0
50 |     for path in Path(folder_path).iterdir():
51 |         if path.is_file():
52 |             count += 1
53 |     return count
54 |
55 |
56 | def padding_image(img, stride) -> np.ndarray:
57 |     """
58 |     Padding image to the size of multiple of stride
59 |     Args:
60 |         img: image array
61 |         stride: stride
62 |     Returns:
63 |         padded image array
64 |     """
65 |
66 |     if len(img.shape) == 2:
67 |         img = img[:, :, np.newaxis]
68 |     height = img.shape[0]
69 |     width = img.shape[1]
70 |     D = img.shape[2]  # this one is for (H, W, C) format
71 |     # get the minimal padding image size
72 |     H = int(np.ceil(height / stride) * stride)
73 |     W = int(np.ceil(width / stride) * stride)
74 |
75 |     padded_img = np.zeros([H, W, D], dtype=img.dtype)
76 |     for d in range(D):  # padding every layer
77 |         onelayer = img[:, :, d]
78 |         padded_img[:, :, d] = np.pad(
79 |             onelayer, ((0, H - height), (0, W - width)), "reflect"
80 |         )
81 |     padded_img = np.squeeze(padded_img)  # Remove axes of length one
82 |     return padded_img
83 |
84 |
85 | def split_image(
86 |     img_path, save_path, crop_size, repetition_rate=0, overwrite=True
87 | ) -> int:
88 |     """
89 |     Split image into tiles
90 |     Args:
91 |         img_path: image path
92 |         save_path: save path
93 |         crop_size: crop size
94 |         repetition_rate: repetition rate
95 |         overwrite: overwrite existing files
96 |     Returns:
97 |         number of tiles
98 |     """
99 |
100 |     # check input image
101 |     img = read_image(img_path)
102 |     if img is None:
103 |         return None
104 |     # get image suffix
105 |     ext = Path(img_path).suffix
106 |     # check output folder, if not exists, create it.
107 |     Path(save_path).mkdir(parents=True, exist_ok=True)
108 |
109 |     print(f"Input Image File Shape (H, W, D):{img.shape}")
110 |
111 |     stride = int(crop_size * (1 - repetition_rate))
112 |     print(f"crop_size = {crop_size}, stride = {stride}")
113 |
114 |     padded_img = padding_image(img, stride)
115 |     H = padded_img.shape[0]
116 |     W = padded_img.shape[1]
117 |     print(f"Padding Image File Shape (H, W, D):{padded_img.shape}")
118 |
119 |     if overwrite:
120 |         new_name = 1
121 |     else:
122 |         cnt = count_files(save_path)
123 |         new_name = cnt + 1
124 |         print(f"There are {cnt} files in the {save_path}")
125 |         print(f"New image name will start with {new_name}")
126 |
127 |     n_rows = int((H - crop_size) / stride + 1)
128 |     n_cols = int((W - crop_size) / stride + 1)
129 |
130 |     def tile_generator():
131 |         for idh in range(n_rows):
132 |             h = idh * stride
133 |             for idw in range(n_cols):
134 |                 w = idw * stride
135 |                 yield h, w
136 |
137 |     with tqdm(
138 |         total=n_rows * n_cols, desc="Generating", colour="green", leave=True, unit="img"
139 |     ) as pbar:
140 |         if len(img.shape) == 2:
141 |             for n, (h, w) in enumerate(tile_generator()):
142 |                 crop_img = padded_img[h : h + crop_size, w : w + crop_size]
143 |                 crop_image_name = f"{new_name:04d}{ext}"
144 |                 crop_image_path = Path(save_path) / crop_image_name
145 |                 save_image(crop_img, crop_image_path)
146 |                 new_name = new_name + 1
147 |                 pbar.update(1)
148 |         else:
149 |             for n, (h, w) in enumerate(tile_generator()):
150 |                 crop_img = padded_img[h : h + crop_size, w : w + crop_size, :]
151 |                 crop_image_name = f"{new_name:04d}{ext}"
152 |                 crop_image_path = Path(save_path) / crop_image_name
153 |                 save_image(crop_img, crop_image_path)
154 |                 new_name = new_name + 1
155 |                 pbar.update(1)
156 |
157 |     return n + 1
158 |
159 |
160 | def random_crop_image(
161 |     img_path,
162 |     img_save_path,
163 |     label_path,
164 |     label_save_path,
165 |     crop_size=256,
166 |     crop_number=20,
167 |     img_ext=".jpg",
168 |     label_ext=".png",
169 |     overwrite=True,
170 | ) -> int:
171 |     """Generate random cropped image pairs from the input image pair.
172 |
173 |     Args:
174 |         img_path (str): path of input image
175 |         img_save_path (str): path to save cropped images
176 |         label_path (str): path of input label
177 |         label_save_path (str): path to save cropped labels
178 |         crop_size (int): image tile size (H, W), i.e., 256x256
179 |         crop_number (int): number of crops to generate
180 |         img_ext (str): extension for image files
181 |         label_ext (str): extension for label files
182 |         overwrite (bool, optional): overwrite existing files. Defaults to True.
183 |     """
184 |     img = read_image(img_path)
185 |     if img is None:
186 |         print("Input image is missing")
187 |         return None
188 |     label = read_image(label_path)
189 |     if label is None:
190 |         print("Label image is missing")
191 |         return None
192 |
193 |     # check output folder, if not exists, create it.
194 |     Path(img_save_path).mkdir(parents=True, exist_ok=True)
195 |     Path(label_save_path).mkdir(parents=True, exist_ok=True)
196 |
197 |     # get the file formats, if none, use the same format as the source file.
198 |     if img_ext is None:
199 |         img_ext = Path(img_path).suffix
200 |     if label_ext is None:
201 |         label_ext = Path(label_path).suffix
202 |
203 |     # find the start name of the image pairs.
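204 |     # When overwrite is False, tile numbering continues after the files already
205 |     # in the output folders, so both folders must hold the same number of pairs.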
206 |     if overwrite:
207 |         new_name = 1
208 |     else:
209 |         img_cnt = count_files(img_save_path)
210 |         label_cnt = count_files(label_save_path)
211 |         new_name = img_cnt + 1
212 |         print(f"There are {img_cnt} files in the {img_save_path}")
213 |         print(f"There are {label_cnt} files in the {label_save_path}")
214 |         if not img_cnt == label_cnt:
215 |             print("Image pairs do not match in the output folders.")
216 |             return None
217 |         print(f"New image pairs' name will start with {new_name}")
218 |
219 |     crop_cnt = 0
220 |     H = img.shape[0]
221 |     W = img.shape[1]
222 |
223 |     with tqdm(
224 |         total=crop_number, desc="Generating", colour="green", leave=True, unit="img"
225 |     ) as pbar:
226 |         while crop_cnt < crop_number:
227 |             # Crop img_crop, label_crop pairs and save them to the output folders.
228 |             UpperLeftX = random.randint(0, H - crop_size)
229 |             UpperLeftY = random.randint(0, W - crop_size)
230 |             if len(img.shape) == 2:
231 |                 imgCrop = img[
232 |                     UpperLeftX : UpperLeftX + crop_size,
233 |                     UpperLeftY : UpperLeftY + crop_size,
234 |                 ]
235 |             else:
236 |                 imgCrop = img[
237 |                     UpperLeftX : UpperLeftX + crop_size,
238 |                     UpperLeftY : UpperLeftY + crop_size,
239 |                     :,
240 |                 ]
241 |             if len(label.shape) == 2:
242 |                 labelCrop = label[
243 |                     UpperLeftX : UpperLeftX + crop_size,
244 |                     UpperLeftY : UpperLeftY + crop_size,
245 |                 ]
246 |             else:
247 |                 labelCrop = label[
248 |                     UpperLeftX : UpperLeftX + crop_size,
249 |                     UpperLeftY : UpperLeftY + crop_size,
250 |                     :,
251 |                 ]
252 |             # save image pairs
253 |             crop_image_name = f"{new_name:04d}{img_ext}"
254 |             crop_image_path = Path(img_save_path) / crop_image_name
255 |             save_image(imgCrop, crop_image_path)
256 |
257 |             crop_image_name = f"{new_name:04d}{label_ext}"
258 |             crop_image_path = Path(label_save_path) / crop_image_name
259 |             save_image(labelCrop, crop_image_path)
260 |
261 |             new_name = new_name + 1  # update image name
262 |             crop_cnt = crop_cnt + 1  # add crop count
263 |             pbar.update(1)
264 |
265 |     return crop_cnt  # return total crop sample pair number.
-------------------------------------------------------------------------------- /test.py: --------------------------------------------------------------------------------
1 | # Test the Packages
2 | # Example A:
3 | def test_rgb_gt_slide_window() -> None:
4 |     from splitraster import io
5 |
6 |     # Step 1: set input image file path
7 |     input_image_path = "./data/raw/RGB.png"
8 |     gt_image_path = "./data/raw/GT.png"
9 |
10 |     # Step 2: prepare output directory and splitting configuration
11 |     input_save_path = "./data/processed/RGB"
12 |     gt_save_path = "./data/processed/GT"
13 |
14 |     crop_size = 256
15 |     repetition_rate = 0
16 |     overwrite = False
17 |
18 |     # Step 3: split the RGB images
19 |     n = io.split_image(
20 |         input_image_path,
21 |         input_save_path,
22 |         crop_size,
23 |         repetition_rate=repetition_rate,
24 |         overwrite=overwrite,
25 |     )
26 |     print(f"{n} tile samples of {input_image_path} are added at {input_save_path}")
27 |
28 |     # Step 4: split the GT images
29 |     n = io.split_image(
30 |         gt_image_path,
31 |         gt_save_path,
32 |         crop_size,
33 |         repetition_rate=repetition_rate,
34 |         overwrite=overwrite,
35 |     )
36 |     print(f"{n} tile samples of {gt_image_path} are added at {gt_save_path}")
37 |
38 |     # Step 5: Use the RGB and GT folders for your deep learning model.
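39 | # Run these examples with: pytest test.py -v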
40 |
41 |
42 | # Example B
43 | def test_rgb_gt_random_crop():
44 |     from splitraster import io
45 |
46 |     input_image_path = "./data/raw/RGB.png"
47 |     gt_image_path = "./data/raw/GT.png"
48 |
49 |     save_path = "./data/processed/Rand/RGB"
50 |     save_path_gt = "./data/processed/Rand/GT"
51 |
52 |     n = io.random_crop_image(
53 |         input_image_path,
54 |         save_path,
55 |         gt_image_path,
56 |         save_path_gt,
57 |         crop_size=256,
58 |         crop_number=20,
59 |         img_ext=".png",
60 |         label_ext=".png",
61 |         overwrite=True,
62 |     )
63 |
64 |     print(
65 |         f"{n} sample pairs of {input_image_path, gt_image_path} are added at {save_path, save_path_gt}"
66 |     )
67 |
68 |
69 | # # Example C
70 | # def test_tif_slide_window():
71 | #     from splitraster import geo
72 |
73 | #     input_tif_image_path = "./data/raw/TIF/RGB5k.tif"
74 | #     gt_tif_image_path = "./data/raw/TIF/GT5k.tif"
75 |
76 | #     input_save_image_path = "./data/processed/RGB_TIF"
77 | #     gt_save_image_path = "./data/processed/GT_TIF"
78 |
79 | #     crop_size = 500
80 | #     repetition_rate = 0
81 | #     overwrite = True
82 |
83 | #     n = geo.split_image(
84 | #         input_tif_image_path,
85 | #         input_save_image_path,
86 | #         crop_size,
87 | #         repetition_rate,
88 | #         overwrite,
89 | #     )
90 |
91 | #     print(
92 | #         f"{n} tile samples of {input_tif_image_path} are added at {input_save_image_path}"
93 | #     )
94 |
95 | #     n = geo.split_image(
96 | #         gt_tif_image_path, gt_save_image_path, crop_size, repetition_rate, overwrite
97 | #     )
98 |
99 | #     print(f"{n} tile samples of {gt_tif_image_path} are added at {gt_save_image_path}")
100 |
101 |
102 | # # Example D
103 | # def test_tif_random_sample():
104 | #     from splitraster import geo
105 |
106 | #     input_tif_image_path = "./data/raw/TIF/RGB5k.tif"
107 | #     gt_tif_image_path = "./data/raw/TIF/GT5k.tif"
108 |
109 | #     input_save_image_path = "./data/processed/Rand/RGB_TIF"
110 | #     gt_save_image_path = "./data/processed/Rand/GT_TIF"
111 |
112 | #     n = geo.random_crop_image(
113 | #         input_tif_image_path,
114 | #         input_save_image_path,
115 | #         gt_tif_image_path,
116 | #         gt_save_image_path,
117 | #         crop_size=500,
118 | #         crop_number=20,
119 | #         overwrite=True,
120 | #     )
121 |
122 | #     print(
123 | #         f"{n} sample pairs of {input_tif_image_path, gt_tif_image_path} are added at {input_save_image_path, gt_save_image_path}."
124 | #     )
125 |
126 |
127 | print("PASS")
128 |
-------------------------------------------------------------------------------- /test_environment.py: --------------------------------------------------------------------------------
1 | import sys
2 |
3 | REQUIRED_PYTHON = "python3"
4 |
5 |
6 | def main():
7 |     system_major = sys.version_info.major
8 |     if REQUIRED_PYTHON == "python":
9 |         required_major = 2
10 |     elif REQUIRED_PYTHON == "python3":
11 |         required_major = 3
12 |     else:
13 |         raise ValueError("Unrecognized python interpreter: {}".format(REQUIRED_PYTHON))
14 |
15 |     if system_major != required_major:
16 |         raise TypeError(
17 |             "This project requires Python {}. Found: Python {}".format(
18 |                 required_major, sys.version
19 |             )
20 |         )
21 |     else:
22 |         print(">>> Development environment passes all tests!")
23 |
24 |
25 | if __name__ == "__main__":
26 |     main()
27 |
-------------------------------------------------------------------------------- /tox.ini: --------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 79
3 | max-complexity = 10
--------------------------------------------------------------------------------