├── .github ├── dependabot.yml └── workflows │ ├── release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CODE_OF_CONDUCT.md ├── LICENSE ├── Makefile ├── README.rst ├── README.template.rst ├── conftest.py ├── default.nix ├── docs ├── Makefile ├── make.bat └── source │ ├── _static │ └── custom.css │ ├── api_core.rst │ ├── api_parser.rst │ ├── api_reference.rst │ ├── changelog.rst │ ├── conf.py │ ├── contributing.rst │ ├── history.rst │ ├── howto.rst │ ├── index.rst │ ├── installation.rst │ ├── quickstarts.rst │ └── readme.rst ├── example.json ├── flake.lock ├── flake.nix ├── jsonpath ├── __init__.py ├── cli.py ├── core.py ├── grammar.lark ├── lark.py ├── parser.py ├── py.typed └── transformer.py ├── jsonpath_build.py ├── noxfile.py ├── pdm.lock ├── pyproject.toml ├── scripts ├── build_readme.py ├── export_requirements_txt.py ├── requirements │ ├── requirements-dev.txt │ ├── requirements-docs.txt │ ├── requirements-mini.txt │ └── requirements.txt └── watch_build_and_serve_html_docs.py ├── setup.cfg └── tests ├── __init__.py ├── test_cli.py ├── test_core.py ├── test_lark.py └── utils.py /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - "*" 7 | 8 | jobs: 9 | release: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v1 13 | - name: Set up PDM 14 | uses: pdm-project/setup-pdm@v2.5 15 | with: 16 | python-version: 3.9 17 | version: 2.19.2 18 | - name: Build release 19 | run: | 20 | pdm build 21 | - name: Upload release 22 | uses: actions/upload-artifact@v4 23 | with: 24 | name: dist 25 | path: 
dist 26 | - name: Publish release to PYPI 27 | run: | 28 | pip install twine 29 | twine upload -u ${{ secrets.PYPI_USERNAME }} -p ${{ secrets.PYPI_PASSWORD }} --verbose dist/* 30 | - name: Publish release to GitHub Release 31 | uses: softprops/action-gh-release@v2 32 | with: 33 | files: dist/* 34 | env: 35 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 36 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Lint&Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - release/* 8 | pull_request: 9 | branches: 10 | - "*" 11 | 12 | jobs: 13 | lint: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v1 17 | - name: Set up PDM 18 | uses: pdm-project/setup-pdm@v3 19 | with: 20 | python-version: 3.13 21 | version: 2.19.2 22 | - name: Cache Nox Virtualenvs 23 | uses: actions/cache@v1 24 | with: 25 | path: .nox 26 | key: ${{ runner.os }}-nox-${{ hashFiles('**/pdm.lock') }} 27 | restore-keys: ${{ runner.os }}-nox 28 | - name: Install nox 29 | run: | 30 | pip install nox 31 | pdm config python.use_venv true 32 | - uses: pre-commit/action@v2.0.0 33 | env: 34 | SKIP: export_requirements_txt 35 | test: 36 | needs: lint 37 | runs-on: ubuntu-latest 38 | strategy: 39 | matrix: 40 | python-version: ["3.10", "3.11", "3.12", "3.13"] 41 | steps: 42 | - uses: actions/checkout@v1 43 | - name: Set up PDM 44 | uses: pdm-project/setup-pdm@v3 45 | with: 46 | python-version: ${{ matrix.python-version }} 47 | - name: Cache Nox Virtualenvs 48 | uses: actions/cache@v1 49 | with: 50 | path: .nox 51 | key: ${{ runner.os }}-${{ matrix.python-version }}-nox-${{ hashFiles('**/pdm.lock') }} 52 | restore-keys: ${{ runner.os }}-${{ matrix.python-version }}-nox 53 | - name: Install nox 54 | run: | 55 | pip install nox 56 | pdm config python.use_venv true 57 | - name: Test with coverage 58 | run: | 59 | make PYTHON=${{ 
matrix.python-version }} cov 60 | - name: Upload coverage to Codecov 61 | uses: codecov/codecov-action@v2 62 | with: 63 | token: ${{ secrets.CODECOV_TOKEN }} 64 | flags: main,unittest,${{ matrix.python-version }} 65 | fail_ci_if_error: false 66 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/python 3 | # Edit at https://www.gitignore.io/?templates=python 4 | 5 | ### Python ### 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | pip-wheel-metadata/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # IPython 84 | profile_default/ 85 | ipython_config.py 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don’t work, or not 94 | # install all needed dependencies. 
95 | #Pipfile.lock 96 | 97 | # celery beat schedule file 98 | celerybeat-schedule 99 | 100 | # SageMath parsed files 101 | *.sage.py 102 | 103 | # Environments 104 | .env 105 | .venv 106 | env/ 107 | venv/ 108 | ENV/ 109 | env.bak/ 110 | venv.bak/ 111 | 112 | # Spyder project settings 113 | .spyderproject 114 | .spyproject 115 | 116 | # Rope project settings 117 | .ropeproject 118 | 119 | # mkdocs documentation 120 | /site 121 | 122 | # mypy 123 | .mypy_cache/ 124 | .dmypy.json 125 | dmypy.json 126 | 127 | # Pyre type checker 128 | .pyre/ 129 | 130 | # End of https://www.gitignore.io/api/python 131 | 132 | # Custom 133 | ## IDEA 134 | .vscode 135 | ## Codegen 136 | lark_parser.py 137 | ## Emacs Persp-mode 138 | .persp-confs 139 | ## PDM 140 | __pypackages__ 141 | .pdm.toml 142 | .dream2nix/ 143 | .pdm-python 144 | pdm.toml 145 | result 146 | .envrc 147 | .direnv 148 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/commitizen-tools/commitizen 3 | rev: v4.1.0 4 | hooks: 5 | - id: commitizen 6 | stages: 7 | - commit-msg 8 | - repo: https://github.com/pre-commit/pre-commit-hooks 9 | rev: v5.0.0 10 | hooks: 11 | - id: check-symlinks 12 | - id: check-toml 13 | - id: check-yaml 14 | args: [--unsafe] 15 | - id: detect-private-key 16 | - id: end-of-file-fixer 17 | - id: trailing-whitespace 18 | - id: check-added-large-files 19 | - id: mixed-line-ending 20 | args: [--fix=lf] 21 | - repo: https://github.com/pre-commit/pygrep-hooks 22 | rev: v1.10.0 23 | hooks: 24 | - id: python-check-blanket-noqa 25 | - id: python-check-mock-methods 26 | - id: python-no-eval 27 | - id: python-no-log-warn 28 | - id: python-use-type-annotations 29 | - id: rst-backticks 30 | - repo: https://github.com/psf/black 31 | rev: 24.10.0 32 | hooks: 33 | - id: black 34 | - repo: https://github.com/asottile/blacken-docs 35 | 
rev: 1.19.1 36 | hooks: 37 | - id: blacken-docs 38 | additional_dependencies: [black==23.3.*] 39 | - repo: https://github.com/PyCQA/flake8 40 | rev: 7.1.1 41 | hooks: 42 | - id: flake8 43 | additional_dependencies: ["flake8-bugbear==23.5.*"] 44 | - repo: https://github.com/pre-commit/mirrors-mypy 45 | rev: v1.14.0 46 | hooks: 47 | - id: mypy 48 | files: (jsonpath|tests)/.+\.py$ 49 | pass_filenames: false 50 | args: 51 | - jsonpath 52 | - tests 53 | - repo: https://github.com/pre-commit/mirrors-isort 54 | rev: v5.10.1 55 | hooks: 56 | - id: isort 57 | - repo: https://github.com/PyCQA/doc8 58 | rev: v1.1.2 59 | hooks: 60 | - id: doc8 61 | - repo: local 62 | hooks: 63 | - id: build_readme 64 | name: build_readme 65 | description: Build README.rst 66 | entry: nox -s build_readme 67 | language: system 68 | pass_filenames: false 69 | types: [rst] 70 | - id: export_requirements_txt 71 | name: export_requirements_txt 72 | description: create requirement file for python 73 | entry: python3 scripts/export_requirements_txt.py 74 | language: system 75 | files: pdm.lock 76 | pass_filenames: false 77 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3.12" 7 | 8 | python: 9 | install: 10 | - requirements: ./scripts/requirements/requirements-docs.txt 11 | - path: . 
12 | 13 | sphinx: 14 | builder: html 15 | configuration: docs/source/conf.py 16 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 
39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at linw1995@icloud.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 林玮 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | help: 2 | @echo "PYTHON=X.Y init setup development environemnt with specific Python version" 3 | @echo "init setup development environment with defualt Python version 3.11" 4 | @echo "update-dev update devepoment dependencies via pdm and via pre-commit" 5 | @echo "update update all dependencies via pdm and via pre-commit" 6 | @echo "pre-commit setup git hooks" 7 | @echo "check-all run code quality checkers" 8 | @echo "test run quick tests" 9 | @echo "vtest run quick tests with verbose" 10 | @echo "PYTHON=X.Y cov run tests with coverage and with specific Python version" 11 | @echo "cov run tests with coverage and with default Python version 3.11" 12 | @echo "serve-docs serve documents with live-reloading" 13 | 14 | EMPTY := 15 | SPACE := $(EMPTY) $(EMPTY) 16 | 17 | PYTHON = 3.13 18 | EXTRAS = parser 19 | DEV_EXTRAS = test docs build_readme 20 | EXTRAS_ARGS = $(if $(EXTRAS),-G,) $(subst $(SPACE),$(SPACE)-G$(SPACE),$(EXTRAS)) 21 | DEV_EXTRAS_ARGS = $(if $(DEV_EXTRAS),-G,) $(subst $(SPACE),$(SPACE)-G$(SPACE),$(DEV_EXTRAS)) 22 | 23 | # Environment setup 24 | init: 25 | @echo ">> installing $(if $(EXTRAS),\"$(EXTRAS)\" ,)dependencies by pdm" 26 | $(if $(PYTHON),pdm use -f $(PYTHON),) 27 | pdm info && pdm info --env 28 | pdm sync --no-editable -v $(EXTRAS_ARGS) $(DEV_EXTRAS_ARGS) 29 | pdm config -l python.use_venv true 30 | pdm config -l strategy.inherit_metadata true 31 | 32 | deinit: 33 | rm -rf .nox 34 | rm -rf __pypackages__ 35 | rm -rf .mypy_cache 36 | rm -rf htmlcov 37 | rm -rf .pytest_cache 38 | rm -rf *.egg-info 39 | 40 | 41 | update-dev: 42 | pdm update $(DEV_EXTRAS_ARGS) $(EXTRAS_ARGS) 43 | pre-commit autoupdate 44 | 45 | update: 46 | pdm update 47 | pre-commit autoupdate 48 | 49 | # Environment setup end 50 | 51 | pre-commit: 52 | pre-commit install --hook-type commit-msg 
--hook-type pre-commit --overwrite 53 | 54 | check-all: 55 | pre-commit run --all-files 56 | 57 | test: 58 | pdm run pytest -q -x --ff --nf 59 | 60 | vtest: 61 | pdm run pytest -vv -x --ff --nf 62 | 63 | cov: 64 | rm -rf .coverage 65 | nox -p $(PYTHON) -s coverage_test coverage_report -- $(TARGET) 66 | 67 | # Docs 68 | serve-docs: 69 | pdm run python scripts/watch_build_and_serve_html_docs.py 70 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | JSONPATH 3 | ======== 4 | 5 | |license| |Pypi Status| |Python version| |Package version| |PyPI - Downloads| 6 | |GitHub last commit| |Code style: black| |Build Status| |codecov| |PDM managed| 7 | 8 | A selector expression for extracting data from JSON. 9 | 10 | Quickstarts 11 | <<<<<<<<<<< 12 | 13 | 14 | Installation 15 | ~~~~~~~~~~~~ 16 | 17 | Install the stable version from PYPI. 18 | 19 | .. code-block:: shell 20 | 21 | pip install jsonpath-extractor 22 | 23 | Or install the latest version from Github. 24 | 25 | .. code-block:: shell 26 | 27 | pip install git+https://github.com/linw1995/jsonpath.git@master 28 | 29 | Usage 30 | ~~~~~ 31 | 32 | .. code-block:: json 33 | 34 | { 35 | "goods": [ 36 | {"price": 100, "category": "Comic book"}, 37 | {"price": 200, "category": "magazine"}, 38 | {"price": 200, "no category": ""} 39 | ], 40 | "targetCategory": "book" 41 | } 42 | 43 | 44 | How to parse and extract all the comic book data from the above JSON file. 45 | 46 | .. code-block:: python3 47 | 48 | import json 49 | 50 | from jsonpath import parse 51 | 52 | with open("example.json", "r") as f: 53 | data = json.load(f) 54 | 55 | assert parse("$.goods[contains(@.category, $.targetCategory)]").find(data) == [ 56 | {"price": 100, "category": "Comic book"} 57 | ] 58 | 59 | Or use the `jsonpath.core `_ module to extract it. 60 | 61 | .. 
code-block:: python3 62 | 63 | from jsonpath.core import Root, Contains, Self 64 | 65 | assert Root().Name("goods").Predicate( 66 | Contains(Self().Name("category"), Root().Name("targetCategory")) 67 | ).find(data) == [{"price": 100, "category": "Comic book"}] 68 | 69 | 70 | Usage via CLI 71 | ~~~~~~~~~~~~~ 72 | 73 | The faster way to extract by using CLI. 74 | 75 | .. code-block:: shell 76 | 77 | jp -f example.json "$.goods[contains(@.category, $.targetCategory)]" 78 | 79 | Or pass content by pipeline. 80 | 81 | .. code-block:: shell 82 | 83 | cat example.json | jp "$.goods[contains(@.category, $.targetCategory)]" 84 | 85 | The output of the above commands. 86 | 87 | .. code-block:: json 88 | 89 | [ 90 | { 91 | "price": 100, 92 | "category": "Comic book" 93 | } 94 | ] 95 | 96 | Changelog 97 | <<<<<<<<< 98 | 99 | v0.9.1 100 | ~~~~~~ 101 | 102 | Build 103 | ***** 104 | 105 | - Remove support for Python 3.8 106 | 107 | 108 | Contributing 109 | <<<<<<<<<<<< 110 | 111 | 112 | Environment Setup 113 | ~~~~~~~~~~~~~~~~~ 114 | 115 | Clone the source codes from Github. 116 | 117 | .. code-block:: shell 118 | 119 | git clone https://github.com/linw1995/jsonpath.git 120 | cd jsonpath 121 | 122 | Setup the development environment. 123 | Please make sure you install the pdm_, 124 | pre-commit_ and nox_ CLIs in your environment. 125 | 126 | .. code-block:: shell 127 | 128 | make init 129 | make PYTHON=3.8 init # for specific python version 130 | 131 | Linting 132 | ~~~~~~~ 133 | 134 | Use pre-commit_ for installing linters to ensure a good code style. 135 | 136 | .. code-block:: shell 137 | 138 | make pre-commit 139 | 140 | Run linters. Some linters run via CLI nox_, so make sure you install it. 141 | 142 | .. code-block:: shell 143 | 144 | make check-all 145 | 146 | Testing 147 | ~~~~~~~ 148 | 149 | Run quick tests. 150 | 151 | .. code-block:: shell 152 | 153 | make 154 | 155 | Run quick tests with verbose. 156 | 157 | .. 
code-block:: shell 158 | 159 | make vtest 160 | 161 | Run tests with coverage. 162 | Testing in multiple Python environments is powered by CLI nox_. 163 | 164 | .. code-block:: shell 165 | 166 | make cov 167 | 168 | Documentation 169 | ~~~~~~~~~~~~~ 170 | 171 | Run serving documents with live-reloading. 172 | 173 | .. code-block:: shell 174 | 175 | make serve-docs 176 | 177 | .. _pdm: https://github.com/pdm-project/pdm 178 | .. _pre-commit: https://pre-commit.com/ 179 | .. _nox: https://nox.thea.codes/en/stable/ 180 | 181 | .. |license| image:: https://img.shields.io/github/license/linw1995/jsonpath.svg 182 | :target: https://github.com/linw1995/jsonpath/blob/master/LICENSE 183 | 184 | .. |Pypi Status| image:: https://img.shields.io/pypi/status/jsonpath-extractor.svg 185 | :target: https://pypi.org/project/jsonpath-extractor 186 | 187 | .. |Python version| image:: https://img.shields.io/pypi/pyversions/jsonpath-extractor.svg 188 | :target: https://pypi.org/project/jsonpath-extractor 189 | 190 | .. |Package version| image:: https://img.shields.io/pypi/v/jsonpath-extractor.svg 191 | :target: https://pypi.org/project/jsonpath-extractor 192 | 193 | .. |PyPI - Downloads| image:: https://img.shields.io/pypi/dm/jsonpath-extractor.svg 194 | :target: https://pypi.org/project/jsonpath-extractor 195 | 196 | .. |GitHub last commit| image:: https://img.shields.io/github/last-commit/linw1995/jsonpath.svg 197 | :target: https://github.com/linw1995/jsonpath 198 | 199 | .. |Code style: black| image:: https://img.shields.io/badge/code%20style-black-000000.svg 200 | :target: https://github.com/ambv/black 201 | 202 | .. |Build Status| image:: https://github.com/linw1995/jsonpath/workflows/Lint&Test/badge.svg 203 | :target: https://github.com/linw1995/jsonpath/actions?query=workflow%3ALint%26Test 204 | 205 | .. |codecov| image:: https://codecov.io/gh/linw1995/jsonpath/branch/master/graph/badge.svg 206 | :target: https://codecov.io/gh/linw1995/jsonpath 207 | 208 | .. 
|PDM managed| image:: https://img.shields.io/badge/pdm-managed-blueviolet 209 | :target: https://pdm.fming.dev 210 | -------------------------------------------------------------------------------- /README.template.rst: -------------------------------------------------------------------------------- 1 | .. include:: docs/source/readme.rst 2 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | from pathlib import Path 3 | 4 | # Third Party Library 5 | import sybil 6 | 7 | from sybil.parsers.rest import DocTestParser, PythonCodeBlockParser 8 | 9 | ignore_collect = sum( 10 | ( 11 | [str(p.absolute()) for p in Path().glob(pattern)] 12 | for pattern in ["jsonpath/lark_parser.py"] 13 | ), 14 | [], 15 | ) 16 | 17 | 18 | def pytest_ignore_collect(path, config): 19 | """return True to prevent considering this path for collection. 20 | This hook is consulted for all files and directories prior to calling 21 | more specific hooks. 22 | """ 23 | # https://docs.pytest.org/en/5.4.3/reference.html?highlight=pytest_ignore_collect#_pytest.hookspec.pytest_ignore_collect 24 | # noqa: B950 25 | if str(path) in ignore_collect: 26 | return True 27 | 28 | return False 29 | 30 | 31 | pytest_collect_file = sybil.Sybil( 32 | parsers=[ 33 | DocTestParser(), 34 | PythonCodeBlockParser(), 35 | ], 36 | pattern="*.rst", 37 | fixtures=[], 38 | ).pytest() 39 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | { 2 | lib, 3 | dream2nix, 4 | ... 5 | }: { 6 | imports = [ 7 | dream2nix.modules.dream2nix.WIP-python-pdm 8 | ]; 9 | 10 | mkDerivation = { 11 | src = lib.cleanSourceWith { 12 | src = lib.cleanSource ./.; 13 | filter = name: type: 14 | !(builtins.any (x: x) [ 15 | (lib.hasSuffix ".nix" name) 16 | (lib.hasPrefix "." 
(builtins.baseNameOf name)) 17 | (lib.hasSuffix "flake.lock" name) 18 | ]); 19 | }; 20 | }; 21 | 22 | pdm.lockfile = ./pdm.lock; 23 | pdm.pyproject = ./pyproject.toml; 24 | 25 | buildPythonPackage = { 26 | pythonImportsCheck = [ 27 | "jsonpath" 28 | ]; 29 | }; 30 | 31 | pdm.editables = lib.mkForce {}; 32 | } 33 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/_static/custom.css: -------------------------------------------------------------------------------- 1 | div.sphinxsidebar { 2 | width: 250px; 3 | } 4 | 5 | dl.class{ 6 | margin-bottom: 1rem; 7 | } 8 | 9 | dl.method, dl.field-list { 10 | margin-bottom: 0.5rem; 11 | } 12 | 13 | pre { 14 | padding: 7px 15px; 15 | } 16 | -------------------------------------------------------------------------------- /docs/source/api_core.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: jsonpath.core 2 | :members: 3 | :inherited-members: 4 | :show-inheritance: 5 | :exclude-members: mro, chain, find, find_first, find_iter, get_begin, get_next, get_expression 6 | 7 | .. autoexception:: jsonpath.core.JSONPathError 8 | :show-inheritance: 9 | 10 | .. autoexception:: jsonpath.core.JSONPathSyntaxError 11 | :show-inheritance: 12 | 13 | .. autoexception:: jsonpath.core.JSONPathUndefinedFunctionError 14 | :show-inheritance: 15 | 16 | .. autoexception:: jsonpath.core.JSONPathFindError 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /docs/source/api_parser.rst: -------------------------------------------------------------------------------- 1 | .. 
automodule:: jsonpath.parser 2 | :members: 3 | :inherited-members: 4 | :show-inheritance: 5 | :exclude-members: INT 6 | -------------------------------------------------------------------------------- /docs/source/api_reference.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | API Reference 3 | ============= 4 | 5 | .. automodule:: jsonpath 6 | 7 | .. toctree:: 8 | :name: API Reference 9 | :maxdepth: 2 10 | 11 | api_core 12 | api_parser 13 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | v0.9.1 6 | ~~~~~~ 7 | 8 | Build 9 | ***** 10 | 11 | - Remove support for Python 3.8 12 | 13 | .. include:: history.rst 14 | :start-line: 4 15 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | # Standard Library 21 | from datetime import date 22 | 23 | project = "JSONPath-Extractor" 24 | year = date.today().year 25 | copyright = f"{year}, 林玮" 26 | author = "林玮" 27 | 28 | 29 | # -- General configuration --------------------------------------------------- 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = ["sphinx.ext.autodoc"] 35 | autodoc_inherit_docstrings = True 36 | 37 | # Add any paths that contain templates here, relative to this directory. 38 | templates_path = ["_templates"] 39 | 40 | # List of patterns, relative to source directory, that match files and 41 | # directories to ignore when looking for source files. 42 | # This pattern also affects html_static_path and html_extra_path. 43 | exclude_patterns = [] 44 | 45 | 46 | # -- Options for HTML output ------------------------------------------------- 47 | 48 | # The theme to use for HTML and HTML Help pages. See the documentation for 49 | # a list of builtin themes. 50 | # 51 | html_theme = "alabaster" 52 | html_theme_options = { 53 | "description": "A selector expression for extracting data from JSON.", 54 | "github_button": True, 55 | "github_type": "star", 56 | "travis_button": False, 57 | "codecov_button": True, 58 | "github_user": "linw1995", 59 | "github_repo": "jsonpath", 60 | "fixed_sidebar": False, 61 | "page_width": "1024px", 62 | "sidebar_width": "230px", 63 | } 64 | # Add any paths that contain custom static files (such as style sheets) here, 65 | # relative to this directory. They are copied after the builtin static files, 66 | # so a file named "default.css" will overwrite the builtin "default.css". 
67 | html_static_path = ["_static"] 68 | html_sidebars = { 69 | "**": ["about.html", "navigation.html", "relations.html", "searchbox.html"] 70 | } 71 | -------------------------------------------------------------------------------- /docs/source/contributing.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Contributing 3 | ============ 4 | 5 | 6 | Environment Setup 7 | ~~~~~~~~~~~~~~~~~ 8 | 9 | Clone the source codes from Github. 10 | 11 | .. code-block:: shell 12 | 13 | git clone https://github.com/linw1995/jsonpath.git 14 | cd jsonpath 15 | 16 | Setup the development environment. 17 | Please make sure you install the pdm_, 18 | pre-commit_ and nox_ CLIs in your environment. 19 | 20 | .. code-block:: shell 21 | 22 | make init 23 | make PYTHON=3.8 init # for specific python version 24 | 25 | Linting 26 | ~~~~~~~ 27 | 28 | Use pre-commit_ for installing linters to ensure a good code style. 29 | 30 | .. code-block:: shell 31 | 32 | make pre-commit 33 | 34 | Run linters. Some linters run via CLI nox_, so make sure you install it. 35 | 36 | .. code-block:: shell 37 | 38 | make check-all 39 | 40 | Testing 41 | ~~~~~~~ 42 | 43 | Run quick tests. 44 | 45 | .. code-block:: shell 46 | 47 | make 48 | 49 | Run quick tests with verbose. 50 | 51 | .. code-block:: shell 52 | 53 | make vtest 54 | 55 | Run tests with coverage. 56 | Testing in multiple Python environments is powered by CLI nox_. 57 | 58 | .. code-block:: shell 59 | 60 | make cov 61 | 62 | Documentation 63 | ~~~~~~~~~~~~~ 64 | 65 | Run serving documents with live-reloading. 66 | 67 | .. code-block:: shell 68 | 69 | make serve-docs 70 | 71 | .. _pdm: https://github.com/pdm-project/pdm 72 | .. _pre-commit: https://pre-commit.com/ 73 | .. 
_nox: https://nox.thea.codes/en/stable/ 74 | -------------------------------------------------------------------------------- /docs/source/history.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | History 3 | ======= 4 | 5 | v0.9.0 6 | ~~~~~~ 7 | 8 | Build 9 | ***** 10 | 11 | - Remove support for Python 3.7 12 | 13 | v0.8.0 14 | ~~~~~~ 15 | 16 | Features 17 | ******** 18 | 19 | - 69ff6cb_ add iter_find and find_first methods 20 | - be22151_ better JSONPath object representations 21 | 22 | Refactor 23 | ******** 24 | 25 | - 9d9d78f_ raise AttributeError by object.__getattribute__ 26 | - 4191b8c_ not registers base class "Expr" for chaining invocations 27 | 28 | Build 29 | ***** 30 | 31 | - cc6ab56_ 2040721_ upgrade lark-parser package to latest 32 | - fb7e902_ fit with latest PDM 33 | - 10ea6d3_ excludes .mypy_cache for local build 34 | 35 | Fix 36 | *** 37 | 38 | - 1dccec1_ fix: right way to generate standalone parser 39 | 40 | .. _69ff6cb: https://github.com/linw1995/jsonpath/commit/69ff6cb47a08d3f957224adb163970454b6a1c87 41 | .. _be22151: https://github.com/linw1995/jsonpath/commit/be221513bd8a1821e8007eb1c2d4f10aa6d3f987 42 | .. _9d9d78f: https://github.com/linw1995/jsonpath/commit/9d9d78fd60b7b284c446c06e7102d05decd24c2b 43 | .. _4191b8c: https://github.com/linw1995/jsonpath/commit/4191b8c745871733e58e97be11cdbcd845870484 44 | .. _cc6ab56: https://github.com/linw1995/jsonpath/commit/cc6ab56 45 | .. _2040721: https://github.com/linw1995/jsonpath/commit/2040721 46 | .. _1dccec1: https://github.com/linw1995/jsonpath/commit/1dccec1 47 | .. _fb7e902: https://github.com/linw1995/jsonpath/commit/fb7e902 48 | .. 
_10ea6d3: https://github.com/linw1995/jsonpath/commit/10ea6d3

v0.7.3
~~~~~~

- a4e3dee Chg:Refactoring
- f46e87e Fix:Exports requirements.txt error
- c085900 New:Supports Python3.9
- 3f8b882 Fix:mypy error when using Python39
- 3b1a40a Fix:Missing Python3.9
- 53905c2 Chg:Update Brace class doc.
- ad76217 Chg:Update Brace class doc.
- c4d9538 Fix:Build document first while running 'make live_docs'
- b12491e Fix,Dev:Must deactivate before using nox
- 82ada7a Fix:build.py file contamination (fixes #26)

v0.7.2
~~~~~~

- 5741909 New:Add a new context variable "parent"
- dde3703 New:Add a helper function 'temporary_set'
- 3336e86 New:Be able to get parent node while searching
- 901354f Fix:Slice nested exprs find error
- c0e82d6 Fix:Slice.find apply on list only

v0.7.1
~~~~~~

- 7ba2b1c Fix:IndexError raised from method Array.find (#20)

v0.7.0
~~~~~~

- 46cfd08 Fix,Dev:make init_by_poetry error when python(system) version is
  lower than 3.7
- 19f981f Chg:Upgrade lark-parser
- 982e344 New:Rewrite grammar to support more extensible jsonpath expression,
  e.g., more elegant comparison syntax.
- 1803339 New:Slice supports to cooperate with JSONPath.

v0.6.1
~~~~~~

- c79ef49 Fix:jsonpath/lark_parser.py file is missing in wheel file

v0.6.0
~~~~~~

- 3fa0e29 Chg:Remove redundant code
- 8e33efd Fix:Typo
- d3552ac Fix:Release bad sdist. (closes #11)
- e8eab43 New:Create CODE_OF_CONDUCT.md
- 4d8dcd5 Chg:Better way to use codegen module
- f85bd48 Chg:Raises AssertionError when the operator is not supported

v0.5.1
~~~~~~

- 5d30a84 Fix,Dev,CI:Release stage error

v0.5.0
~~~~~~

- 2971509 New:Add --ensure-ascii argument.
  (closes #9)
- 1c6f602 New:Be able to use stand-alone parser.
- c78505e Chg:Only release built distribution, wheel. (See #11)

v0.4.0
~~~~~~

- 9f8f039 New:Add Command-line interface support

v0.3.0
~~~~~~

- 98e6718 New:Add Predicate class

v0.2.0
~~~~~~

- Chg:Use lark-parser to replace sly
- New:Create docs by sphinx
- New,Dev:Watch related files,
  build and serve Sphinx documentation automatically.
- New,Dev:Test with doctest by pytest
- New:Add .readthedocs.yaml for docs deployment

v0.2.0-alpha.2
~~~~~~~~~~~~~~

- 2440951 Fix:Cannot release into PyPI

v0.2.0-alpha.1
~~~~~~~~~~~~~~

- ea0aaff Chg,Dev:Allow to commit on master branch
- bc42f61 Fix:Type annotation error

v0.2.0-alpha
~~~~~~~~~~~~

- 1be3dbf New:Add scripts/export_requirements_txt.sh
- 56d09bd Chg:Upgrade dependencies
- ba5868c Chg:Update GitHub Actions config
- 944fe7b New:Add caches action
- 8625aeb New:Upload release built from actions
- b882c38 Chg:Use lark-parser to replace sly
- dad27f8 Fix,Dev:CI err because of poetry install git dep
- 1fd8c41 Chg:Replace tab with space in grammar.lark
- e1a73a4 Chg:more specific type annotation
- 9dbbdfb Chg:Upgrade lark to 0.8.1
- b62b848 Chg:Refactoring for reducing non-necessary code
- b84fb93 Fix:Not raise JSONPath undefined function error explicitly
- d9ff6f6 Chg:Use type.__new__ to overwrite expr's find method
- 3b8d41d Chg:Refactoring for reducing the duplicated code
- ce42257 New:Create docs by sphinx
- bb31c2c Fix,Dev:lint docs error
- b09ec5e New,Dev:Watch related files,
  build and serve Sphinx documentation automatically.
167 | - a078e8f Fix,Dev:Isort error 168 | - db56773 New,Dev:Test with doctest by pytest 169 | - 48ad21c Fix,Dev:shell function not inherits envs of parent process 170 | - 28a4fc0 Fix,Dev:lint error 171 | - a78fdf8 Fix,Dev:Live reload docs error 172 | due to .venv/bin/python not setting env-values 173 | - 2995f46 New,Doc:API reference 174 | - d918d80 Chg,Doc:Update quickstarts.rst 175 | - f18d92c New:Add .readthedocs.yaml for docs deployment 176 | - e6b7576 New,Doc:Translate :py:mod: directive into link 177 | 178 | v0.1.1 179 | ~~~~~~ 180 | 181 | - 35f0960 New:Add release actions for pypi and gh-release 182 | - ce022b6 New:Add codecov for code coverage report 183 | - 7f4fe3c Fix:The reduce/reduce conflicts 184 | - 258b0fa Fix:The shift/reduce conflicts 185 | - 95f088d New:Add Github Actions for CI 186 | -------------------------------------------------------------------------------- /docs/source/howto.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | HowTo 3 | ===== 4 | 5 | Check specific key in dictionary 6 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 7 | 8 | >>> from jsonpath import parse 9 | >>> expr = parse('$[contains(@, "a")]') 10 | >>> expr.find([{"a": 0}, {"a": 1}, {}, {"b": 1}]) 11 | [{'a': 0}, {'a': 1}] 12 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | ============================================== 2 | Welcome to JSONPath-Extractor's documentation! 3 | ============================================== 4 | 5 | .. include:: readme.rst 6 | :start-line: 4 7 | 8 | Contents 9 | <<<<<<<< 10 | 11 | .. 
toctree:: 12 | :maxdepth: 4 13 | 14 | quickstarts 15 | howto 16 | contributing 17 | api_reference 18 | changelog 19 | 20 | Indices and tables 21 | <<<<<<<<<<<<<<<<<< 22 | 23 | * :ref:`genindex` 24 | * :ref:`modindex` 25 | * :ref:`search` 26 | -------------------------------------------------------------------------------- /docs/source/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Install the stable version from PYPI. 6 | 7 | .. code-block:: shell 8 | 9 | pip install jsonpath-extractor 10 | 11 | Or install the latest version from Github. 12 | 13 | .. code-block:: shell 14 | 15 | pip install git+https://github.com/linw1995/jsonpath.git@master 16 | -------------------------------------------------------------------------------- /docs/source/quickstarts.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | Quickstarts 3 | =========== 4 | 5 | 6 | Installation 7 | ~~~~~~~~~~~~ 8 | 9 | .. include:: installation.rst 10 | :start-line: 4 11 | 12 | 13 | Usage 14 | ~~~~~ 15 | 16 | .. code-block:: json 17 | 18 | { 19 | "goods": [ 20 | {"price": 100, "category": "Comic book"}, 21 | {"price": 200, "category": "magazine"}, 22 | {"price": 200, "no category": ""} 23 | ], 24 | "targetCategory": "book" 25 | } 26 | 27 | 28 | How to parse and extract all the comic book data from the above JSON file. 29 | 30 | .. code-block:: python3 31 | 32 | import json 33 | 34 | from jsonpath import parse 35 | 36 | with open("example.json", "r") as f: 37 | data = json.load(f) 38 | 39 | assert parse("$.goods[contains(@.category, $.targetCategory)]").find(data) == [ 40 | {"price": 100, "category": "Comic book"} 41 | ] 42 | 43 | Or use the :py:mod:`jsonpath.core` module to extract it. 44 | 45 | .. 
code-block:: python3 46 | 47 | from jsonpath.core import Root, Contains, Self 48 | 49 | assert Root().Name("goods").Predicate( 50 | Contains(Self().Name("category"), Root().Name("targetCategory")) 51 | ).find(data) == [{"price": 100, "category": "Comic book"}] 52 | 53 | 54 | Usage via CLI 55 | ~~~~~~~~~~~~~ 56 | 57 | The faster way to extract by using CLI. 58 | 59 | .. code-block:: shell 60 | 61 | jp -f example.json "$.goods[contains(@.category, $.targetCategory)]" 62 | 63 | Or pass content by pipeline. 64 | 65 | .. code-block:: shell 66 | 67 | cat example.json | jp "$.goods[contains(@.category, $.targetCategory)]" 68 | 69 | The output of the above commands. 70 | 71 | .. code-block:: json 72 | 73 | [ 74 | { 75 | "price": 100, 76 | "category": "Comic book" 77 | } 78 | ] 79 | -------------------------------------------------------------------------------- /docs/source/readme.rst: -------------------------------------------------------------------------------- 1 | ======== 2 | JSONPATH 3 | ======== 4 | 5 | |license| |Pypi Status| |Python version| |Package version| |PyPI - Downloads| 6 | |GitHub last commit| |Code style: black| |Build Status| |codecov| |PDM managed| 7 | 8 | A selector expression for extracting data from JSON. 9 | 10 | Quickstarts 11 | <<<<<<<<<<< 12 | 13 | .. include:: quickstarts.rst 14 | :start-line: 4 15 | 16 | Changelog 17 | <<<<<<<<< 18 | 19 | .. include:: changelog.rst 20 | :start-line: 4 21 | :end-before: .. include:: history.rst 22 | 23 | 24 | Contributing 25 | <<<<<<<<<<<< 26 | 27 | .. include:: contributing.rst 28 | :start-line: 4 29 | 30 | .. |license| image:: https://img.shields.io/github/license/linw1995/jsonpath.svg 31 | :target: https://github.com/linw1995/jsonpath/blob/master/LICENSE 32 | 33 | .. |Pypi Status| image:: https://img.shields.io/pypi/status/jsonpath-extractor.svg 34 | :target: https://pypi.org/project/jsonpath-extractor 35 | 36 | .. 
|Python version| image:: https://img.shields.io/pypi/pyversions/jsonpath-extractor.svg 37 | :target: https://pypi.org/project/jsonpath-extractor 38 | 39 | .. |Package version| image:: https://img.shields.io/pypi/v/jsonpath-extractor.svg 40 | :target: https://pypi.org/project/jsonpath-extractor 41 | 42 | .. |PyPI - Downloads| image:: https://img.shields.io/pypi/dm/jsonpath-extractor.svg 43 | :target: https://pypi.org/project/jsonpath-extractor 44 | 45 | .. |GitHub last commit| image:: https://img.shields.io/github/last-commit/linw1995/jsonpath.svg 46 | :target: https://github.com/linw1995/jsonpath 47 | 48 | .. |Code style: black| image:: https://img.shields.io/badge/code%20style-black-000000.svg 49 | :target: https://github.com/ambv/black 50 | 51 | .. |Build Status| image:: https://github.com/linw1995/jsonpath/workflows/Lint&Test/badge.svg 52 | :target: https://github.com/linw1995/jsonpath/actions?query=workflow%3ALint%26Test 53 | 54 | .. |codecov| image:: https://codecov.io/gh/linw1995/jsonpath/branch/master/graph/badge.svg 55 | :target: https://codecov.io/gh/linw1995/jsonpath 56 | 57 | .. 
|PDM managed| image:: https://img.shields.io/badge/pdm-managed-blueviolet 58 | :target: https://pdm.fming.dev 59 | -------------------------------------------------------------------------------- /example.json: -------------------------------------------------------------------------------- 1 | { 2 | "goods": [ 3 | {"price": 100, "category": "Comic book"}, 4 | {"price": 200, "category": "magazine"}, 5 | {"price": 200, "no category": ""} 6 | ], 7 | "targetCategory": "book" 8 | } 9 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "dream2nix": { 4 | "inputs": { 5 | "nixpkgs": "nixpkgs", 6 | "purescript-overlay": "purescript-overlay", 7 | "pyproject-nix": "pyproject-nix" 8 | }, 9 | "locked": { 10 | "lastModified": 1734729217, 11 | "narHash": "sha256-UaBik0h7veLw+VqsK5EP2ucC68BEkHLDJkcfmY+wEuY=", 12 | "owner": "nix-community", 13 | "repo": "dream2nix", 14 | "rev": "98c1c2e934995a2c6ce740d4ff43ce0daa19b79f", 15 | "type": "github" 16 | }, 17 | "original": { 18 | "owner": "nix-community", 19 | "repo": "dream2nix", 20 | "type": "github" 21 | } 22 | }, 23 | "flake-compat": { 24 | "flake": false, 25 | "locked": { 26 | "lastModified": 1696426674, 27 | "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=", 28 | "owner": "edolstra", 29 | "repo": "flake-compat", 30 | "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33", 31 | "type": "github" 32 | }, 33 | "original": { 34 | "owner": "edolstra", 35 | "repo": "flake-compat", 36 | "type": "github" 37 | } 38 | }, 39 | "nixpkgs": { 40 | "locked": { 41 | "lastModified": 1729850857, 42 | "narHash": "sha256-WvLXzNNnnw+qpFOmgaM3JUlNEH+T4s22b5i2oyyCpXE=", 43 | "owner": "NixOS", 44 | "repo": "nixpkgs", 45 | "rev": "41dea55321e5a999b17033296ac05fe8a8b5a257", 46 | "type": "github" 47 | }, 48 | "original": { 49 | "owner": "NixOS", 50 | "ref": "nixpkgs-unstable", 51 | "repo": "nixpkgs", 
52 | "type": "github" 53 | } 54 | }, 55 | "purescript-overlay": { 56 | "inputs": { 57 | "flake-compat": "flake-compat", 58 | "nixpkgs": [ 59 | "dream2nix", 60 | "nixpkgs" 61 | ], 62 | "slimlock": "slimlock" 63 | }, 64 | "locked": { 65 | "lastModified": 1728546539, 66 | "narHash": "sha256-Sws7w0tlnjD+Bjck1nv29NjC5DbL6nH5auL9Ex9Iz2A=", 67 | "owner": "thomashoneyman", 68 | "repo": "purescript-overlay", 69 | "rev": "4ad4c15d07bd899d7346b331f377606631eb0ee4", 70 | "type": "github" 71 | }, 72 | "original": { 73 | "owner": "thomashoneyman", 74 | "repo": "purescript-overlay", 75 | "type": "github" 76 | } 77 | }, 78 | "pyproject-nix": { 79 | "flake": false, 80 | "locked": { 81 | "lastModified": 1702448246, 82 | "narHash": "sha256-hFg5s/hoJFv7tDpiGvEvXP0UfFvFEDgTdyHIjDVHu1I=", 83 | "owner": "davhau", 84 | "repo": "pyproject.nix", 85 | "rev": "5a06a2697b228c04dd2f35659b4b659ca74f7aeb", 86 | "type": "github" 87 | }, 88 | "original": { 89 | "owner": "davhau", 90 | "ref": "dream2nix", 91 | "repo": "pyproject.nix", 92 | "type": "github" 93 | } 94 | }, 95 | "root": { 96 | "inputs": { 97 | "dream2nix": "dream2nix", 98 | "nixpkgs": [ 99 | "dream2nix", 100 | "nixpkgs" 101 | ] 102 | } 103 | }, 104 | "slimlock": { 105 | "inputs": { 106 | "nixpkgs": [ 107 | "dream2nix", 108 | "purescript-overlay", 109 | "nixpkgs" 110 | ] 111 | }, 112 | "locked": { 113 | "lastModified": 1688756706, 114 | "narHash": "sha256-xzkkMv3neJJJ89zo3o2ojp7nFeaZc2G0fYwNXNJRFlo=", 115 | "owner": "thomashoneyman", 116 | "repo": "slimlock", 117 | "rev": "cf72723f59e2340d24881fd7bf61cb113b4c407c", 118 | "type": "github" 119 | }, 120 | "original": { 121 | "owner": "thomashoneyman", 122 | "repo": "slimlock", 123 | "type": "github" 124 | } 125 | } 126 | }, 127 | "root": "root", 128 | "version": 7 129 | } 130 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | inputs = { 3 | dream2nix.url = 
"github:nix-community/dream2nix"; 4 | nixpkgs.follows = "dream2nix/nixpkgs"; 5 | }; 6 | 7 | outputs = { 8 | self, 9 | dream2nix, 10 | nixpkgs, 11 | }: let 12 | eachSystem = nixpkgs.lib.genAttrs [ 13 | "aarch64-darwin" 14 | "aarch64-linux" 15 | "x86_64-darwin" 16 | "x86_64-linux" 17 | ]; 18 | in { 19 | packages = eachSystem (system: let 20 | pkgs = import nixpkgs { 21 | inherit system; 22 | }; 23 | in { 24 | default = dream2nix.lib.evalModules { 25 | packageSets.nixpkgs = pkgs; 26 | modules = [ 27 | ./default.nix 28 | { 29 | paths.projectRoot = ./.; 30 | paths.projectRootFile = "flake.nix"; 31 | paths.package = ./.; 32 | } 33 | ]; 34 | }; 35 | }); 36 | devShells = eachSystem (system: let 37 | pkgs = import nixpkgs {inherit system;}; 38 | in { 39 | default = pkgs.mkShell { 40 | inputsFrom = [self.packages.${system}.default.devShell]; 41 | 42 | packages = with pkgs; [ 43 | pre-commit 44 | python3Packages.nox 45 | 46 | python310 47 | python311 48 | python312 49 | python313 50 | ]; 51 | }; 52 | }); 53 | }; 54 | } 55 | -------------------------------------------------------------------------------- /jsonpath/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | =============== 3 | :mod:`jsonpath` 4 | =============== 5 | A selector expression for extracting data from JSON. 
6 | """ 7 | 8 | # Local Folder 9 | from .core import ( 10 | Array, 11 | Brace, 12 | Compare, 13 | Contains, 14 | Equal, 15 | Expr, 16 | ExprMeta, 17 | GreaterEqual, 18 | GreaterThan, 19 | JSONPathError, 20 | JSONPathFindError, 21 | JSONPathSyntaxError, 22 | Key, 23 | LessEqual, 24 | LessThan, 25 | Name, 26 | Not, 27 | NotEqual, 28 | Root, 29 | Search, 30 | Self, 31 | Slice, 32 | Value, 33 | ) 34 | from .parser import parse 35 | 36 | __all__ = ( 37 | "Array", 38 | "Contains", 39 | "Expr", 40 | "Slice", 41 | "ExprMeta", 42 | "Root", 43 | "Name", 44 | "parse", 45 | "Search", 46 | "Self", 47 | "Brace", 48 | "Compare", 49 | "LessThan", 50 | "LessEqual", 51 | "Equal", 52 | "GreaterEqual", 53 | "GreaterThan", 54 | "NotEqual", 55 | "Not", 56 | "Key", 57 | "Value", 58 | "JSONPathError", 59 | "JSONPathSyntaxError", 60 | "JSONPathFindError", 61 | ) 62 | -------------------------------------------------------------------------------- /jsonpath/cli.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | import argparse 3 | import json 4 | import sys 5 | 6 | from pathlib import Path 7 | 8 | # Local Folder 9 | from .core import JSONPathError 10 | from .parser import parse 11 | 12 | 13 | def cli(args: argparse.Namespace) -> None: 14 | try: 15 | jp = parse(args.expression) 16 | except JSONPathError as exc: 17 | sys.exit(str(exc)) 18 | 19 | if args.file: 20 | file_path = Path(args.file) 21 | with file_path.open() as f: 22 | data = json.load(f) 23 | elif not sys.stdin.isatty(): 24 | data = json.load(sys.stdin) 25 | else: 26 | sys.exit("JSON file is needed.") 27 | 28 | json.dump(jp.find(data), sys.stdout, indent=2, ensure_ascii=args.ensure_ascii) 29 | sys.stdout.write("\n") 30 | 31 | 32 | def create_args_parser() -> argparse.ArgumentParser: 33 | args_parser = argparse.ArgumentParser() 34 | args_parser.add_argument("expression", help="JSONPath expression") 35 | args_parser.add_argument( 36 | "-f", 37 | "--file", 38 | help="JSON 
file need to be parsed and extracted by JSONPath expression", 39 | ) 40 | args_parser.add_argument( 41 | "--ensure-ascii", 42 | help="Escape all non-ASCII characters from extracting results", 43 | action="store_true", 44 | ) 45 | return args_parser 46 | 47 | 48 | def main() -> None: 49 | args_parser = create_args_parser() 50 | args = args_parser.parse_args() 51 | cli(args) 52 | 53 | 54 | if __name__ == "__main__": 55 | main() 56 | -------------------------------------------------------------------------------- /jsonpath/core.py: -------------------------------------------------------------------------------- 1 | """ 2 | ============================ 3 | :mod:`core` -- JSONPath Core 4 | ============================ 5 | """ 6 | 7 | # Standard Library 8 | import functools 9 | import json 10 | import weakref 11 | 12 | from abc import abstractmethod 13 | from contextlib import contextmanager, suppress 14 | from contextvars import ContextVar 15 | from typing import ( 16 | Any, 17 | Callable, 18 | Dict, 19 | Generator, 20 | Iterator, 21 | List, 22 | Optional, 23 | Tuple, 24 | Type, 25 | TypeVar, 26 | Union, 27 | ) 28 | from weakref import ReferenceType 29 | 30 | # Third Party Library 31 | from typing_extensions import Literal 32 | 33 | var_root: ContextVar[Any] = ContextVar("root") 34 | var_parent: ContextVar[Union[List[Any], Dict[str, Any]]] = ContextVar("parent") 35 | T_SELF_VALUE = Union[Tuple[int, Any], Tuple[str, Any]] 36 | var_self: ContextVar[T_SELF_VALUE] = ContextVar("self") 37 | var_finding: ContextVar[bool] = ContextVar("finding", default=False) 38 | T_VALUE = Union[int, float, str, Literal[None], Literal[True], Literal[False]] 39 | 40 | T = TypeVar("T", bound="Expr") 41 | 42 | 43 | class JSONPathError(Exception): 44 | """ 45 | JSONPath Base Exception. 46 | """ 47 | 48 | 49 | class JSONPathSyntaxError(JSONPathError, SyntaxError): 50 | """ 51 | JSONPath expression syntax error. 

    :param expr: JSONPath expression
    :type expr: str
    """

    def __init__(self, expr: str):
        self.expr = expr
        super().__init__(str(self))

    def __str__(self) -> str:
        return f"{self.expr!r} is not a valid JSONPath expression."

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.expr!r})"


class JSONPathUndefinedFunctionError(JSONPathError):
    """
    Undefined Function in JSONPath expression error.
    """


class JSONPathFindError(JSONPathError):
    """
    JSONPath executable object finds nothing.
    """


@contextmanager
def temporary_set(
    context_var: ContextVar[Any], value: Any
) -> Generator[None, None, None]:
    """
    Set the context variable temporarily via the 'with' statement.

    >>> var_boo = ContextVar("boo")
    >>> with temporary_set(var_boo, True):
    ...     assert var_boo.get() is True
    >>> var_boo.get()
    Traceback (most recent call last):
    ...
    LookupError: ...
    """
    token = context_var.set(value)
    try:
        yield
    finally:
        # always restore the previous value, even if the body raises
        context_var.reset(token)


def _dfs_find(expr: "Expr", elements: List[Any]) -> Generator[Any, None, None]:
    """
    use DFS to find all target elements.
    the next expr finds in the result found by the current expr.
    """
    next_expr = expr.get_next()
    for element in elements:
        try:
            found_elements = expr.find(element)
        except JSONPathFindError:
            # this element does not match; keep scanning its siblings
            continue

        if not found_elements:
            continue

        if next_expr is None:
            # collect all found elements if there is no next expr.
            yield from found_elements
            continue

        # expose the current element as the "parent" context
        # while searching one level deeper
        with temporary_set(var_parent, element):
            yield from _dfs_find(next_expr, found_elements)


class ExprMeta(type):
    """
    JSONPath Expr Meta Class.
129 | """ 130 | 131 | _classes: Dict[str, "ExprMeta"] = {} 132 | 133 | def __new__( 134 | metacls, name: str, bases: Tuple[type], attr_dict: Dict[str, Any] 135 | ) -> "ExprMeta": 136 | if "find" not in attr_dict: 137 | return _create_expr_cls(metacls, name, bases, attr_dict) 138 | 139 | actual_find = attr_dict["find"] 140 | 141 | @functools.wraps(actual_find) 142 | def find(self: "Expr", element: Any) -> List[Any]: 143 | if var_finding.get(): 144 | # the chained expr in the finding process 145 | try: 146 | return actual_find(self, element) 147 | except JSONPathFindError: 148 | if self.ref_begin is None: 149 | raise 150 | 151 | return [] 152 | 153 | return list(self.find_iter(element)) 154 | 155 | attr_dict["find"] = find 156 | return _create_expr_cls(metacls, name, bases, attr_dict) 157 | 158 | 159 | def _create_expr_cls( 160 | metacls: Type[ExprMeta], 161 | name: str, 162 | bases: Tuple[type], 163 | attr_dict: Dict[str, Any], 164 | ) -> ExprMeta: 165 | cls = type.__new__(metacls, name, bases, attr_dict) 166 | if name != "Expr": 167 | # not registers the base class 168 | metacls._classes[name] = cls 169 | 170 | return cls 171 | 172 | 173 | class Expr(metaclass=ExprMeta): 174 | """ 175 | JSONPath executable Class. 176 | 177 | .. automethod:: find 178 | .. automethod:: find_first 179 | .. automethod:: find_iter 180 | .. automethod:: get_expression 181 | .. automethod:: get_begin 182 | .. automethod:: get_next 183 | .. automethod:: chain 184 | .. 
automethod:: __getattr__ 185 | """ 186 | 187 | def __init__(self) -> None: 188 | self.left: Optional[Expr] = None 189 | self.ref_right: Optional[ReferenceType[Expr]] = None 190 | self.ref_begin: Optional[ReferenceType[Expr]] = None 191 | 192 | def __repr__(self) -> str: 193 | args = [self.get_expression(), self._get_partial_expression()] 194 | right = self.ref_right and self.ref_right() 195 | if right: 196 | args.append(right._get_partial_expression()) 197 | 198 | return f"JSONPath({', '.join(map(repr, args))})" 199 | 200 | def get_expression(self) -> str: 201 | """ 202 | Get full JSONPath expression. 203 | """ 204 | expr: Optional[Expr] = self.get_begin() 205 | parts: List[str] = [] 206 | while expr: 207 | part = expr._get_partial_expression() 208 | if isinstance(expr, (Array, Predicate, Search, Compare)): 209 | if parts: 210 | parts[-1] += part 211 | else: 212 | parts.append(part) 213 | else: 214 | parts.append(part) 215 | 216 | expr = expr.get_next() 217 | 218 | return ".".join(parts) 219 | 220 | @abstractmethod 221 | def _get_partial_expression(self) -> str: 222 | raise NotImplementedError 223 | 224 | @abstractmethod 225 | def find(self, element: Any) -> List[Any]: 226 | """ 227 | Find target data by the JSONPath expression. 228 | 229 | :param element: Root data where target data found from 230 | :type element: Any 231 | 232 | :returns: A list of target data 233 | :rtype: List[Any] 234 | """ 235 | raise NotImplementedError 236 | 237 | def find_first(self, element: Any) -> Any: 238 | """ 239 | Find first target data by the JSONPath expression. 
240 | 241 | :param element: Root data where target data found from 242 | :type element: Any 243 | 244 | :returns: the first target data 245 | :rtype: Any 246 | :raises JSONPathFindError: Found nothing 247 | """ 248 | notfound = object() 249 | rv = next(self.find_iter(element), notfound) 250 | if rv is notfound: 251 | raise JSONPathFindError("Found nothing") 252 | 253 | return rv 254 | 255 | def find_iter(self, element: Any) -> Generator[Any, None, None]: 256 | """ 257 | Iterable find target data by the JSONPath expression. 258 | 259 | :param element: Root data where target data found from 260 | :type element: Any 261 | 262 | :returns: the generator of target data list 263 | :rtype: Generator[Any, None, None] 264 | """ 265 | # the chained expr begins to find 266 | begin = self.get_begin() 267 | 268 | token_root = None 269 | try: 270 | var_root.get() 271 | except LookupError: 272 | # set the root element when the chained expr begins to find. 273 | # the partial exprs of the nested expr 274 | # can execute find method many times 275 | # but only the first time finding can set the root element. 276 | token_root = var_root.set(element) 277 | 278 | try: 279 | with temporary_set(var_finding, True): 280 | yield from _dfs_find(begin, [element]) 281 | finally: 282 | if token_root: 283 | var_root.reset(token_root) 284 | 285 | def get_begin(self) -> "Expr": 286 | """ 287 | Get the begin expr of the combined expr. 288 | 289 | :returns: The begin expr of the combined expr. 290 | :rtype: :class:`jsonpath.core.Expr` 291 | """ 292 | if self.ref_begin is None: 293 | # the unchained expr's ref_begin is None 294 | return self 295 | else: 296 | begin = self.ref_begin() 297 | assert begin, "the chained expr must have a beginning expr" 298 | return begin 299 | 300 | def get_next(self) -> Optional["Expr"]: 301 | """ 302 | Get the next part of expr in the combined expr. 303 | 304 | :returns: The next part of expr in the combined expr. 
305 | :rtype: :class:`jsonpath.core.Expr` 306 | """ 307 | return self.ref_right() if self.ref_right else None 308 | 309 | def chain(self, next_expr: T) -> T: 310 | """ 311 | Chain the next part of expr as a combined expr. 312 | 313 | :param next_expr: The next part of expr in the combined expr. 314 | :type next_expr: :class:`jsonpath.core.Expr` 315 | 316 | :returns: The next part of expr in the combined expr. 317 | :rtype: :class:`jsonpath.core.Expr` 318 | """ 319 | if self.ref_begin is None: 320 | # the unchained expr become the first expr in chain 321 | self.ref_begin = weakref.ref(self) 322 | 323 | next_expr.ref_begin = self.ref_begin 324 | # it return next expr, 325 | # so need to keep the current expr's ref into next expr's left. 326 | # keeping the next expr's weak ref into current expr's ref_right 327 | # helps to find target elements in sequential. 328 | next_expr.left = self 329 | self.ref_right = weakref.ref(next_expr) 330 | return next_expr 331 | 332 | def __getattr__(self, name: str) -> Callable[..., "Expr"]: 333 | """ 334 | Create combined expr in a serial of chain class creations 335 | like `Root().Name("*")`. 336 | 337 | :param name: The name of the next part expr in combined expr. 338 | :type name: str 339 | 340 | :returns: The function for creating the next part of the combined expr. 
341 | :rtype: Callable[[...], :class:`Expr`] 342 | 343 | :raises AttributeError: The name of Expr Component not found 344 | """ 345 | if name not in Expr._classes: 346 | return super().__getattribute__(name) # for raising AttributeError 347 | 348 | cls = Expr._classes[name] 349 | 350 | def cls_(*args: Any, **kwargs: Any) -> Expr: 351 | expr = cls(*args, **kwargs) 352 | return self.chain(next_expr=expr) 353 | 354 | return cls_ 355 | 356 | def __lt__(self, value: Any) -> "Expr": 357 | return self.LessThan(value) 358 | 359 | def __le__(self, value: Any) -> "Expr": 360 | return self.LessEqual(value) 361 | 362 | def __eq__(self, value: Any) -> "Expr": # type: ignore 363 | return self.Equal(value) 364 | 365 | def __ge__(self, value: Any) -> "Expr": 366 | return self.GreaterEqual(value) 367 | 368 | def __gt__(self, value: Any) -> "Expr": 369 | return self.GreaterThan(value) 370 | 371 | def __ne__(self, value: Any) -> "Expr": # type: ignore 372 | return self.NotEqual(value) 373 | 374 | 375 | class Value(Expr): 376 | """ 377 | Represent the value in the expression. 378 | 379 | It is used mainly to support parsing comparison expression 380 | which value is on the left. 
class Value(Expr):
    """
    Represent the value in the expression.

    It is used mainly to support parsing comparison expression
    which value is on the left.

    >>> p = Value("boo"); print(p.get_expression())
    "boo"
    >>> p.find([])
    ['boo']
    >>> print(Value(True).get_expression())
    true
    >>> print(Value(False).get_expression())
    false
    >>> print(Value(None).get_expression())
    null
    >>> print(Value(1).get_expression())
    1
    >>> print(Value(1.1).get_expression())
    1.1
    >>> print(Value(1).LessThan(Value(2)).get_expression())
    1 < 2

    """

    def __init__(self, value: T_VALUE) -> None:
        super().__init__()
        self.value = value

    def _get_partial_expression(self) -> str:
        # JSON-encode so booleans/None render as true/false/null
        return json.dumps(self.value)

    def find(self, element: Any) -> List[Any]:
        # a literal ignores the element and always yields itself
        return [self.value]


class Root(Expr):
    """
    Represent the root of data.

    >>> p = Root(); print(p.get_expression())
    $
    >>> p.find([1])
    [[1]]

    """

    def _get_partial_expression(self) -> str:
        return "$"

    def find(self, element: Any) -> List[Any]:
        # always resolve against the root value stashed in the context var
        return [var_root.get()]


class Name(Expr):
    """
    Represent the data of the field name.
    Represent the data of all fields if not providing the field name.

    :param name: The field name of the data.
    :type name: Optional[str]

    >>> p = Name("abc"); print(p.get_expression())
    abc
    >>> p.find({"abc": 1})
    [1]
    >>> p = Name(); print(p.get_expression())
    *
    >>> p.find({"a": 1, "b": 2})
    [1, 2]
    >>> p = Name("a").Name("b"); print(p.get_expression())
    a.b
    >>> p.find({"a": {"b": 1}})
    [1]

    """

    def __init__(self, name: Optional[str] = None) -> None:
        super().__init__()
        self.name = name

    def _get_partial_expression(self) -> str:
        if self.name is None:
            return "*"
        # quote names that collide with JSONPath tokens
        return repr(self.name) if self.name in ("*", "$", "@") else self.name

    def find(self, element: Any) -> List[Any]:
        if not isinstance(element, dict):
            raise JSONPathFindError

        if self.name is None:
            return list(element.values())

        try:
            return [element[self.name]]
        except KeyError:
            raise JSONPathFindError from None
class Array(Expr):
    """
    Represent the array data
    if combine with expr (e.g., :class:`Name`, :class:`Root`) as the next part.

    Use an array index to get the item of the array.

    >>> p = Root().Array(0); print(p.get_expression())
    $[0]
    >>> p.find([1, 2])
    [1]

    Also can use a :class:`Slice` to get partial items of the array.

    >>> p = Root().Array(Slice(0, 3, 2)); print(p.get_expression())
    $[:3:2]
    >>> p.find([1, 2, 3, 4])
    [1, 3]

    Accept :data:`None` to get all items of the array.

    >>> p = Root().Array(); print(p.get_expression())
    $[*]
    >>> p.find([1, 2, 3, 4])
    [1, 2, 3, 4]

    """

    def __init__(self, idx: Optional[Union[int, "Slice"]] = None) -> None:
        super().__init__()
        assert idx is None or isinstance(idx, (int, Slice)), (
            '"idx" parameter must be an instance of the "int" or "Slice" class,'
            ' or "None" value'
        )
        self.idx = idx

    def _get_partial_expression(self) -> str:
        if self.idx is None:
            return "[*]"
        rendered = self.idx.get_expression() if isinstance(self.idx, Expr) else self.idx
        return f"[{rendered}]"

    def find(self, element: Any) -> List[Any]:
        if not isinstance(element, list):
            raise JSONPathFindError

        if self.idx is None:
            return element

        if isinstance(self.idx, int):
            with suppress(IndexError):
                return [element[self.idx]]
            # index out of range falls through to a not-found error
            raise JSONPathFindError

        if isinstance(self.idx, Slice):
            return self.idx.find(element)

        raise AssertionError(f"self.idx={self.idx!r} is not valid")


class Predicate(Expr):
    """
    Filter items from the array by expr(e.g., :class:`Compare`)
    if combine with expr (e.g., :class:`Name`, :class:`Root`) as the next part.
    It is also able to process dictionary.

    Accept comparison expr for filtering.
    See more in :class:`Compare`.

    >>> p = Root().Predicate(Name("a") == 1); print(p.get_expression())
    $[a = 1]
    >>> p.find([{"a": 1}, {"a": 2}, {}])
    [{'a': 1}]
    >>> p = Root().Predicate(Contains(Key(), "a")); print(p.get_expression())
    $[contains(key(), "a")]
    >>> p.find({"a": 1, "ab": 2, "c": 3})
    [1, 2]

    Or accept single expr for filtering.

    >>> p = Root().Predicate(Name("a")); print(p.get_expression())
    $[a]
    >>> p.find([{"a": 0}, {"a": 1}, {}])
    [{'a': 1}]
    """

    def __init__(self, expr: Union["Compare", Expr]) -> None:
        super().__init__()
        assert isinstance(
            expr, Expr
        ), '"expr" parameter must be an instance of the "Expr" class.'
        self.expr = expr

    def _get_partial_expression(self) -> str:
        return f"[{self.expr.get_expression()}]"

    def find(self, element: Union[List[Any], Dict[str, Any]]) -> List[Any]:
        if isinstance(element, list):
            pairs = iter(enumerate(element))
        elif isinstance(element, dict):
            pairs = iter(element.items())
        else:
            raise JSONPathFindError

        matched: List[Any] = []
        for pair in pairs:
            # expose the current (key, value) pair to Self()/Key() via var_self
            with temporary_set(var_self, pair):
                _, value = pair
                # reset var_finding so the nested expr runs a fresh find
                with temporary_set(var_finding, False):
                    rv = self.expr.find(value)

                if rv and rv[0]:
                    matched.append(value)

        return matched
class Slice(Expr):
    """
    Use it with :class:`Array` to get partial items from the array data.
    Work like the `Python slice(range)`_.

    .. _Python slice(range): https://docs.python.org/3/library/stdtypes.html#ranges
    """

    def __init__(
        self,
        start: Union[Expr, int, None] = None,
        stop: Union[Expr, int, None] = None,
        step: Union[Expr, int, None] = None,
    ) -> None:
        super().__init__()
        self.start = start
        self.end = stop
        self.step = step

    @staticmethod
    def _field_expression(field: Union[Expr, int, None]) -> str:
        """Render one slice field; falsy fields render empty ("[:3]" style)."""
        if not field:
            return ""
        return field.get_expression() if isinstance(field, Expr) else str(field)

    def _get_partial_expression(self) -> str:
        parts = [
            self._field_expression(self.start),
            self._field_expression(self.end),
        ]
        # the step segment is omitted entirely when falsy
        if self.step:
            parts.append(self._field_expression(self.step))
        return ":".join(parts)

    def _ensure_int_or_none(self, value: Union[Expr, int, None]) -> Union[int, None]:
        """Resolve an Expr slice field against the parent element being sliced."""
        if not isinstance(value, Expr):
            return value

        # reset var_finding so the nested expr runs a fresh find
        with temporary_set(var_finding, False):
            found = value.find(var_parent.get())
            if not found or not isinstance(found[0], int):
                raise JSONPathFindError
            return found[0]

    def find(self, element: List[Any]) -> List[Any]:
        assert isinstance(element, list), "Slice.find apply on list only."

        begin = self._ensure_int_or_none(self.start)
        stop = self._ensure_int_or_none(self.end)
        stride = self._ensure_int_or_none(self.step)

        if begin is None:
            begin = 0
        if stop is None:
            stop = len(element)
        if stride is None:
            stride = 1

        return element[begin:stop:stride]
class Brace(Expr):
    """
    Brace groups part of expression,
    uses sub-expression to find the target data,
    and wraps the found result as an array.

    >>> p = Root().Array().Name("a"); print(p.get_expression())
    $[*].a
    >>> p.find([{"a": 1}])
    [1]

    >>> p = Brace(p); print(p.get_expression())
    ($[*].a)
    >>> p.find([{"a": 1}])
    [[1]]

    It seems to be useless but makes chaining filtering become possible.
    The expressions like `"$[@ < 100][@ >= 50]"` can not perform chaining filtering.
    Because the Predicate (and Array) class always unpacks the found elements to
    avoid the found result looking like `[[[[[[[...]]]]]]]`.
    So the right way to do chaining filter is that it should use with Brace class.

    >>> p = Brace(Root().Predicate(Self() < 100)).Predicate(Self() >= 50)
    >>> print(p.get_expression())
    ($[@ < 100])[@ >= 50]
    >>> p.find([100, 99, 50, 1])
    [99, 50]

    Generally, we will use And expression do that. e.g. `"$[@ < 100 and @ >= 50]"`

    >>> p = Brace(
    ...     Root().Array().Name("a")
    ... ).Predicate(Self() == 1)
    >>> print(p.get_expression())
    ($[*].a)[@ = 1]
    >>> p.find([{"a": 1}, {"a": 2}, {"a": 1}, {}])
    [1, 1]

    """

    def __init__(self, expr: Expr) -> None:
        super().__init__()
        assert isinstance(
            expr, Expr
        ), '"expr" parameter must be an instance of the "Expr" class.'
        self._expr = expr

    def _get_partial_expression(self) -> str:
        return f"({self._expr.get_expression()})"

    def find(self, element: Any) -> List[Any]:
        # reset var_finding so the wrapped expr runs a fresh find,
        # then wrap its whole result list as a single element
        with temporary_set(var_finding, False):
            found = self._expr.find(element)
        return [found]


def _recursive_find(expr: Expr, element: Any, rv: List[Any]) -> None:
    """
    Depth-first walk: apply *expr* at every node, collecting matches into *rv*.
    """
    # non-matching nodes are simply skipped
    with suppress(JSONPathFindError):
        rv.extend(expr.find(element))

    with temporary_set(var_parent, element):
        if isinstance(element, list):
            children = element
        elif isinstance(element, dict):
            children = list(element.values())
        else:
            return

        for child in children:
            _recursive_find(expr, child, rv)


class Search(Expr):
    """
    Recursively search target in data.

    :param expr: The expr is used to search in data recursively.
    :type expr: :class:`Expr`

    >>> p = Root().Search(Name("a")); print(p.get_expression())
    $..a
    >>> p.find({"a":{"a": 0}})
    [{'a': 0}, 0]

    """

    def __init__(self, expr: Expr) -> None:
        super().__init__()
        assert isinstance(
            expr, Expr
        ), '"expr" parameter must be an instance of the "Expr" class.'
        # TODO: Not accepts mixed expr
        self._expr = expr

    def _get_partial_expression(self) -> str:
        return f"..{self._expr.get_expression()}"

    def find(self, element: Any) -> List[Any]:
        found: List[Any] = []
        # a Predicate must also be applied to the current element itself,
        # so wrap the element in a one-item list before recursing
        start = [element] if isinstance(self._expr, Predicate) else element
        _recursive_find(self._expr, start, found)
        return found


class Self(Expr):
    """
    Represent each item of the array data.

    >>> p = Root().Predicate(Self()==1); print(p.get_expression())
    $[@ = 1]
    >>> p.find([1, 2, 1])
    [1, 1]

    """

    def _get_partial_expression(self) -> str:
        return "@"

    def find(self, element: Any) -> List[Any]:
        try:
            pair = var_self.get()
        except LookupError:
            # not inside a predicate: "@" refers to the element itself
            return [element]
        _, value = pair
        return [value]
class Compare(Expr):
    """
    Base class of comparison operators.

    Compare value between the first result of an expression,
    and the first result of an expression or simple value.

    >>> Root().Predicate(Self() == 1)
    JSONPath('$[@ = 1]', '[@ = 1]')
    >>> Root().Predicate(Self().Equal(1))
    JSONPath('$[@ = 1]', '[@ = 1]')
    >>> Root().Predicate(Self() <= 1)
    JSONPath('$[@ <= 1]', '[@ <= 1]')
    >>> (
    ...     Root()
    ...     .Name("data")
    ...     .Predicate(
    ...         Self() != Root().Name("target")
    ...     )
    ... )
    JSONPath('$.data[@ != $.target]', '[@ != $.target]')

    """

    def __init__(self, target: Any) -> None:
        super().__init__()
        self.target = target

    def _get_target_expression(self) -> str:
        # Expr targets render as their expression, literals as JSON
        if isinstance(self.target, Expr):
            return self.target.get_expression()
        return json.dumps(self.target)

    def get_target_value(self) -> Any:
        """Resolve the comparison target to a concrete value."""
        if not isinstance(self.target, Expr):
            return self.target

        # reset var_finding so the nested target expr runs a fresh find;
        # in a filtering find it begins on the predicate's self-value
        # (a target rooted at Root() re-resolves from the root instead).
        with temporary_set(var_finding, False):
            _, value = var_self.get()
            rv = self.target.find(value)
            if not rv:
                raise JSONPathFindError

            return rv[0]


class LessThan(Compare):
    """The ``<`` comparison operator."""

    def _get_partial_expression(self) -> str:
        return f" < {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element < self.get_target_value()]


class LessEqual(Compare):
    """The ``<=`` comparison operator."""

    def _get_partial_expression(self) -> str:
        return f" <= {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element <= self.get_target_value()]


class Equal(Compare):
    """The ``=`` comparison operator."""

    def _get_partial_expression(self) -> str:
        return f" = {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element == self.get_target_value()]


class GreaterEqual(Compare):
    """The ``>=`` comparison operator."""

    def _get_partial_expression(self) -> str:
        return f" >= {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element >= self.get_target_value()]


class GreaterThan(Compare):
    """The ``>`` comparison operator."""

    def _get_partial_expression(self) -> str:
        return f" > {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element > self.get_target_value()]


class NotEqual(Compare):
    """The ``!=`` comparison operator."""

    def _get_partial_expression(self) -> str:
        return f" != {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element != self.get_target_value()]


class And(Compare):
    """
    And, a boolean operator.

    """

    def _get_partial_expression(self) -> str:
        return f" and {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element and self.get_target_value()]


class Or(Compare):
    """
    Or, a boolean operator.

    """

    def _get_partial_expression(self) -> str:
        return f" or {self._get_target_expression()}"

    def find(self, element: Any) -> List[bool]:
        return [element or self.get_target_value()]


def _get_expression(target: Any) -> str:
    # module-level twin of Compare._get_target_expression for function args
    return target.get_expression() if isinstance(target, Expr) else json.dumps(target)


class Function(Expr):
    """
    Base class of functions.
    """

    def __init__(self, *args: Any) -> None:
        super().__init__()
        self.args = args

    @abstractmethod
    def find(self, element: Any) -> List[Any]:
        raise NotImplementedError
class Key(Function):
    """
    Key function is used to get the field name from dictionary data.

    >>> Root().Predicate(Key() == "a")
    JSONPath('$[key() = "a"]', '[key() = "a"]')

    Same as :data:`Root().Name("a")`.

    Filter all values which field name contains :data:`"book"`.

    >>> p = Root().Predicate(Contains(Key(), "book"))
    >>> print(p.get_expression())
    $[contains(key(), "book")]
    >>> p.find({"book 1": 1, "picture 2": 2})
    [1]

    """

    # NOTE: annotation fixed from ``*args: List[Any]`` — each variadic element
    # is an arbitrary value, consistent with Function.__init__(*args: Any).
    def __init__(self, *args: Any) -> None:
        super().__init__(*args)
        # key() takes no arguments
        assert not self.args

    def _get_partial_expression(self) -> str:
        return "key()"

    def find(self, element: Any) -> List[Union[int, str]]:
        # Key.find only executed in the predicate.
        # So Predicate.find being executed first has already set var_self
        key, _ = var_self.get()
        return [key]


class Contains(Function):
    """
    Determine the first result of expression contains the target substring.

    >>> p = Root().Predicate(Contains(Name("name"), "red"))
    >>> print(p.get_expression())
    $[contains(name, "red")]
    >>> p.find([
    ...     {"name": "red book"},
    ...     {"name": "red pen"},
    ...     {"name": "green book"}
    ... ])
    [{'name': 'red book'}, {'name': 'red pen'}]

    Check the specific key in the dictionary.

    >>> p = Root().Predicate(Contains(Self(), "a"))
    >>> print(p.get_expression())
    $[contains(@, "a")]
    >>> p.find([{"a": 0}, {"a": 1}, {}, {"b": 1}])
    [{'a': 0}, {'a': 1}]

    """

    # NOTE: annotation fixed from ``*args: List[Any]`` — see Key.__init__.
    def __init__(self, expr: Expr, target: Any, *args: Any) -> None:
        super().__init__(expr, target, *args)
        assert isinstance(
            expr, Expr
        ), '"expr" parameter must be an instance of the "Expr" class.'
        # contains() takes exactly two arguments
        assert not args
        self._expr = expr
        self._target = target

    def _get_partial_expression(self) -> str:
        args_list = f"{_get_expression(self._expr)}, {_get_expression(self._target)}"
        return f"contains({args_list})"

    def find(self, element: Any) -> List[bool]:
        rv = self._expr.find(element)
        if not rv:
            return []

        haystack = rv[0]
        needle = self._target
        if isinstance(needle, Expr):
            # set var_finding False to
            # start new finding process for the nested expr: target
            with temporary_set(var_finding, False):
                target_rv = self._target.find(element)

            if not target_rv:
                return []

            # use the first value of results as target
            needle = target_rv[0]

        return [needle in haystack]
1062 | self._expr = expr 1063 | 1064 | def _get_partial_expression(self) -> str: 1065 | return f"not({self._expr.get_expression()})" 1066 | 1067 | def find(self, element: Any) -> List[bool]: 1068 | # set var_finding False to 1069 | # start new finding process for the nested expr: target 1070 | with temporary_set(var_finding, False): 1071 | rv = self._expr.find(element) 1072 | 1073 | return [not v for v in rv] 1074 | 1075 | 1076 | __all__ = ( 1077 | "And", 1078 | "Array", 1079 | "Brace", 1080 | "Compare", 1081 | "Contains", 1082 | "Equal", 1083 | "Expr", 1084 | "ExprMeta", 1085 | "GreaterEqual", 1086 | "GreaterThan", 1087 | "Key", 1088 | "LessEqual", 1089 | "LessThan", 1090 | "Name", 1091 | "Not", 1092 | "NotEqual", 1093 | "Or", 1094 | "Predicate", 1095 | "Root", 1096 | "Search", 1097 | "Self", 1098 | "Slice", 1099 | "Value", 1100 | ) 1101 | -------------------------------------------------------------------------------- /jsonpath/grammar.lark: -------------------------------------------------------------------------------- 1 | start: path 2 | | func_call 3 | 4 | COLON: ":" 5 | slice: [expr] COLON [expr] COLON [expr] -> three_fields_slice 6 | | [expr] COLON [expr] -> two_fields_slice 7 | 8 | true: "true" 9 | false: "false" 10 | null: "null" 11 | STRING: /`([^`\\]|\\.)*?`|'([^'\\]|\\.)*?'|"([^"\\]|\\.)*?"/i 12 | 13 | value: NUMBER | true | false | null | STRING 14 | 15 | STAR: "*" 16 | DOT: "." 17 | DOUBLE_DOT.2: ".." 
# mypy: ignore-errors
# Ignore variable already redefined error
#
# Import shim: prefer the installed third-party ``lark`` package, falling
# back to the generated standalone parser (``jsonpath/lark_parser.py``) so
# the library also works without lark installed.
try:
    # Third Party Library
    from lark import Lark, Token, Transformer, v_args
    from lark.exceptions import UnexpectedToken, VisitError

except ImportError:
    # Local Folder
    # NOTE: only this branch provides ``Lark_StandAlone``; parser.py relies
    # on the resulting NameError to choose between the two backends.
    from .lark_parser import (
        Lark,
        Lark_StandAlone,
        Token,
        Transformer,
        UnexpectedToken,
        VisitError,
        v_args,
    )


__all__ = (
    "Token",
    "Transformer",
    "v_args",
    "Lark",
    "Lark_StandAlone",
    "UnexpectedToken",
    "VisitError",
)
45 | """ 46 | try: 47 | tree = parser.parse(expr) 48 | except UnexpectedToken as exc: 49 | raise JSONPathSyntaxError(expr) from exc 50 | 51 | try: 52 | return transformer.transform(tree) 53 | except VisitError as exc: 54 | if isinstance(exc.orig_exc, JSONPathUndefinedFunctionError): 55 | raise exc.orig_exc 56 | 57 | raise 58 | -------------------------------------------------------------------------------- /jsonpath/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/linw1995/jsonpath/c2cbdac8a53a591ea0a8b7da8f94b8cee33163c9/jsonpath/py.typed -------------------------------------------------------------------------------- /jsonpath/transformer.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | from typing import Any, Iterable, List, Optional, Union 3 | 4 | # Third Party Library 5 | from typing_extensions import Literal 6 | 7 | # Local Folder 8 | from .core import ( 9 | T_VALUE, 10 | And, 11 | Array, 12 | Brace, 13 | Compare, 14 | Contains, 15 | Equal, 16 | Expr, 17 | Function, 18 | GreaterEqual, 19 | GreaterThan, 20 | JSONPathUndefinedFunctionError, 21 | Key, 22 | LessEqual, 23 | LessThan, 24 | Name, 25 | Not, 26 | NotEqual, 27 | Or, 28 | Predicate, 29 | Root, 30 | Search, 31 | Self, 32 | Slice, 33 | Value, 34 | ) 35 | from .lark import Token, Transformer, v_args 36 | 37 | T_OPERATOR = Literal["<=", ">=", "<", ">", "!=", "="] 38 | T_ARG = Union[Expr, T_VALUE] 39 | T_NO_ARG = Iterable[Any] 40 | T_ARGS = Union[T_NO_ARG, List[T_ARG]] 41 | 42 | 43 | @v_args(inline=True) 44 | class JSONPathTransformer(Transformer[Token, Expr]): 45 | """ 46 | Transform JSONPath expression AST parsed by lark into an executable object. 
@v_args(inline=True)
class JSONPathTransformer(Transformer[Token, Expr]):
    """
    Transform JSONPath expression AST parsed by lark into an executable object.
    """

    INT = int

    def cdr(self, *args: Any) -> Any:
        # drop the leading token (e.g. DOT) and keep the payload
        return args[1]

    def true(self) -> Literal[True]:
        return True

    def false(self) -> Literal[False]:
        return False

    def null(self) -> Literal[None]:
        return None

    def NUMBER(self, number: str) -> Union[int, float]:
        # BUG FIX: lark's SIGNED_NUMBER also matches exponent notation
        # (e.g. "1e5"), which contains no "." — the old '"." in number'
        # check routed it to int() and raised ValueError.  Try int first
        # and fall back to float, preserving int results for integers.
        try:
            return int(number)
        except ValueError:
            return float(number)

    def STRING(self, quoted_string: str) -> str:
        # strip the surrounding quote characters (no escape handling)
        return quoted_string[1:-1]

    def identifier(self, string: str) -> Name:
        return Name(string)

    def STAR(self, star_: Literal["*"]) -> None:
        # normalized to None; predicate() interprets None as "all items"
        return None

    def self(self) -> Self:
        return Self()

    def root(self) -> Root:
        return Root()

    def value(self, value: T_VALUE) -> Value:
        return Value(value)

    def comparison_expr(
        self,
        left: Expr,
        operator: T_OPERATOR,
        right: Expr,
    ) -> Compare:
        """Map an operator token onto its Compare subclass and chain it."""
        rv: Compare
        if operator == "<":
            rv = LessThan(right)
        elif operator == "<=":
            rv = LessEqual(right)
        elif operator == "=":
            rv = Equal(right)
        elif operator == ">=":
            rv = GreaterEqual(right)
        elif operator == ">":
            rv = GreaterThan(right)
        elif operator == "!=":
            rv = NotEqual(right)
        else:
            raise AssertionError(f"Operator {operator!r} is not supported")

        return left.chain(rv)

    def first_path(self, expr_or_str: Union[Expr, str]) -> Expr:
        # a bare CNAME at the start of a path is an implicit Name()
        if isinstance(expr_or_str, str):
            return Name(expr_or_str)
        return expr_or_str

    chain_with_identifier = cdr

    def search(self, double_dot_: Literal[".."], expr: Expr) -> Search:
        return Search(expr)

    search_with_identifier = search
    search_with_predicate = search

    def chain_with_star(self, dot_: Literal["."], star_: Literal["*"]) -> Name:
        # ".*" selects all fields
        return Name()

    def path_with_action(self, prev_path: Expr, action: Expr) -> Expr:
        return prev_path.chain(action)

    def predicate(self, expr: Union[Expr, None]) -> Union[Array, Predicate]:
        """Dispatch "[...]" to Array (index/slice/star) or Predicate (filter)."""
        if isinstance(expr, Value):
            assert isinstance(expr.value, int)
            return Array(expr.value)
        elif isinstance(expr, Slice):
            return Array(expr)
        elif expr is None:
            # STAR token
            return Array()
        else:
            return Predicate(expr)

    def two_fields_slice(
        self,
        first_field: Optional[Expr],
        colon_1: Literal[":"],
        second_field: Optional[Expr],
    ) -> Slice:
        return Slice(start=first_field, stop=second_field)

    def three_fields_slice(
        self,
        first_field: Optional[Expr],
        colon_1: Literal[":"],
        second_field: Optional[Expr],
        colon_2: Literal[":"],
        third_field: Optional[Expr],
    ) -> Slice:
        return Slice(start=first_field, stop=second_field, step=third_field)

    def func_call(self, name: str, args: T_ARGS = tuple()) -> Function:
        """Instantiate one of the built-in functions by name."""
        if name == "key":
            return Key(*args)
        elif name == "contains":
            return Contains(*args)
        elif name == "not":
            return Not(*args)
        else:
            raise JSONPathUndefinedFunctionError(f"Function {name!r} not exists")

    def multi_args(self, args: List[T_ARG], single_arg: T_ARG) -> List[T_ARG]:
        args.append(single_arg)
        return args

    def single_arg(self, arg: T_ARG) -> List[T_ARG]:
        return [arg]

    def parenthesized_expr(self, expr: Expr) -> Brace:
        return Brace(expr)

    def and_expr(self, left_expr: Expr, and_: Literal["and"], right_expr: Expr) -> And:
        return left_expr.chain(And(right_expr))

    def or_expr(self, left_expr: Expr, or_: Literal["or"], right_expr: Expr) -> Or:
        return left_expr.chain(Or(right_expr))

    def start(self, expr: Expr) -> Expr:
        return expr
/jsonpath_build.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | from typing import Any, Mapping, Optional 3 | 4 | 5 | def build_lark_parser(pybin_path=None) -> None: 6 | # Standard Library 7 | import subprocess 8 | import sys 9 | 10 | output = subprocess.check_output( 11 | args=[ 12 | pybin_path or sys.executable, 13 | "-m", 14 | "lark.tools.standalone", 15 | "--maybe_placeholders", 16 | "jsonpath/grammar.lark", 17 | ] 18 | ) 19 | with open("jsonpath/lark_parser.py", "wb") as f: 20 | f.write(output) 21 | 22 | 23 | def __getattr__(name: str) -> Any: 24 | if name == "build_lark_parser": 25 | return build_lark_parser 26 | 27 | try: 28 | # Third Party Library 29 | import pdm.pep517.api 30 | 31 | func = getattr(pdm.pep517.api, name) 32 | 33 | if name == "build_wheel": 34 | 35 | def build_wheel( 36 | wheel_directory: str, 37 | config_settings: Optional[Mapping[str, Any]] = None, 38 | metadata_directory: Optional[str] = None, 39 | ) -> str: 40 | build_lark_parser() 41 | return func(wheel_directory, config_settings, metadata_directory) 42 | 43 | return build_wheel 44 | 45 | elif name == "build_sdist": 46 | 47 | def build_sdist( 48 | sdist_directory: str, 49 | config_settings: Optional[Mapping[str, Any]] = None, 50 | ) -> str: 51 | build_lark_parser() 52 | return func(sdist_directory, config_settings) 53 | 54 | return build_sdist 55 | 56 | else: 57 | return func 58 | 59 | except ImportError: 60 | return getattr(globals(), name) 61 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | import os 3 | import sys 4 | 5 | from pathlib import Path 6 | 7 | # Third Party Library 8 | import nox 9 | 10 | sys.path.insert(0, "") 11 | # First Party Library 12 | from jsonpath_build import build_lark_parser # noqa: E402 13 | 14 | nox.options.stop_on_first_error = True 15 | 16 | 
pythons = ["3.10", "3.11", "3.12", "3.13"] 17 | 18 | os.environ.update({"PDM_IGNORE_SAVED_PYTHON": "1"}) 19 | os.environ.pop("PYTHONPATH", None) 20 | 21 | lark_parser_path = Path("jsonpath/lark_parser.py") 22 | 23 | 24 | def get_nox_session_pybin(session): 25 | return session.bin + "/python" 26 | 27 | 28 | @nox.session(python=pythons, venv_backend="venv") 29 | @nox.parametrize( 30 | "parser_backend", 31 | [ 32 | "standalone", 33 | "parser", 34 | ], 35 | ) 36 | def coverage_test(session, parser_backend): 37 | session.run( 38 | "pdm", 39 | "sync", 40 | "--no-editable", 41 | "-v", 42 | "-G", 43 | "test", 44 | "-G", 45 | "parser", 46 | external=True, 47 | ) 48 | if parser_backend == "standalone": 49 | if not lark_parser_path.exists(): 50 | pybin_path = get_nox_session_pybin(session) 51 | build_lark_parser(pybin_path) 52 | 53 | session.run("python", "-m", "pip", "uninstall", "lark", "-y") 54 | else: 55 | if lark_parser_path.exists(): 56 | lark_parser_path.unlink() 57 | 58 | session.run("pytest", "-vv", "--cov=jsonpath", "--cov-append", *session.posargs) 59 | 60 | 61 | @nox.session(python=pythons, venv_backend="venv") 62 | def coverage_report(session): 63 | session.run("pdm", "sync", "--no-editable", "-v", "-G", "test", external=True) 64 | session.run("coverage", "report") 65 | session.run("coverage", "xml") 66 | session.run("coverage", "html") 67 | session.log( 68 | f">> open file:/{(Path() / 'htmlcov/index.html').absolute()} to see coverage" 69 | ) 70 | 71 | 72 | @nox.session(venv_backend="venv") 73 | def build(session): 74 | if not lark_parser_path.exists(): 75 | build_lark_parser() 76 | session.run("pdm", "build", external=True) 77 | 78 | 79 | @nox.session(python=pythons[-1:], venv_backend="venv") 80 | def build_readme(session): 81 | session.run( 82 | "pdm", "sync", "--no-editable", "-v", "-G", "build_readme", external=True 83 | ) 84 | session.run( 85 | "python", "scripts/build_readme.py", "README.template.rst", "README.rst" 86 | ) 87 | 
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "jsonpath-extractor" 3 | authors = [{ name = "林玮 (Jade Lin)", email = "linw1995@icloud.com" }] 4 | description = "A selector expression for extracting data from JSON." 5 | readme = "README.rst" 6 | classifiers = [ 7 | "Intended Audience :: Developers", 8 | "License :: OSI Approved :: MIT License", 9 | "Programming Language :: Python", 10 | "Programming Language :: Python :: 3", 11 | "Programming Language :: Python :: 3.10", 12 | "Programming Language :: Python :: 3.11", 13 | "Programming Language :: Python :: 3.12", 14 | "Programming Language :: Python :: 3.13", 15 | "Development Status :: 5 - Production/Stable", 16 | "Operating System :: POSIX", 17 | "Operating System :: MacOS :: MacOS X", 18 | "Operating System :: Microsoft :: Windows", 19 | ] 20 | keywords = ["data-extractor", "data-extraction", "jsonpath", "json"] 21 | dependencies = ["typing-extensions ~= 4.7"] 22 | requires-python = ">=3.10" 23 | license = { text = "MIT" } 24 | dynamic = ["version"] 25 | 26 | [project.urls] 27 | homepage = "https://github.com/linw1995/jsonpath" 28 | repository = "https://github.com/linw1995/jsonpath" 29 | documentation = "https://jsonpath.rtfd.io/en/latest/" 30 | 31 | [project.optional-dependencies] 32 | parser = [ 33 | "lark~=1.2", 34 | ] 35 | 36 | [project.scripts] 37 | jp = "jsonpath.cli:main" 38 | 39 | [build-system] 40 | requires = ["pdm-pep517", "lark~=1.2"] 41 | backend-path = ["."] 42 | build-backend = "jsonpath_build" 43 | 44 | [tool.pdm] 45 | includes = [ 46 | "jsonpath/*.py", 47 | "jsonpath_build.py", 48 | "jsonpath/lark_parser.py", 49 | "jsonpath/py.typed", 50 | "jsonpath/grammar.lark", 51 | ] 52 | excludes = ["**/.mypy_cache"] 53 | version = { use_scm = true } 54 | 55 | [tool.pdm.options] 56 | lock = ["-S", "inherit_metadata,no_cross_platform"] 57 | 58 | 
[tool.pdm.dev-dependencies] 59 | docs = ["sphinx~=7.4", "livereload~=2.6", "lark~=1.2", "tornado>=6.3.2"] 60 | test = ["pytest-cov~=5.0", "pytest~=8.0", "pexpect~=4.8", "sybil~=8.0"] 61 | build_readme = ["pygments~=2.8", "docutils>=0.16"] 62 | -------------------------------------------------------------------------------- /scripts/build_readme.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | .. _issues-172: https://github.com/github/markup/issues/172 4 | 5 | Because Github markup do not render :include: directive. (issues-172_) 6 | """ 7 | 8 | # Standard Library 9 | import re 10 | 11 | from pathlib import Path 12 | from unittest import mock 13 | 14 | # Third Party Library 15 | import docutils.nodes 16 | import docutils.parsers.rst 17 | import docutils.parsers.rst.directives.misc 18 | import docutils.statemachine 19 | import docutils.utils 20 | 21 | 22 | def parse_inv(): 23 | base = "https://jsonpath.readthedocs.io/en/latest" 24 | return {"jsonpath.core": f"{base}/api_core.html"} 25 | 26 | 27 | def translate_py_link(text: str) -> str: 28 | def pymod(match) -> str: 29 | module_name = match.groupdict()["module_name"] 30 | link = parse_inv()[module_name] 31 | return f"`{module_name} <{link}>`_" 32 | 33 | return re.sub(r":py:mod:`(?P.*)`", pymod, text) 34 | 35 | 36 | def build_readme(): 37 | old_string2lines = docutils.statemachine.string2lines 38 | old_run = docutils.parsers.rst.directives.misc.Include.run 39 | text = "" 40 | target_text = None 41 | 42 | def string2lines(*args, **kwargs): 43 | nonlocal text, target_text 44 | if target_text is not None: 45 | text = text.replace(target_text, args[0]) 46 | target_text = None 47 | else: 48 | text += args[0] 49 | 50 | rv = old_string2lines(*args, **kwargs) 51 | return rv 52 | 53 | def run(self): 54 | nonlocal target_text 55 | target_text = self.block_text 56 | rv = old_run(self) 57 | return rv 58 | 59 | with ( 60 | mock.patch.object(docutils.statemachine, "string2lines", 
string2lines), 61 | mock.patch.object(docutils.parsers.rst.directives.misc.Include, "run", run), 62 | ): 63 | source_file_path: Path = Path.cwd() / "README.template.rst" 64 | target_file_path: Path = Path.cwd() / "README.rst" 65 | parser = docutils.parsers.rst.Parser() 66 | default_settings = docutils.frontend.OptionParser( 67 | components=(docutils.parsers.rst.Parser,) 68 | ).get_default_values() 69 | document = docutils.utils.new_document(source_file_path.name, default_settings) 70 | parser.parse(source_file_path.read_text(encoding="utf-8"), document) 71 | text = text.rstrip() + "\n" 72 | text = translate_py_link(text) 73 | if ( 74 | target_file_path.exists() 75 | and target_file_path.read_text(encoding="utf-8") == text 76 | ): 77 | return 78 | 79 | target_file_path.write_text(text, encoding="utf-8") 80 | 81 | 82 | if __name__ == "__main__": 83 | build_readme() 84 | -------------------------------------------------------------------------------- /scripts/export_requirements_txt.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | import enum 3 | import shlex 4 | import subprocess 5 | 6 | from pathlib import Path 7 | 8 | Format = enum.Enum("Format", "requirements setuppy") 9 | BASE_DIR = Path(__file__).parent / "requirements" 10 | 11 | 12 | def fix_end_of_file(text): 13 | return text.rstrip() + "\n" 14 | 15 | 16 | def pdm_export(args, filename, format: Format): 17 | output = subprocess.check_output( 18 | shlex.split(f"pdm export -f {format.name} {' '.join(args)}"), encoding="utf-8" 19 | ) 20 | output = fix_end_of_file(output) 21 | if format is Format.setuppy: 22 | output = "\n".join( 23 | ['# This a dummy setup.py to enable GitHub "Used By" stats', output] 24 | ) 25 | p = Path(filename) 26 | if not p.parent.exists(): 27 | p.parent.mkdir(parents=True) 28 | is_new = not p.exists() 29 | if is_new or p.read_text() != output: 30 | p.write_text(output) 31 | if is_new: 32 | raise RuntimeError("Create a new file") 
33 | 34 | 35 | pdm_export( 36 | args=["--prod"], 37 | filename=BASE_DIR / "requirements-mini.txt", 38 | format=Format.requirements, 39 | ) 40 | pdm_export( 41 | args=[ 42 | "--prod", 43 | "-G:all", 44 | ], 45 | filename=BASE_DIR / "requirements.txt", 46 | format=Format.requirements, 47 | ) 48 | pdm_export( 49 | args=["-G:all"], 50 | filename=BASE_DIR / "requirements-dev.txt", 51 | format=Format.requirements, 52 | ) 53 | pdm_export( 54 | args=["-G", "docs"], 55 | filename=BASE_DIR / "requirements-docs.txt", 56 | format=Format.requirements, 57 | ) 58 | # pdm_export(args=[], filename=BASE_DIR / "setup.py", format=Format.setuppy) 59 | -------------------------------------------------------------------------------- /scripts/requirements/requirements-dev.txt: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # Please do not edit it manually. 3 | 4 | alabaster==0.7.16 \ 5 | --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ 6 | --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 7 | babel==2.16.0 \ 8 | --hash=sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b \ 9 | --hash=sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316 10 | certifi==2024.12.14 \ 11 | --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ 12 | --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db 13 | charset-normalizer==3.4.0 \ 14 | --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ 15 | --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ 16 | --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ 17 | --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ 18 | --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ 19 | 
--hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ 20 | --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ 21 | --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ 22 | --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ 23 | --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ 24 | --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ 25 | --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ 26 | --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ 27 | --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ 28 | --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ 29 | --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ 30 | --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ 31 | --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ 32 | --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ 33 | --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ 34 | --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ 35 | --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ 36 | --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ 37 | --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ 38 | --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ 39 | --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ 40 | --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ 41 | --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ 42 | 
--hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ 43 | --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ 44 | --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ 45 | --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ 46 | --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ 47 | --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ 48 | --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ 49 | --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ 50 | --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ 51 | --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ 52 | --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ 53 | --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ 54 | --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ 55 | --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ 56 | --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ 57 | --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ 58 | --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ 59 | --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ 60 | --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ 61 | --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ 62 | --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ 63 | --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ 64 | --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ 65 | 
--hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ 66 | --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ 67 | --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ 68 | --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ 69 | --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ 70 | --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ 71 | --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ 72 | --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ 73 | --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ 74 | --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ 75 | --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 76 | colorama==0.4.6; sys_platform == "win32" \ 77 | --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ 78 | --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 79 | coverage[toml]==7.6.9 \ 80 | --hash=sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4 \ 81 | --hash=sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c \ 82 | --hash=sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f \ 83 | --hash=sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b \ 84 | --hash=sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6 \ 85 | --hash=sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae \ 86 | --hash=sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692 \ 87 | --hash=sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4 \ 88 | --hash=sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4 \ 89 | 
--hash=sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717 \ 90 | --hash=sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d \ 91 | --hash=sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198 \ 92 | --hash=sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1 \ 93 | --hash=sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3 \ 94 | --hash=sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb \ 95 | --hash=sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d \ 96 | --hash=sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b \ 97 | --hash=sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710 \ 98 | --hash=sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c \ 99 | --hash=sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae \ 100 | --hash=sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077 \ 101 | --hash=sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00 \ 102 | --hash=sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb \ 103 | --hash=sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664 \ 104 | --hash=sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014 \ 105 | --hash=sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9 \ 106 | --hash=sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e \ 107 | --hash=sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9 \ 108 | --hash=sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa \ 109 | --hash=sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611 \ 110 | --hash=sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b \ 111 | --hash=sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a \ 112 | 
--hash=sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8 \ 113 | --hash=sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030 \ 114 | --hash=sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015 \ 115 | --hash=sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97 \ 116 | --hash=sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419 \ 117 | --hash=sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9 \ 118 | --hash=sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7 \ 119 | --hash=sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be \ 120 | --hash=sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1 \ 121 | --hash=sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba \ 122 | --hash=sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5 \ 123 | --hash=sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073 \ 124 | --hash=sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a \ 125 | --hash=sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a \ 126 | --hash=sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3 \ 127 | --hash=sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0 \ 128 | --hash=sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b \ 129 | --hash=sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec \ 130 | --hash=sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1 \ 131 | --hash=sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3 132 | docutils==0.21.2 \ 133 | --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ 134 | --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 135 | exceptiongroup==1.2.2; python_version < "3.11" \ 136 | 
--hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ 137 | --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc 138 | idna==3.10 \ 139 | --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ 140 | --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 141 | imagesize==1.4.1 \ 142 | --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \ 143 | --hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a 144 | iniconfig==2.0.0 \ 145 | --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ 146 | --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 147 | jinja2==3.1.5 \ 148 | --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ 149 | --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb 150 | lark==1.2.2 \ 151 | --hash=sha256:c2276486b02f0f1b90be155f2c8ba4a8e194d42775786db622faccd652d8e80c \ 152 | --hash=sha256:ca807d0162cd16cef15a8feecb862d7319e7a09bdb13aef927968e45040fed80 153 | livereload==2.7.1 \ 154 | --hash=sha256:3d9bf7c05673df06e32bea23b494b8d36ca6d10f7d5c3c8a6989608c09c986a9 \ 155 | --hash=sha256:5201740078c1b9433f4b2ba22cd2729a39b9d0ec0a2cc6b4d3df257df5ad0564 156 | markupsafe==3.0.2 \ 157 | --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ 158 | --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ 159 | --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ 160 | --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ 161 | --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ 162 | --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ 163 | --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ 164 | 
--hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ 165 | --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ 166 | --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ 167 | --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ 168 | --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ 169 | --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ 170 | --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ 171 | --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ 172 | --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ 173 | --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ 174 | --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ 175 | --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ 176 | --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ 177 | --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ 178 | --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ 179 | --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ 180 | --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ 181 | --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ 182 | --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ 183 | --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ 184 | --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ 185 | --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ 186 | --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ 187 
| --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ 188 | --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ 189 | --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ 190 | --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ 191 | --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ 192 | --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ 193 | --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ 194 | --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ 195 | --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ 196 | --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ 197 | --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ 198 | --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ 199 | --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ 200 | --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ 201 | --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ 202 | --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ 203 | --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ 204 | --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ 205 | --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ 206 | --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ 207 | --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 208 | packaging==24.2 \ 209 | --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ 210 | 
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f 211 | pexpect==4.9.0 \ 212 | --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \ 213 | --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f 214 | pluggy==1.5.0 \ 215 | --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ 216 | --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 217 | ptyprocess==0.7.0 \ 218 | --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \ 219 | --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220 220 | pygments==2.18.0 \ 221 | --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ 222 | --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a 223 | pytest==8.3.4 \ 224 | --hash=sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6 \ 225 | --hash=sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761 226 | pytest-cov==5.0.0 \ 227 | --hash=sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652 \ 228 | --hash=sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857 229 | requests==2.32.3 \ 230 | --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ 231 | --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 232 | snowballstemmer==2.2.0 \ 233 | --hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 \ 234 | --hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a 235 | sphinx==7.4.7 \ 236 | --hash=sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe \ 237 | --hash=sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239 238 | sphinxcontrib-applehelp==2.0.0 \ 239 | --hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \ 240 | 
--hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5 241 | sphinxcontrib-devhelp==2.0.0 \ 242 | --hash=sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad \ 243 | --hash=sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2 244 | sphinxcontrib-htmlhelp==2.1.0 \ 245 | --hash=sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8 \ 246 | --hash=sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9 247 | sphinxcontrib-jsmath==1.0.1 \ 248 | --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ 249 | --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 250 | sphinxcontrib-qthelp==2.0.0 \ 251 | --hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \ 252 | --hash=sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb 253 | sphinxcontrib-serializinghtml==2.0.0 \ 254 | --hash=sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331 \ 255 | --hash=sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d 256 | sybil==8.0.1 \ 257 | --hash=sha256:8696f007005eee4bc79be7af6dd1739397f34122517d38b7a21c088f716f7a23 \ 258 | --hash=sha256:b563b4e44fb315d0f5a0fc6f6364e63fe5058e0726f25dc95b609dbed3a41f34 259 | tomli==2.2.1; python_version < "3.11" \ 260 | --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ 261 | --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ 262 | --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ 263 | --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ 264 | --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ 265 | --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ 266 | --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ 267 | 
--hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ 268 | --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ 269 | --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ 270 | --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ 271 | --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ 272 | --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ 273 | --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ 274 | --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ 275 | --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ 276 | --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ 277 | --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ 278 | --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ 279 | --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ 280 | --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ 281 | --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ 282 | --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ 283 | --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ 284 | --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ 285 | --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ 286 | --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ 287 | --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ 288 | --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ 289 | --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ 290 
| --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ 291 | --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 292 | tornado==6.4.2 \ 293 | --hash=sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803 \ 294 | --hash=sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec \ 295 | --hash=sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482 \ 296 | --hash=sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634 \ 297 | --hash=sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38 \ 298 | --hash=sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b \ 299 | --hash=sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c \ 300 | --hash=sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf \ 301 | --hash=sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946 \ 302 | --hash=sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73 \ 303 | --hash=sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1 304 | typing-extensions==4.12.2 \ 305 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ 306 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 307 | urllib3==2.3.0 \ 308 | --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ 309 | --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d 310 | -------------------------------------------------------------------------------- /scripts/requirements/requirements-docs.txt: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # Please do not edit it manually. 
3 | 4 | alabaster==0.7.16 \ 5 | --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ 6 | --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 7 | babel==2.16.0 \ 8 | --hash=sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b \ 9 | --hash=sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316 10 | certifi==2024.12.14 \ 11 | --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ 12 | --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db 13 | charset-normalizer==3.4.0 \ 14 | --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ 15 | --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ 16 | --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ 17 | --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ 18 | --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ 19 | --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ 20 | --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ 21 | --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ 22 | --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ 23 | --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ 24 | --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ 25 | --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ 26 | --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ 27 | --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ 28 | --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ 29 | --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ 30 | 
--hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ 31 | --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ 32 | --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ 33 | --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ 34 | --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ 35 | --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ 36 | --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ 37 | --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ 38 | --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ 39 | --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ 40 | --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ 41 | --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ 42 | --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ 43 | --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ 44 | --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ 45 | --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ 46 | --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ 47 | --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ 48 | --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ 49 | --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ 50 | --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ 51 | --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ 52 | --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ 53 | 
--hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ 54 | --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ 55 | --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ 56 | --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ 57 | --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ 58 | --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ 59 | --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ 60 | --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ 61 | --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ 62 | --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ 63 | --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ 64 | --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ 65 | --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ 66 | --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ 67 | --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ 68 | --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ 69 | --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ 70 | --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ 71 | --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ 72 | --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ 73 | --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ 74 | --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ 75 | --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 76 | colorama==0.4.6; 
sys_platform == "win32" \ 77 | --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ 78 | --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 79 | docutils==0.21.2 \ 80 | --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ 81 | --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 82 | idna==3.10 \ 83 | --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ 84 | --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 85 | imagesize==1.4.1 \ 86 | --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \ 87 | --hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a 88 | jinja2==3.1.5 \ 89 | --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ 90 | --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb 91 | lark==1.2.2 \ 92 | --hash=sha256:c2276486b02f0f1b90be155f2c8ba4a8e194d42775786db622faccd652d8e80c \ 93 | --hash=sha256:ca807d0162cd16cef15a8feecb862d7319e7a09bdb13aef927968e45040fed80 94 | livereload==2.7.1 \ 95 | --hash=sha256:3d9bf7c05673df06e32bea23b494b8d36ca6d10f7d5c3c8a6989608c09c986a9 \ 96 | --hash=sha256:5201740078c1b9433f4b2ba22cd2729a39b9d0ec0a2cc6b4d3df257df5ad0564 97 | markupsafe==3.0.2 \ 98 | --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ 99 | --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ 100 | --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ 101 | --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ 102 | --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ 103 | --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ 104 | --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ 105 | 
--hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ 106 | --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ 107 | --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ 108 | --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ 109 | --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ 110 | --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ 111 | --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ 112 | --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ 113 | --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ 114 | --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ 115 | --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ 116 | --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ 117 | --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ 118 | --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ 119 | --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ 120 | --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ 121 | --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ 122 | --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ 123 | --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ 124 | --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ 125 | --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ 126 | --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ 127 | --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ 128 
| --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ 129 | --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ 130 | --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ 131 | --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ 132 | --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ 133 | --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ 134 | --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ 135 | --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ 136 | --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ 137 | --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ 138 | --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ 139 | --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ 140 | --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ 141 | --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ 142 | --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ 143 | --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ 144 | --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ 145 | --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ 146 | --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ 147 | --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ 148 | --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 149 | packaging==24.2 \ 150 | --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ 151 | 
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f 152 | pygments==2.18.0 \ 153 | --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ 154 | --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a 155 | requests==2.32.3 \ 156 | --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ 157 | --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 158 | snowballstemmer==2.2.0 \ 159 | --hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 \ 160 | --hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a 161 | sphinx==7.4.7 \ 162 | --hash=sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe \ 163 | --hash=sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239 164 | sphinxcontrib-applehelp==2.0.0 \ 165 | --hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \ 166 | --hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5 167 | sphinxcontrib-devhelp==2.0.0 \ 168 | --hash=sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad \ 169 | --hash=sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2 170 | sphinxcontrib-htmlhelp==2.1.0 \ 171 | --hash=sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8 \ 172 | --hash=sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9 173 | sphinxcontrib-jsmath==1.0.1 \ 174 | --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ 175 | --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 176 | sphinxcontrib-qthelp==2.0.0 \ 177 | --hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \ 178 | --hash=sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb 179 | sphinxcontrib-serializinghtml==2.0.0 \ 180 | 
--hash=sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331 \ 181 | --hash=sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d 182 | tomli==2.2.1; python_version < "3.11" \ 183 | --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ 184 | --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ 185 | --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ 186 | --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ 187 | --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ 188 | --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ 189 | --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ 190 | --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ 191 | --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ 192 | --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ 193 | --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ 194 | --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ 195 | --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ 196 | --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ 197 | --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ 198 | --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ 199 | --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ 200 | --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ 201 | --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ 202 | --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ 203 | 
--hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ 204 | --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ 205 | --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ 206 | --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ 207 | --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ 208 | --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ 209 | --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ 210 | --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ 211 | --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ 212 | --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ 213 | --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ 214 | --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 215 | tornado==6.4.2 \ 216 | --hash=sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803 \ 217 | --hash=sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec \ 218 | --hash=sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482 \ 219 | --hash=sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634 \ 220 | --hash=sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38 \ 221 | --hash=sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b \ 222 | --hash=sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c \ 223 | --hash=sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf \ 224 | --hash=sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946 \ 225 | --hash=sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73 \ 226 | 
--hash=sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1 227 | typing-extensions==4.12.2 \ 228 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ 229 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 230 | urllib3==2.3.0 \ 231 | --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ 232 | --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d 233 | -------------------------------------------------------------------------------- /scripts/requirements/requirements-mini.txt: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # Please do not edit it manually. 3 | 4 | typing-extensions==4.12.2 \ 5 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ 6 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 7 | -------------------------------------------------------------------------------- /scripts/requirements/requirements.txt: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # Please do not edit it manually. 3 | 4 | lark==1.2.2 \ 5 | --hash=sha256:c2276486b02f0f1b90be155f2c8ba4a8e194d42775786db622faccd652d8e80c \ 6 | --hash=sha256:ca807d0162cd16cef15a8feecb862d7319e7a09bdb13aef927968e45040fed80 7 | typing-extensions==4.12.2 \ 8 | --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ 9 | --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 10 | -------------------------------------------------------------------------------- /scripts/watch_build_and_serve_html_docs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Watch related files, build and serve Sphinx documentation automatically. 
3 | """ 4 | 5 | # Standard Library 6 | import os 7 | import shlex 8 | import subprocess 9 | import sys 10 | 11 | # Third Party Library 12 | from livereload import Server 13 | 14 | 15 | def shell(cmd, output=None, cwd=None, env=None, shell=False): 16 | if output is None: 17 | output = subprocess.DEVNULL 18 | 19 | if not isinstance(cmd, (list, tuple)) and not shell: 20 | cmd = shlex.split(cmd) 21 | 22 | def run(): 23 | p = subprocess.Popen( 24 | cmd, 25 | stdin=subprocess.DEVNULL, 26 | stdout=output, 27 | stderr=output, 28 | cwd=cwd, 29 | shell=shell, 30 | env=env or os.environ.copy(), 31 | ) 32 | p.wait() 33 | 34 | return run 35 | 36 | 37 | def main(): 38 | server = Server() 39 | 40 | # https://github.com/pypa/virtualenv/issues/906#issuecomment-244394963 41 | # the Python executable from virtualenv 42 | # doesn't set the PATH or VIRTUAL_ENV environment variables. 43 | # That's by design, and is not a bug. 44 | env = os.environ.copy() 45 | env["PATH"] = ( 46 | os.path.dirname(os.path.abspath(sys.executable)) 47 | + os.pathsep 48 | + os.environ.get("PATH", "") 49 | ) 50 | build_docs = shell("make html", cwd="docs", output=sys.stderr, env=env) 51 | # watcher use glob.glob without setting recursive=True 52 | # ref: 53 | # https://github.com/lepture/python-livereload/pull/203 54 | # https://docs.python.org/3/library/glob.html#glob.glob 55 | server.watch("docs/source/*.rst", build_docs) 56 | server.watch("docs/source/**/*.rst", build_docs) 57 | server.watch("jsonpath/*.py", build_docs) 58 | server.watch("jsonpath/**/*.py", build_docs) 59 | 60 | build_docs() 61 | server.serve(root="docs/build/html") 62 | 63 | 64 | if __name__ == "__main__": 65 | main() 66 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | extend-ignore = E203, W503 4 | 5 | [isort] 6 | profile=black 7 | lines_between_types=1 8 | 
known_third_party=livereload, poetry, clikit 9 | 10 | import_heading_stdlib=Standard Library 11 | import_heading_thirdparty=Third Party Library 12 | import_heading_firstparty=First Party Library 13 | import_heading_localfolder=Local Folder 14 | 15 | [tool:pytest] # https://docs.pytest.org/en/5.4.3/customize.html 16 | testpaths = tests jsonpath docs 17 | norecursedirs = 18 | scripts/ 19 | noxfile.py 20 | addopts = 21 | --doctest-modules 22 | filterwarnings = 23 | ignore::DeprecationWarning 24 | 25 | [coverage:run] 26 | branch = true 27 | omit = 28 | site-packages 29 | jsonpath/lark_parser.py 30 | 31 | [coverage:report] 32 | precision = 2 33 | # Regexes for lines to exclude from consideration 34 | exclude_lines = 35 | # Have to re-enable the standard pragma 36 | pragma: no cover 37 | 38 | # Don't complain about missing debug-only code: 39 | def __repr__ 40 | if self\.debug 41 | 42 | # Don't complain if tests don't hit defensive assertion code: 43 | raise AssertionError 44 | raise NotImplementedError 45 | 46 | # Don't complain if non-runnable code isn't run: 47 | if 0: 48 | if __name__ == .__main__.: 49 | if TYPE_CHECKING: 50 | raise$ 51 | 52 | ignore_errors = True 53 | 54 | [mypy] 55 | follow_imports = silent 56 | strict_optional = true 57 | warn_redundant_casts = true 58 | check_untyped_defs = true 59 | disallow_any_generics = true 60 | disallow_untyped_defs = true 61 | warn_unused_ignores = true 62 | ignore_missing_imports = true 63 | exclude = 64 | jsonpath/lark_parser.py 65 | 66 | 67 | [mypy-tests.*] 68 | disallow_untyped_defs = false 69 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/linw1995/jsonpath/c2cbdac8a53a591ea0a8b7da8f94b8cee33163c9/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_cli.py: 
-------------------------------------------------------------------------------- 1 | # Standard Library 2 | import json 3 | import sys 4 | 5 | from functools import wraps 6 | from pathlib import Path 7 | 8 | # Third Party Library 9 | import pexpect 10 | import pytest 11 | 12 | JP = f"{sys.executable} -m jsonpath.cli" 13 | 14 | common_testcases = [ 15 | ("boo", {"boo": 1}, [1]), 16 | ("$.*", {"boo": 1, "bar": 2}, [1, 2]), 17 | ] 18 | 19 | 20 | @pytest.fixture 21 | def spawn(): 22 | ps = [] 23 | 24 | @wraps(pexpect.spawn) 25 | def wrapper(*args, **kwargs): 26 | p = pexpect.spawn( 27 | *args, 28 | logfile=sys.stdout.buffer, 29 | **kwargs, 30 | ) 31 | ps.append(p) 32 | return p 33 | 34 | yield wrapper 35 | 36 | for p in ps: 37 | p.close() 38 | 39 | sys.stdout.flush() 40 | 41 | 42 | @pytest.mark.parametrize( 43 | "expression, data, expect", 44 | common_testcases, 45 | ) 46 | def test_parse_json_file_and_extract(spawn, expression, data, expect, tmpdir): 47 | json_file_path = Path(tmpdir) / "test_example.json" 48 | with json_file_path.open("w") as f: 49 | json.dump(data, f) 50 | 51 | p = spawn(f"{JP} {expression} -f {json_file_path}") 52 | p.expect_exact(json.dumps(expect, indent=2).split("\n")) 53 | p.wait() 54 | assert p.exitstatus == 0 55 | 56 | 57 | @pytest.mark.parametrize("expression, data, expect", common_testcases) 58 | def test_parse_json_from_stdin_and_extract(spawn, expression, data, expect, tmpdir): 59 | json_file_path = Path(tmpdir) / "test_example.json" 60 | with json_file_path.open("w") as f: 61 | json.dump(data, f) 62 | 63 | p = spawn( 64 | "/bin/bash", 65 | ["-c", f"cat {json_file_path} | {JP} {expression}"], 66 | ) 67 | p.expect_exact(json.dumps(expect, indent=2).split("\n")) 68 | p.wait() 69 | assert p.exitstatus == 0 70 | 71 | 72 | def test_no_json_input_error(spawn): 73 | p = spawn(f"{JP} boo") 74 | p.expect("JSON file is needed.") 75 | p.wait() 76 | assert p.exitstatus == 1 77 | 78 | 79 | def test_invalid_expression_errror(spawn): 80 | p = 
spawn(f"{JP} []") 81 | p.expect_exact("'[]' is not a valid JSONPath expression.") 82 | p.wait() 83 | assert p.exitstatus == 1 84 | -------------------------------------------------------------------------------- /tests/test_core.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | import reprlib 3 | 4 | # Third Party Library 5 | import pytest 6 | 7 | # First Party Library 8 | from jsonpath.core import ( 9 | Array, 10 | Brace, 11 | Contains, 12 | Key, 13 | Name, 14 | Not, 15 | Predicate, 16 | Root, 17 | Self, 18 | Slice, 19 | Value, 20 | var_parent, 21 | ) 22 | 23 | # Local Folder 24 | from .utils import assert_find 25 | 26 | 27 | @pytest.mark.parametrize( 28 | "name,data,expect", 29 | [("boo", {"boo": 1}, [1]), (None, {"boo": 1, "bar": 2}, [1, 2])], 30 | ids=reprlib.repr, 31 | ) 32 | def test_name_find(name, data, expect): 33 | assert_find(Name(name), data, expect) 34 | 35 | 36 | @pytest.mark.parametrize( 37 | "names,data,expect", 38 | [ 39 | (["boo", "bar"], {"boo": {"bar": 1}}, [1]), 40 | (["boo", "bar", "boo"], {"boo": {"bar": {"boo": 1}}}, [1]), 41 | ( 42 | ["boo", None, "boo"], 43 | {"boo": {"boo": {"boo": 1}, "bar": {"boo": 2}}}, 44 | [1, 2], 45 | ), 46 | ], 47 | ids=reprlib.repr, 48 | ) 49 | def test_name_chain_find(names, data, expect): 50 | jp = Name(names[0]) 51 | for name in names[1:]: 52 | jp = jp.Name(name) # type: ignore 53 | 54 | assert_find(jp, data, expect) 55 | 56 | 57 | @pytest.mark.parametrize("data", [[], "abc", {"a": "b"}, 1, 1.0], ids=reprlib.repr) 58 | def test_root(data): 59 | assert_find(Root(), data, [data]) 60 | 61 | 62 | @pytest.mark.parametrize( 63 | "start,end,step,data,expect", 64 | [ 65 | (0, 1, 1, [1, 2], [1]), 66 | (0, None, 1, [1, 2], [1, 2]), 67 | (None, -1, 1, [1, 2, 3], [1, 2]), 68 | (None, None, 2, [1, 2, 3], [1, 3]), 69 | ], 70 | ids=reprlib.repr, 71 | ) 72 | def test_slice_in_array(start, end, step, data, expect): 73 | jp = Root().Array(Slice(start, end, step)) 74 | 
assert_find(jp, data, expect) 75 | 76 | 77 | @pytest.mark.parametrize( 78 | "expr,data,expect", 79 | [ 80 | (Root().Search(Array(0)), {"boo": [{"boo": [1]}]}, [{"boo": [1]}, 1]), 81 | ( 82 | Root().Search(Array()), 83 | {"boo": [{"boo": [1, 2]}]}, 84 | [{"boo": [1, 2]}, 1, 2], 85 | ), 86 | ( 87 | Root().Search(Array(Slice(None, -1, 2))), 88 | {"boo": [{"boo": [1, 2, 3, 4, 5]}, 1]}, 89 | [{"boo": [1, 2, 3, 4, 5]}, 1, 3], 90 | ), 91 | ], 92 | ) 93 | def test_search(expr, data, expect): 94 | assert_find(expr, data, expect) 95 | 96 | 97 | @pytest.mark.parametrize( 98 | "start,end,step,data,expect", 99 | [ 100 | (0, 1, 1, [1, 2], [1]), 101 | (1, 1, 1, [1, 2], []), 102 | (0, 10, 1, [1, 2, 3], [1, 2, 3]), 103 | (0, 3, 2, [1, 2, 3], [1, 3]), 104 | (0, None, 1, [1, 2], [1, 2]), 105 | (None, -1, 1, [1, 2, 3], [1, 2]), 106 | (None, None, 2, [1, 2, 3], [1, 3]), 107 | ], 108 | ids=reprlib.repr, 109 | ) 110 | def test_slice(start, end, step, data, expect): 111 | jp = Root().Slice(start, end, step) 112 | assert_find(jp, data, expect) 113 | 114 | 115 | @pytest.mark.parametrize( 116 | "expr,data,expect", 117 | [ 118 | ( 119 | Root().Predicate(Name("price") > 100), 120 | [{"price": 100}, {"price": 200}], 121 | [{"price": 200}], 122 | ), 123 | ( 124 | Root().Predicate(Name("price").GreaterThan(100)), 125 | [{"price": 100}, {"price": 200}], 126 | [{"price": 200}], 127 | ), 128 | ( 129 | Root().Predicate(Name("price") >= 100), 130 | [{"price": 100}, {"price": 200}], 131 | [{"price": 100}, {"price": 200}], 132 | ), 133 | ( 134 | Root().Predicate(Name("price").GreaterEqual(100)), 135 | [{"price": 100}, {"price": 200}], 136 | [{"price": 100}, {"price": 200}], 137 | ), 138 | ( 139 | Root().Predicate(Name("price") < 100), 140 | [{"price": 100}, {"price": 200}], 141 | [], 142 | ), 143 | ( 144 | Root().Predicate(Name("price").LessThan(100)), 145 | [{"price": 100}, {"price": 200}], 146 | [], 147 | ), 148 | ( 149 | Root().Predicate(Name("price") <= 100), 150 | [{"price": 100}, {"price": 200}], 
151 | [{"price": 100}], 152 | ), 153 | ( 154 | Root().Predicate(Name("price").LessEqual(100)), 155 | [{"price": 100}, {"price": 200}], 156 | [{"price": 100}], 157 | ), 158 | ( 159 | Root().Predicate(Name("price") == 100), 160 | [{"price": 100}, {"price": 200}], 161 | [{"price": 100}], 162 | ), 163 | ( 164 | Root().Predicate(Name("price").Equal(100)), 165 | [{"price": 100}, {"price": 200}], 166 | [{"price": 100}], 167 | ), 168 | ( 169 | Root().Predicate(Name("price") != 100), 170 | [{"price": 100}, {"price": 200}], 171 | [{"price": 200}], 172 | ), 173 | ( 174 | Root().Predicate(Name("price").NotEqual(100)), 175 | [{"price": 100}, {"price": 200}], 176 | [{"price": 200}], 177 | ), 178 | ], 179 | ids=reprlib.repr, 180 | ) 181 | def test_comparison(expr, data, expect): 182 | assert_find(expr, data, expect) 183 | 184 | 185 | @pytest.mark.parametrize( 186 | "expr,data,expect", 187 | [ 188 | ( 189 | Root().Search(Predicate(Name("price") > 100)), 190 | { 191 | "price": 200, 192 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 193 | }, 194 | [ 195 | { 196 | "price": 200, 197 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 198 | }, 199 | {"price": 200}, 200 | {"price": 300}, 201 | ], 202 | ), 203 | ( 204 | Root().Search(Predicate(Name("price") >= 100)), 205 | { 206 | "price": 200, 207 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 208 | }, 209 | [ 210 | { 211 | "price": 200, 212 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 213 | }, 214 | {"price": 100}, 215 | {"price": 200}, 216 | {"price": 300}, 217 | ], 218 | ), 219 | ( 220 | Root().Search(Predicate(Name("price") <= 100)), 221 | { 222 | "price": 200, 223 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 224 | }, 225 | [{"price": 100}], 226 | ), 227 | ( 228 | Root().Search(Predicate(Name("price") < 100)), 229 | { 230 | "price": 200, 231 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 232 | }, 233 | [], 234 | ), 235 | ( 236 | 
Root().Search(Predicate(Name("price") == 100)), 237 | { 238 | "price": 200, 239 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 240 | }, 241 | [{"price": 100}], 242 | ), 243 | ( 244 | Root().Search(Predicate(Name("price") != 100)), 245 | { 246 | "price": 200, 247 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 248 | }, 249 | [ 250 | { 251 | "price": 200, 252 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 253 | }, 254 | {"price": 200}, 255 | {"price": 300}, 256 | ], 257 | ), 258 | ], 259 | ids=reprlib.repr, 260 | ) 261 | def test_comparison_in_search(expr, data, expect): 262 | assert_find(expr, data, expect) 263 | 264 | 265 | @pytest.mark.parametrize( 266 | "expr,data,expect", 267 | [ 268 | (Root().Name(), {"boo": [1, 2, 3], "bar": [2, 3, 4]}, [[1, 2, 3], [2, 3, 4]]), 269 | (Root().Name().Array(0), {"boo": [1, 2, 3], "bar": [2, 3, 4]}, [1, 2]), 270 | ( 271 | Root().Name().Array(), 272 | {"boo": [1, 2, 3], "bar": [2, 3, 4]}, 273 | [1, 2, 3, 2, 3, 4], 274 | ), 275 | ( 276 | Brace(Root().Name()).Array(0), 277 | {"boo": [1, 2, 3], "bar": [2, 3, 4]}, 278 | [[1, 2, 3]], 279 | ), 280 | ( 281 | Brace(Root().Name().Array()).Array(0), 282 | {"boo": [1, 2, 3], "bar": [2, 3, 4]}, 283 | [1], 284 | ), 285 | ( 286 | Root().Predicate(Contains(Self(), "is")), 287 | [ 288 | {"is": 1}, 289 | {"is": 0}, 290 | {"is": True}, 291 | {"is": False}, 292 | {"is": []}, 293 | {"is": None}, 294 | {"is": {}}, 295 | {"is": "str"}, 296 | {"is": 1.1}, 297 | {"is": 0.0}, 298 | {}, 299 | ], 300 | [ 301 | {"is": 1}, 302 | {"is": 0}, 303 | {"is": True}, 304 | {"is": False}, 305 | {"is": []}, 306 | {"is": None}, 307 | {"is": {}}, 308 | {"is": "str"}, 309 | {"is": 1.1}, 310 | {"is": 0.0}, 311 | ], 312 | ), 313 | ( 314 | Root().Predicate(Contains(Self(), Value("is"))), 315 | [ 316 | {"is": 1}, 317 | {"is": 0}, 318 | {"is": True}, 319 | {"is": False}, 320 | {"is": []}, 321 | {"is": None}, 322 | {"is": {}}, 323 | {"is": "str"}, 324 | {"is": 1.1}, 325 | {"is": 
0.0}, 326 | {}, 327 | ], 328 | [ 329 | {"is": 1}, 330 | {"is": 0}, 331 | {"is": True}, 332 | {"is": False}, 333 | {"is": []}, 334 | {"is": None}, 335 | {"is": {}}, 336 | {"is": "str"}, 337 | {"is": 1.1}, 338 | {"is": 0.0}, 339 | ], 340 | ), 341 | ], 342 | ids=reprlib.repr, 343 | ) 344 | def test_others(expr, data, expect): 345 | assert_find(expr, data, expect) 346 | 347 | 348 | def test_get_expression(expr, expect): 349 | assert expr.get_expression() == expect 350 | 351 | 352 | test_get_expression = pytest.mark.parametrize( 353 | "expr,expect", 354 | [ 355 | (Name("abc").Name("def"), "abc.def"), 356 | (Name("abc").Name().Name("ghi"), "abc.*.ghi"), 357 | (Name("abc").Array(1), "abc[1]"), 358 | (Name("abc").Array(), "abc[*]"), 359 | (Name("abc").Array(Slice()), "abc[:]"), 360 | (Name("abc").Array(Slice(1)), "abc[1:]"), 361 | (Name("abc").Array(Slice(None, 1)), "abc[:1]"), 362 | (Name("abc").Array(Slice(1, -1)), "abc[1:-1]"), 363 | (Name("abc").Array(Slice(1, -1, 2)), "abc[1:-1:2]"), 364 | (Name("abc").Array(Slice(step=2)), "abc[::2]"), 365 | (Root().Array(), "$[*]"), 366 | (Root().Name(), "$.*"), 367 | (Brace(Root().Name("abc")).Array(1), "($.abc)[1]"), 368 | (Root().Search(Name("abc")).Array(1), "$..abc[1]"), 369 | (Root().Search(Array()), "$..[*]"), 370 | (Root().Predicate(Name("abc").GreaterThan(1)), "$[abc > 1]"), 371 | (Root().Predicate(Name("abc").GreaterEqual(1)), "$[abc >= 1]"), 372 | (Root().Predicate(Name("abc").Equal(1)), "$[abc = 1]"), 373 | (Root().Predicate(Name("abc").NotEqual(1)), "$[abc != 1]"), 374 | (Root().Predicate(Name("abc").LessEqual(1)), "$[abc <= 1]"), 375 | (Root().Predicate(Name("abc").LessThan(1)), "$[abc < 1]"), 376 | (Root().Predicate(Self().Name("abc").LessThan(1)), "$[@.abc < 1]"), 377 | ( 378 | Name("list").Predicate(Name("abc") < Root().Name("abc")), 379 | "list[abc < $.abc]", 380 | ), 381 | ( 382 | Name("list").Predicate(Name("abc").And(Root().Name("abc"))), 383 | "list[abc and $.abc]", 384 | ), 385 | ( 386 | 
Name("list").Predicate(Name("abc").Or(Root().Name("abc"))), 387 | "list[abc or $.abc]", 388 | ), 389 | ( 390 | Name("list").Predicate(Name("abc").Or(Root().Name("abc")).Or(Name("def"))), 391 | "list[abc or $.abc or def]", 392 | ), 393 | (Root().Predicate(Name("name") == "name"), '$[name = "name"]'), 394 | (Root().Predicate(Key() == "bookA"), '$[key() = "bookA"]'), 395 | ( 396 | Root().Predicate(Contains(Key(), "book")), 397 | '$[contains(key(), "book")]', 398 | ), 399 | ( 400 | Root().Predicate(Not(Contains(Key(), "book"))), 401 | '$[not(contains(key(), "book"))]', 402 | ), 403 | (Value(1), "1"), 404 | (Value(1.1), "1.1"), 405 | (Value("boo"), '"boo"'), 406 | (Value(None), "null"), 407 | (Value(True), "true"), 408 | (Value(False), "false"), 409 | (Value(1).LessThan(Value(2)), "1 < 2"), 410 | ], 411 | ids=reprlib.repr, 412 | )(test_get_expression) 413 | 414 | 415 | def test_get_parent_object(): 416 | root = {"a": 1} 417 | 418 | class TestName1(Name): 419 | def find(self, element): 420 | with pytest.raises(LookupError): 421 | var_parent.get() 422 | 423 | assert element == root 424 | return super().find(element) 425 | 426 | assert TestName1("a").find(root) == [1] 427 | 428 | root_2 = {"a": {"b": 1}} 429 | 430 | class TestName2(Name): 431 | def find(self, element): 432 | assert var_parent.get() == root_2 433 | assert element == {"b": 1} 434 | return super().find(element) 435 | 436 | assert Name("a").chain(TestName2("b")).find(root_2) == [1] 437 | 438 | 439 | def test_get_parent_array(): 440 | root = [{"a": 1}, {"a": 2}] 441 | 442 | class TestName(Name): 443 | def find(self, element): 444 | assert var_parent.get() == root 445 | assert element in root 446 | return super().find(element) 447 | 448 | assert Array().chain(TestName("a")).find(root) == [1, 2] 449 | 450 | 451 | def test_get_parent_while_searching(): 452 | root = {"a": {"b": {"c": 1}}} 453 | 454 | parents = [] 455 | history = [] 456 | 457 | class TestName(Name): 458 | def find(self, element): 459 | 
parents.append(var_parent.get()) 460 | history.append(element) 461 | return super().find(element) 462 | 463 | assert Root().Search(TestName("c")).find(root) == [1] 464 | assert parents == [root, root, root["a"], root["a"]["b"]] 465 | assert history == [root, root["a"], root["a"]["b"], 1] 466 | -------------------------------------------------------------------------------- /tests/test_lark.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | import json 3 | import logging 4 | 5 | from contextlib import nullcontext 6 | 7 | # Third Party Library 8 | import _pytest.python_api 9 | import pytest 10 | 11 | # First Party Library 12 | from jsonpath.core import JSONPathSyntaxError, JSONPathUndefinedFunctionError 13 | from jsonpath.lark import Lark, UnexpectedToken 14 | from jsonpath.parser import parse, parser 15 | 16 | # Local Folder 17 | from .utils import assert_find 18 | 19 | does_not_raise = nullcontext() 20 | 21 | 22 | @pytest.mark.xfail(raises=NameError) 23 | def test_no_conflict(caplog): 24 | with caplog.at_level(logging.DEBUG, logger="lark"): 25 | Lark.open( 26 | "jsonpath/grammar.lark", 27 | parser="lalr", 28 | debug=True, 29 | keep_all_tokens=True, 30 | ) 31 | 32 | assert len(caplog.records) == 0 33 | 34 | 35 | def ids(x): 36 | if x is does_not_raise: 37 | return "does not raise" 38 | elif isinstance(x, _pytest.python_api.RaisesContext): 39 | return f"raise {x.expected_exception.__name__}" 40 | else: 41 | return json.dumps(x) 42 | 43 | 44 | def test_parser_parse(expression, raises_what): 45 | with raises_what: 46 | tree = parser.parse(expression) 47 | logging.debug(f"parser.parse {expression!r} result: {tree.pretty()}") 48 | 49 | 50 | parser_parse_not_raises_exception_testcases = [ 51 | "a.b", 52 | "a.b.c", 53 | "a.b.c.d", 54 | "a[:3]", 55 | "a[:-3]", 56 | "a[-1]", 57 | "a[0]", 58 | "a[*]", 59 | "$[@]", 60 | "$[$]", 61 | "a[b]", 62 | "a[b][c]", 63 | "(a[b])[c]", 64 | "a.*", 65 | "a.'*'", 66 | "a.'b'", 67 
| "a..[b]", 68 | "a..[0]", 69 | "a..[:3]", 70 | "$.goods[contains(@.category, $.targetCategory)]", 71 | "$.goods[@.category]", 72 | "$.goods[@.price > 10]", 73 | "$.goods[@.price >= 10 and @.price < 20]", 74 | "$.goods[@.price >= 10 and @.price < 20 and @.category = $.targetCategory]", 75 | "$.goods" "[(@.price >= 10 and @.price < 20) or @.category = $.targetCategory]", 76 | "$.goods[@.'price' > 10]", 77 | '$."price"', 78 | "$.'price'", 79 | "$.`price`", 80 | "($.*[*])[0]", 81 | "($.*)[0]", 82 | "$..boo.bar", 83 | "$[(type='book' or type='video') and price > 100]", 84 | "(a.b).b", 85 | "a[(b[0] or b[1]) and (b.c)[0].d]", 86 | "a[((b[0]).c or b[1]) and d]", 87 | "$[c and (a or b)]", 88 | "$[c and ((a or b) or c)]", 89 | "((a.b).c).d", 90 | "((a.b)).c", 91 | "$[((a.b).c)]", 92 | "$[1 < @]", 93 | "$[@ and 1]", 94 | "$[(a and b)]", 95 | "$[((a and b)) or c]", 96 | "$[not((a and b)) or c]", 97 | ] 98 | 99 | parser_parse_raises_exception_testcases = [ 100 | "array[]", 101 | "$*", 102 | "[*]", 103 | '"abc"', 104 | "'abc'", 105 | "`abc`", 106 | ] 107 | parser_parse_testcases = [ 108 | *( 109 | (expression, does_not_raise) 110 | for expression in parser_parse_not_raises_exception_testcases 111 | ), 112 | *( 113 | (expression, pytest.raises(UnexpectedToken)) 114 | for expression in parser_parse_raises_exception_testcases 115 | ), 116 | ] 117 | 118 | pytest.mark.parametrize( 119 | "expression, raises_what", 120 | parser_parse_testcases, 121 | ids=ids, 122 | )(test_parser_parse) 123 | 124 | 125 | def test_parse_check_and_extract(expression, data, expect): 126 | jp = parse(expression) 127 | logging.debug(f"parse {expression!r} result: {jp}") 128 | assert jp.get_expression() == expression 129 | assert_find(jp, data, expect) 130 | 131 | 132 | pytest.mark.parametrize( 133 | "expression, data, expect", 134 | [ 135 | ("boo", {"boo": 1}, [1]), 136 | ("boo.bar", {"boo": {"bar": 1}}, [1]), 137 | ("boo.bar.boo", {"boo": {"bar": {"boo": 1}}}, [1]), 138 | ("$.*", {"boo": 1, "bar": 2}, [1, 
2]), 139 | ("$.'*'", {"boo": 1, "bar": 2, "*": 3}, [3]), 140 | ("boo.*", {"boo": {"boo": 1, "bar": 2}}, [1, 2]), 141 | ("boo.*.boo", {"boo": {"boo": {"boo": 1}, "bar": {"boo": 2}}}, [1, 2]), 142 | ("boo.*.boo", {"boo": {"boo": {"boo": 1}, "bar": {"bar": 2}}}, [1]), 143 | ("boo.*.boo", {"boo": {"boo": {"boo": 1}, "bar": 1}}, [1]), 144 | ("$[0]", [1, 2], [1]), 145 | ("$[1]", [1, 2], [2]), 146 | ("$[2]", [1, 2], []), 147 | ("$[-1]", [], []), 148 | ("$[-1]", [1], [1]), 149 | ("boo[0]", {"boo": [1, 2]}, [1]), 150 | ("$[*]", [1, 2], [1, 2]), 151 | ("$[:1]", [1, 2], [1]), 152 | ("$[1:2]", [1, 2], [2]), 153 | ("$[:]", [1, 2], [1, 2]), 154 | ("$[:-1]", [1, 2, 3], [1, 2]), 155 | ("$[::2]", [1, 2, 3], [1, 3]), 156 | ("$[a:]", [1, 2], []), 157 | ("$[:a]", [1, 2], []), 158 | ("$[::a]", [1, 2], []), 159 | ("$.data[$.a:]", {"data": [1, 2]}, []), 160 | ("$.data[$.a:]", {"data": [1, 2], "a": 1}, [2]), 161 | ("$.data[a:]", {"data": [1, 2], "a": 1}, [2]), 162 | ("$.data[a:b]", {"data": [1, 2], "a": 1, "b": 1}, []), 163 | ("$.*[0]", {"boo": [1, 2, 3], "bar": [2, 3, 4]}, [1, 2]), 164 | ("$.*[*]", {"boo": [1, 2, 3], "bar": [2, 3, 4]}, [1, 2, 3, 2, 3, 4]), 165 | ("($.*)[0]", {"boo": [1, 2, 3], "bar": [2, 3, 4]}, [[1, 2, 3]]), 166 | ("($.*[*])[0]", {"boo": [1, 2, 3], "bar": [2, 3, 4]}, [1]), 167 | ("$.*", {"boo": [1, 2, 3], "bar": [2, 3, 4]}, [[1, 2, 3], [2, 3, 4]]), 168 | ( 169 | "$..boo", 170 | {"boo": {"boo": {"boo": 1}, "bar": {"boo": 2}}}, 171 | [{"boo": {"boo": 1}, "bar": {"boo": 2}}, {"boo": 1}, 1, 2], 172 | ), 173 | ( 174 | "$..boo.bar", 175 | {"boo": {"boo": {"boo": 1}, "bar": {"boo": 2}}}, 176 | [{"boo": 2}], 177 | ), 178 | ("$..[0]", {"boo": [{"boo": [1]}]}, [{"boo": [1]}, 1]), 179 | ("$..[*]", {"boo": [{"boo": [1, 2]}]}, [{"boo": [1, 2]}, 1, 2]), 180 | ( 181 | "$..[:-1:2]", 182 | {"boo": [{"boo": [1, 2, 3, 4, 5]}, 1]}, 183 | [{"boo": [1, 2, 3, 4, 5]}, 1, 3], 184 | ), 185 | ("@", "abc", ["abc"]), 186 | ("$[@ < 10]", [0, 10, 12, 1, 3], [0, 1, 3]), 187 | ("($[@ < 10])[@ > 1]", 
[0, 10, 12, 1, 3], [3]), 188 | ( 189 | "$[@.price > 100]", 190 | [{"price": 100}, {"price": 200}], 191 | [{"price": 200}], 192 | ), 193 | ( 194 | "$[price > 100]", 195 | [{"price": 100}, {"price": 200}], 196 | [{"price": 200}], 197 | ), 198 | ( 199 | "$[price >= 100]", 200 | [{"price": 100}, {"price": 200}], 201 | [{"price": 100}, {"price": 200}], 202 | ), 203 | ( 204 | "$[price < 100]", 205 | [{"price": 100}, {"price": 200}], 206 | [], 207 | ), 208 | ( 209 | "$[price <= 100]", 210 | [{"price": 100}, {"price": 200}], 211 | [{"price": 100}], 212 | ), 213 | ( 214 | "$[price = 100]", 215 | [{"price": 100}, {"price": 200}], 216 | [{"price": 100}], 217 | ), 218 | ( 219 | "$[price != 100]", 220 | [{"price": 100}, {"price": 200}], 221 | [{"price": 200}], 222 | ), 223 | ("$[@.price]", [{"price": 100}, {}], [{"price": 100}]), 224 | ( 225 | "$[@]", 226 | [{"price": 100}, {"isbn": ""}, {}], 227 | [{"price": 100}, {"isbn": ""}], 228 | ), 229 | ( 230 | "$[@.*]", 231 | [{"price": 100}, {"isbn": ""}, {"isbn": "", "price": 100}], 232 | [{"price": 100}], 233 | ), 234 | ("$[price]", [{"price": 100}, {}], [{"price": 100}]), 235 | ("$[price]", {"bookA": {"price": 100}, "bookB": {}}, [{"price": 100}]), 236 | ( 237 | "$..[result]", 238 | [{"result": {"result": "result"}}], 239 | [{"result": {"result": "result"}}, {"result": "result"}], 240 | ), 241 | ( 242 | "$..[result]", 243 | {"result": {"result": "result"}}, 244 | [{"result": {"result": "result"}}, {"result": "result"}], 245 | ), 246 | ( 247 | "$..[price > 100]", 248 | { 249 | "price": 200, 250 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 251 | }, 252 | [ 253 | { 254 | "price": 200, 255 | "charpter": [{"price": 100}, {"price": 200}, {"price": 300}], 256 | }, 257 | {"price": 200}, 258 | {"price": 300}, 259 | ], 260 | ), 261 | ("$[price > 100.5]", [{"price": 100}, {"price": 200}], [{"price": 200}]), 262 | ("$[on = null]", [{"on": None}, {"on": False}], [{"on": None}]), 263 | ("$[on = true]", [{"on": True}, {"on": 
False}], [{"on": True}]), 264 | ("$[on = false]", [{"on": True}, {"on": False}], [{"on": False}]), 265 | ( 266 | "$.systems[on = $.on]", 267 | {"systems": [{"on": True}, {"on": False}], "on": False}, 268 | [{"on": False}], 269 | ), 270 | ( 271 | "$.systems[on = $.notexists]", 272 | {"systems": [{"on": True}, {"on": False}], "on": False}, 273 | [], 274 | ), 275 | ('$[name = "john"]', [{"name": "jack"}, {"name": "john"}], [{"name": "john"}]), 276 | ('$[name = "john"]', [{"name": "jack"}, {"name": "john"}], [{"name": "john"}]), 277 | ("$[*].name", [{"name": "jack"}, {"name": "john"}], ["jack", "john"]), 278 | ( 279 | '$[key() = "bookA"]', 280 | {"bookA": {"price": 100}, "bookB": {}}, 281 | [{"price": 100}], 282 | ), 283 | ("$[key() = 0]", [{"price": 100}, {"price": 200}], [{"price": 100}]), 284 | ( 285 | '$[contains(key(), "book")]', 286 | {"bookA": {"price": 100}, "bookB": {"price": 200}, "pictureA": {}}, 287 | [{"price": 100}, {"price": 200}], 288 | ), 289 | ( 290 | '$[contains(@.category, "book")]', 291 | [ 292 | {"price": 100, "category": "Comic book"}, 293 | {"price": 200, "category": "magazine"}, 294 | {"price": 200, "no category": ""}, 295 | ], 296 | [{"price": 100, "category": "Comic book"}], 297 | ), 298 | ( 299 | "$.goods[contains(@.category, $.targetCategory)]", 300 | { 301 | "goods": [ 302 | {"price": 100, "category": "Comic book"}, 303 | {"price": 200, "category": "magazine"}, 304 | {"price": 200, "no category": ""}, 305 | ], 306 | "targetCategory": "book", 307 | }, 308 | [{"price": 100, "category": "Comic book"}], 309 | ), 310 | ( 311 | "$[contains(@.category, $.targetCategory)]", 312 | [{"price": 100, "category": "Comic book"}], 313 | [], 314 | ), 315 | ( 316 | '$[type = "book" and price > 100]', 317 | [ 318 | {"type": "book", "price": 100}, 319 | {"type": "book", "price": 200}, 320 | {"type": "video", "price": 200}, 321 | ], 322 | [{"type": "book", "price": 200}], 323 | ), 324 | ( 325 | '$[(type = "book" or type = "video") and price > 100]', 326 | [ 
327 | {"type": "book", "price": 100}, 328 | {"type": "book", "price": 200}, 329 | {"type": "video", "price": 200}, 330 | ], 331 | [{"type": "book", "price": 200}, {"type": "video", "price": 200}], 332 | ), 333 | ( 334 | '$[type = "book" or type = "video"]', 335 | [ 336 | {"type": "book", "price": 100}, 337 | {"type": "book", "price": 200}, 338 | {"type": "video", "price": 200}, 339 | {"type": "audio", "price": 100}, 340 | ], 341 | [ 342 | {"type": "book", "price": 100}, 343 | {"type": "book", "price": 200}, 344 | {"type": "video", "price": 200}, 345 | ], 346 | ), 347 | ( 348 | '$[type = "book" or type = "video" or type = "audio"]', 349 | [ 350 | {"type": "book", "price": 200}, 351 | {"type": "video", "price": 200}, 352 | {"type": "audio", "price": 100}, 353 | ], 354 | [ 355 | {"type": "book", "price": 200}, 356 | {"type": "video", "price": 200}, 357 | {"type": "audio", "price": 100}, 358 | ], 359 | ), 360 | ( 361 | "$[is]", 362 | [ 363 | {"is": 1}, 364 | {"is": 0}, 365 | {"is": True}, 366 | {"is": False}, 367 | {"is": []}, 368 | {"is": None}, 369 | {"is": {}}, 370 | {"is": "str"}, 371 | {"is": 1.1}, 372 | {"is": 0.0}, 373 | {}, 374 | ], 375 | [{"is": 1}, {"is": True}, {"is": "str"}, {"is": 1.1}], 376 | ), 377 | ( 378 | '$[contains(@, "is")]', 379 | [ 380 | {"is": 1}, 381 | {"is": 0}, 382 | {"is": True}, 383 | {"is": False}, 384 | {"is": []}, 385 | {"is": None}, 386 | {"is": {}}, 387 | {"is": "str"}, 388 | {"is": 1.1}, 389 | {"is": 0.0}, 390 | {}, 391 | ], 392 | [ 393 | {"is": 1}, 394 | {"is": 0}, 395 | {"is": True}, 396 | {"is": False}, 397 | {"is": []}, 398 | {"is": None}, 399 | {"is": {}}, 400 | {"is": "str"}, 401 | {"is": 1.1}, 402 | {"is": 0.0}, 403 | ], 404 | ), 405 | ( 406 | "$[not(is)]", 407 | [ 408 | {"is": 1}, 409 | {"is": 0}, 410 | {"is": True}, 411 | {"is": False}, 412 | {"is": []}, 413 | {"is": None}, 414 | {"is": {}}, 415 | {"is": "str"}, 416 | {"is": 1.1}, 417 | {"is": 0.0}, 418 | {}, 419 | ], 420 | [ 421 | {"is": 0}, 422 | {"is": False}, 423 | {"is": 
[]}, 424 | {"is": None}, 425 | {"is": {}}, 426 | {"is": 0.0}, 427 | ], 428 | ), 429 | ( 430 | '$[not(contains(@, "is"))]', 431 | [ 432 | {"is": 1}, 433 | {"is": 0}, 434 | {"is": True}, 435 | {"is": False}, 436 | {"is": []}, 437 | {"is": None}, 438 | {"is": {}}, 439 | {"is": "str"}, 440 | {"is": 1.1}, 441 | {"is": 0.0}, 442 | {}, 443 | ], 444 | [{}], 445 | ), 446 | ( 447 | '$[not(type = "book" or type = "video")]', 448 | [ 449 | {"type": "book", "price": 100}, 450 | {"type": "video", "price": 200}, 451 | {"type": "audio", "price": 100}, 452 | ], 453 | [{"type": "audio", "price": 100}], 454 | ), 455 | ("$[100 = price]", [{"price": 100}, {"price": 0}], [{"price": 100}]), 456 | ( 457 | "$.data[$.start:$.stop:$.step]", 458 | {"data": [0, 1, 2, 3, 4], "start": 1, "stop": 10, "step": 2}, 459 | [1, 3], 460 | ), 461 | ( 462 | "$.data[$.start:$.stop:$.step]", 463 | {"data": [0, 1, 2, 3, 4], "start": "not integer", "stop": 10, "step": 2}, 464 | [], 465 | ), 466 | ( 467 | "$.data[$.start:$.stop:$.step]", 468 | {"data": [0, 1, 2, 3, 4], "start": 1, "stop": "not integer", "step": 2}, 469 | [], 470 | ), 471 | ( 472 | "$.data[$.start:$.stop:$.step]", 473 | {"data": [0, 1, 2, 3, 4], "start": 1, "stop": 10, "step": "not integer"}, 474 | [], 475 | ), 476 | ], 477 | ids=ids, 478 | )(test_parse_check_and_extract) 479 | 480 | 481 | def test_parse_and_extract(expression, data, expect): 482 | jp = parse(expression) 483 | logging.debug(f"parse {expression!r} result: {jp}") 484 | assert_find(jp, data, expect) 485 | 486 | 487 | pytest.mark.parametrize( 488 | "expression, data, expect", 489 | [ 490 | ('$."*"', {"*": ""}, [""]), 491 | ("$.'*'", {"*": ""}, [""]), 492 | ("$.`*`", {"*": ""}, [""]), 493 | ( 494 | """$[name='"john"']""", 495 | [{"name": "jack"}, {"name": '"john"'}], 496 | [{"name": '"john"'}], 497 | ), 498 | ( 499 | """$[name="'john'"]""", 500 | [{"name": "jack"}, {"name": "'john'"}], 501 | [{"name": "'john'"}], 502 | ), 503 | ( 504 | """$[name=`john'`]""", 505 | [{"name": "jack"}, 
{"name": "john'"}], 506 | [{"name": "john'"}], 507 | ), 508 | ( 509 | """$[*].'name'""", 510 | [{"name": "jack"}, {"name": "john"}], 511 | ["jack", "john"], 512 | ), 513 | ], 514 | ids=ids, 515 | )(test_parse_and_extract) 516 | 517 | 518 | @pytest.mark.parametrize("expression", parser_parse_raises_exception_testcases) 519 | def test_syntax_error(expression): 520 | with pytest.raises(JSONPathSyntaxError): 521 | parse(expression) 522 | 523 | 524 | def test_undefined_function_error(): 525 | with pytest.raises(JSONPathUndefinedFunctionError): 526 | parse("$[abc(@)]") 527 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | # Standard Library 2 | from typing import Any, List 3 | 4 | # Third Party Library 5 | import pytest 6 | 7 | # First Party Library 8 | from jsonpath import Expr, JSONPathFindError 9 | 10 | 11 | def assert_find(jp: Expr, data: Any, expect: List[Any]): 12 | if expect: 13 | assert expect[0] == jp.find_first(data) 14 | else: 15 | with pytest.raises(JSONPathFindError): 16 | jp.find_first(data) 17 | 18 | assert expect == jp.find(data) 19 | --------------------------------------------------------------------------------