├── test
├── __init__.py
├── test_nested_spec.py
├── utils.py
├── test_class_type_spec.py
├── test_decorator_drf.py
├── test_decorator_dj.py
└── test_spec.py
├── data_spec_validator
├── __init__.py
├── __version__.py
├── spec
│ ├── custom_spec
│ │ ├── __init__.py
│ │ └── defines.py
│ ├── utils.py
│ ├── wrappers.py
│ ├── __init__.py
│ ├── features.py
│ ├── defines.py
│ ├── actions.py
│ ├── checks.py
│ └── validators.py
└── decorator
│ ├── __init__.py
│ └── decorators.py
├── requirements.txt
├── requirements-decorator.txt
├── .git-blame-ignore-revs
├── requirements-dev.txt
├── .github
├── pull_request_template.md
└── workflows
│ ├── lint.yml
│ └── publish.yml
├── pyproject.toml
├── setup.cfg
├── .pre-commit-config.yaml
├── LICENSE
├── setup.py
├── bin
└── admin.py
├── .gitignore
├── CHANGELOG.md
├── DEPLOYMENT.md
└── README.md
/test/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/data_spec_validator/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | python-dateutil>=2.8.2
2 |
--------------------------------------------------------------------------------
/data_spec_validator/__version__.py:
--------------------------------------------------------------------------------
1 | __version__ = '3.5.1'
2 |
--------------------------------------------------------------------------------
/requirements-decorator.txt:
--------------------------------------------------------------------------------
1 | Django>=3.0.3
2 | djangorestframework>=3.11.0
3 |
--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
# Add commits to be ignored by git blame here
2 | # Linting #
3 | #################
4 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/custom_spec/__init__.py:
--------------------------------------------------------------------------------
1 | from .defines import register
2 |
3 | __all__ = ["register"]
4 |
--------------------------------------------------------------------------------
/data_spec_validator/decorator/__init__.py:
--------------------------------------------------------------------------------
1 | from .decorators import dsv, dsv_request_meta
2 |
3 | __all__ = ['dsv', 'dsv_request_meta']
4 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/utils.py:
--------------------------------------------------------------------------------
1 | from typing import Union
2 |
3 |
def raise_if(condition: bool, error: Exception):
    """Raise ``error`` when ``condition`` is truthy; otherwise do nothing.

    Lets call sites express a guard as a one-liner instead of an
    ``if``/``raise`` pair. The annotation accepts any Exception instance
    (generalized from the former enumerated Union of four types).
    """
    if condition:
        raise error
7 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | Django>=3.0.3
2 | djangorestframework>=3.11.0
3 | python-dateutil>=2.8.2
4 | pre-commit==2.9.2
5 | autoflake==1.4
6 | isort==5.10.0
7 | black==22.8.0
8 | flake8==4.0.1
9 | twine==3.4.2
10 | parameterized==0.8.1
11 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | Reason
2 | --------------
3 |
4 |
5 | Changes
6 | --------------
7 |
8 |
9 | Test Scope
10 | --------------
11 |
12 |
13 | Checks
14 | --------------
15 | - [ ] Unit tests are included or not applicable.
16 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 120
3 | skip-string-normalization = true
4 | target-version = ['py36']
5 | include = '\.pyi?$'
6 | exclude = '''
7 | (
8 | /(
9 | \.eggs # exclude a few common directories in the
10 | | \.git # root of the project
11 | | \.tox
12 | | \.venv
13 | | build
14 | | dist
15 | | static
16 | )/
17 | )
18 | '''
19 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/wrappers.py:
--------------------------------------------------------------------------------
1 | from typing import Tuple
2 |
3 | from .defines import BaseValidator, BaseWrapper
4 |
5 |
class NotWrapper(BaseWrapper, BaseValidator):
    """Inverts the wrapped validator: succeeds exactly when the inner check fails."""

    name = 'not'

    def validate(self, value, extra, data) -> Tuple[bool, Exception]:
        passed, _ = self.wrapped_func(value, extra, data)
        if passed:
            # Inner check passed, so the negated check fails with an explanatory error.
            return False, TypeError(f'Value({value}) should not pass {self.wrapped_func.__str__()}')
        return True, ''
13 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | extend-ignore=
3 | # See https://github.com/PyCQA/pycodestyle/issues/373
4 | E203,
5 | # follow existing code style
6 | E721,
7 | # style related, follow black
8 | E501,
9 | # style related, follow black
10 | W503,
11 | max-line-length = 120
12 | per-file-ignores =
13 | # imported but unused
14 | __init__.py: F401
15 | max-complexity = 100
16 |
17 | [isort]
18 | profile = black
19 | multi_line_output = 3
20 | include_trailing_comma = True
21 | force_grid_wrap = 0
22 | use_parentheses = True
23 | ensure_newline_before_comments = True
24 | line_length = 120
25 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: Lint
2 |
3 | on: [pull_request]
4 |
5 | jobs:
6 | pre-commit:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v3
10 | with:
11 | fetch-depth: 1024
12 | - uses: actions/setup-python@v4
13 | with:
14 | python-version: 3.11
15 | - name: pre-commit
16 | env:
17 | BASE_SHA: ${{ github.event.pull_request.base.sha}}
18 | HEAD_SHA: ${{ github.event.pull_request.head.sha}}
19 | run: |
20 | python -m pip install pre-commit
21 | pre-commit run --from-ref $BASE_SHA --to-ref $HEAD_SHA --all-files
22 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | default_stages: [commit, push]
2 | fail_fast: false
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: v4.6.0
6 | hooks:
7 | - id: end-of-file-fixer
8 | - id: trailing-whitespace
9 | - repo: https://github.com/myint/autoflake
10 | rev: v2.3.1
11 | hooks:
12 | - id: autoflake
13 | args:
14 | - --in-place
15 | - --remove-unused-variables
16 | - --remove-all-unused-imports
17 | - repo: https://github.com/pycqa/isort
18 | rev: 5.13.2
19 | hooks:
20 | - id: isort
21 | - repo: https://github.com/psf/black
22 | rev: 24.4.2
23 | hooks:
24 | - id: black
25 | language_version: python3.11
26 | args:
27 | - --target-version=py38
28 | - --line-length=120
29 | - --skip-string-normalization
30 | - repo: https://github.com/PyCQA/flake8
31 | rev: 7.0.0
32 | hooks:
33 | - id: flake8
34 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/custom_spec/defines.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from typing import Dict
3 |
4 | from data_spec_validator.spec.defines import BaseValidator
5 |
6 | _custom_map = dict()
7 |
8 |
def get_custom_check_2_validator_map() -> Dict[str, BaseValidator]:
    """Return the module-level registry mapping custom check names to validators.

    The returned dict is the live registry (mutated by ``register``), not a copy.
    """
    return _custom_map
11 |
12 |
13 | def _get_class_name(instance) -> str:
14 | return instance.__class__.__name__
15 |
16 |
def register(check_2_validator_map) -> bool:
    """Register custom check-name -> validator entries into the global registry.

    Each validator must be a ``BaseValidator`` instance. Registering a check
    name that already exists overwrites the previous validator and emits a
    warning rather than failing.

    Raises:
        TypeError: if any validator is not a ``BaseValidator``.

    Returns:
        True on success.
    """
    for check, validator in check_2_validator_map.items():
        # isinstance() is the idiomatic, ABC-aware form of the former
        # issubclass(type(validator), BaseValidator) check.
        if not isinstance(validator, BaseValidator):
            raise TypeError(f'{_get_class_name(validator)} is not a subclass of BaseValidator')

        if check in _custom_map:
            ori_validator = _custom_map[check]
            warnings.warn(
                f'[DSV][WARNING] Check({check}) already exists, gonna overwrite the validator from '
                f'{_get_class_name(ori_validator)} to {_get_class_name(validator)}'
            )
        _custom_map[check] = validator
    return True
30 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Hard Core Technology Corp.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# Packaging script for data-spec-validator. Reads the version from
# data_spec_validator/__version__.py and the long description from README.md.
import os

import setuptools

CUR_DIR = os.path.abspath(os.path.dirname(__file__))

# Load __version__ by exec'ing the version file, avoiding an import of the
# package (and any import-time side effects) during packaging.
about = {}
with open(os.path.join(CUR_DIR, "data_spec_validator", "__version__.py"), "r") as f:
    exec(f.read(), about)

with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="data-spec-validator",
    version=about['__version__'],
    author="GoFreight",
    author_email="pypi@hardcoretech.co",
    description="Validation tool for API/Function parameters",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/hardcoretech/data-spec-validator",
    classifiers=[
        "Programming Language :: Python :: 3",
        "Framework :: Django :: 3",
        "Framework :: Django :: 4",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    package_dir={"data_spec_validator": "data_spec_validator"},
    packages=setuptools.find_packages(),
    install_requires=[
        "python-dateutil",
    ],
    # The decorator extras pull in the web-framework integrations; the core
    # validator itself only needs python-dateutil.
    extras_require={
        'decorator': ['Django>=3.0', 'djangorestframework'],
        'decorator-dj': ['Django>=3.0'],
    },
    python_requires=">=3.8",
    project_urls={"Changelog": "https://github.com/hardcoretech/data-spec-validator/blob/develop/CHANGELOG.md"},
)
42 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/__init__.py:
--------------------------------------------------------------------------------
1 | from .actions import validate_data_spec
2 | from .checks import Checker, CheckerOP
3 |
4 | # Export generic validator NAME
5 | from .defines import (
6 | AMOUNT,
7 | AMOUNT_RANGE,
8 | BOOL,
9 | COND_EXIST,
10 | DATE,
11 | DATE_OBJECT,
12 | DATE_RANGE,
13 | DATETIME_OBJECT,
14 | DECIMAL_PLACE,
15 | DICT,
16 | DIGIT_STR,
17 | DUMMY,
18 | EMAIL,
19 | FLOAT,
20 | FOREACH,
21 | INT,
22 | JSON,
23 | JSON_BOOL,
24 | LENGTH,
25 | LIST,
26 | LIST_OF,
27 | NONE,
28 | NUMBER,
29 | ONE_OF,
30 | REGEX,
31 | SELF,
32 | SPEC,
33 | STR,
34 | UUID,
35 | BaseValidator,
36 | DSVError,
37 | ErrorMode,
38 | not_,
39 | reset_msg_level,
40 | )
41 | from .features import dsv_feature
42 | from .utils import raise_if
43 |
44 | __all__ = [
45 | "Checker",
46 | "CheckerOP",
47 | "validate_data_spec",
48 | "AMOUNT",
49 | "AMOUNT_RANGE",
50 | "BOOL",
51 | "COND_EXIST",
52 | "DATE",
53 | "DATE_OBJECT",
54 | "DATE_RANGE",
55 | "DATETIME_OBJECT",
56 | "DECIMAL_PLACE",
57 | "DICT",
58 | "DIGIT_STR",
59 | "DUMMY",
60 | "EMAIL",
61 | "FLOAT",
62 | "FOREACH",
63 | "INT",
64 | "JSON",
65 | "JSON_BOOL",
66 | "LENGTH",
67 | "LIST",
68 | "LIST_OF",
69 | "NONE",
70 | "NUMBER",
71 | "ONE_OF",
72 | "REGEX",
73 | "SELF",
74 | "SPEC",
75 | "STR",
76 | "UUID",
77 | "BaseValidator",
78 | "DSVError",
79 | "ErrorMode",
80 | "not_",
81 | "reset_msg_level",
82 | "dsv_feature",
83 | "raise_if",
84 | ]
85 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish to PyPI
2 |
3 | on:
4 | release:
5 | types: [published]
6 | workflow_dispatch:
7 | inputs:
8 | environment:
9 | description: 'Target environment'
10 | required: true
11 | type: choice
12 | options:
13 | - pypi
14 | - testpypi
15 | default: 'testpypi'
16 |
17 | jobs:
18 | deploy:
19 | runs-on: ubuntu-latest
20 | permissions:
21 | contents: read
22 |
23 | steps:
24 | - uses: actions/checkout@v4
25 |
26 | - name: Set up Python
27 | uses: actions/setup-python@v5
28 | with:
29 | python-version: '3.11'
30 |
31 | - name: Install dependencies
32 | run: |
33 | python -m pip install --upgrade pip
34 | pip install build
35 |
36 | - name: Build package
37 | run: python -m build
38 |
39 | - name: Check build artifacts exist
40 | run: |
41 | ls -la dist/
42 | test -f dist/*.whl || exit 1
43 | test -f dist/*.tar.gz || exit 1
44 |
45 | - name: Publish to Production PyPI
46 | if: github.event_name == 'release' || (github.event_name == 'workflow_dispatch' && inputs.environment == 'pypi')
47 | uses: pypa/gh-action-pypi-publish@release/v1
48 | with:
49 | password: ${{ secrets.PYPI_API_TOKEN }}
50 |
51 | - name: Publish to Test PyPI
52 | if: github.event_name == 'workflow_dispatch' && inputs.environment == 'testpypi'
53 | uses: pypa/gh-action-pypi-publish@release/v1
54 | with:
55 | repository-url: https://test.pypi.org/legacy/
56 | password: ${{ secrets.TEST_PYPI_API_TOKEN }}
57 |
--------------------------------------------------------------------------------
/bin/admin.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 | import os
5 | import os.path
6 | import subprocess
7 | from os.path import abspath, dirname, join
8 |
9 | CUR_DIR = dirname(abspath(__file__))
10 | ROOT_DIR = abspath(join(CUR_DIR, '..'))
11 |
12 |
def parse_args():
    """Parse the CLI arguments and dispatch to the selected sub-command."""
    parser = argparse.ArgumentParser(description='data-spec-validator admin')
    subparsers = parser.add_subparsers()

    # === build === #
    parser_build = subparsers.add_parser('build', help='build package')
    parser_build.set_defaults(func=build)

    # === upload === #
    parser_upload = subparsers.add_parser('upload', help='upload to pypi')
    parser_upload.add_argument(
        '--repo', dest='repo', default='pypi', choices=('pypi', 'testpypi'), help='upload repo, pypi or testpypi'
    )
    parser_upload.set_defaults(func=upload)

    args = parser.parse_args()
    # Each sub-parser binds `func`; calling it runs the chosen command.
    args.func(args)
30 |
31 |
def build(args):
    """Build a source distribution, then remove intermediate build artifacts."""
    subprocess.check_call('python setup.py sdist', shell=True)
    subprocess.check_call('rm -vrf ./build ./*.egg-info', shell=True)
35 |
36 |
def upload(args):
    """Rebuild the distribution and upload it with twine.

    Asks for interactive confirmation before pushing to production PyPI;
    uploading to testpypi proceeds without a prompt.
    """
    # remove old dist
    subprocess.check_call('rm -rf ./dist', shell=True)

    # build again
    build(args)

    if args.repo == 'pypi':
        ans = input("Are you sure to upload package to pypi?\n(y/N)")
        if ans.lower() != 'y':
            return

    # read local .pypirc first
    repo_pypirc_path = join(ROOT_DIR, '.pypirc')
    config_arg = f'--config-file={repo_pypirc_path}' if os.path.exists(repo_pypirc_path) else ''
    repo_arg = '-r testpypi' if args.repo == 'testpypi' else ''
    subprocess.check_call(f'twine upload {repo_arg} {config_arg} dist/*', shell=True)
54 |
55 |
def main():
    """Entry point: run every command from the repository root so relative paths resolve."""
    os.chdir(ROOT_DIR)
    parse_args()


if __name__ == '__main__':
    main()
63 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyPi
31 | .pypirc
32 |
33 | # PyInstaller
34 | # Usually these files are written by a python script from a template
35 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
36 | *.manifest
37 | *.spec
38 |
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 |
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .nox/
47 | .coverage
48 | .coverage.*
49 | .cache
50 | nosetests.xml
51 | coverage.xml
52 | *.cover
53 | *.py,cover
54 | .hypothesis/
55 | .pytest_cache/
56 |
57 | # Translations
58 | *.mo
59 | *.pot
60 |
61 | # Django stuff:
62 | *.log
63 | local_settings.py
64 | db.sqlite3
65 | db.sqlite3-journal
66 |
67 | # Flask stuff:
68 | instance/
69 | .webassets-cache
70 |
71 | # Scrapy stuff:
72 | .scrapy
73 |
74 | # Sphinx documentation
75 | docs/_build/
76 |
77 | # PyBuilder
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
98 | __pypackages__/
99 |
100 | # Celery stuff
101 | celerybeat-schedule
102 | celerybeat.pid
103 |
104 | # SageMath parsed files
105 | *.sage.py
106 |
107 | # Environments
108 | .env
109 | .venv
110 | env/
111 | venv/
112 | ENV/
113 | env.bak/
114 | venv.bak/
115 |
116 | # Spyder project settings
117 | .spyderproject
118 | .spyproject
119 |
120 | # Rope project settings
121 | .ropeproject
122 |
123 | # mkdocs documentation
124 | /site
125 |
126 | # mypy
127 | .mypy_cache/
128 | .dmypy.json
129 | dmypy.json
130 |
131 | # Pyre type checker
132 | .pyre/
133 |
134 | # MacOS Desktop Services Store
135 | *.DS_Store
136 |
137 | # IntelliJ
138 | .idea/
139 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/features.py:
--------------------------------------------------------------------------------
1 | from typing import Callable, Optional, Set, Tuple, Type, Union
2 |
3 | from .checks import Checker
4 | from .defines import FOREACH, SPEC, ErrorMode
5 |
6 |
7 | class _DSVFeatureParams:
8 | __slots__ = ('_strict', '_any_keys_set', '_err_mode', '_spec_name')
9 |
10 | def __init__(self, strict, any_keys_set: Union[Set[Tuple[str, ...]], None], err_mode, spec_name):
11 | self._strict = strict
12 | self._any_keys_set = any_keys_set or set()
13 | self._err_mode = err_mode
14 | self._spec_name = spec_name
15 |
16 | @property
17 | def err_mode(self) -> ErrorMode:
18 | return self._err_mode
19 |
20 | @property
21 | def strict(self) -> bool:
22 | return self._strict
23 |
24 | @property
25 | def any_keys_set(self) -> set:
26 | return self._any_keys_set
27 |
28 | @property
29 | def spec_name(self) -> str:
30 | return self._spec_name
31 |
32 | def __repr__(self):
33 | return f'_DSVFeatureParams(strict={self._strict}, any_keys_set={self._any_keys_set}, err_mode={self._err_mode}), spec_name={self._spec_name}'
34 |
35 |
36 | _FEAT_PARAMS = '__feat_params__'
37 |
38 |
def _process_class(
    cls: Type,
    strict: bool,
    any_keys_set: Union[Set[Tuple[str, ...]], None],
    err_mode: ErrorMode,
    spec_name: Optional[str],
) -> Type:
    """Attach the feature parameters to *cls* and return it unchanged otherwise."""
    feat_params = _DSVFeatureParams(strict, any_keys_set, err_mode, spec_name)
    setattr(cls, _FEAT_PARAMS, feat_params)
    return cls
49 |
50 |
def dsv_feature(
    strict: bool = False,
    any_keys_set: Optional[Set[Tuple[str, ...]]] = None,
    err_mode=ErrorMode.MSE,
    spec_name: Optional[str] = None,
) -> Callable:
    """Class decorator that attaches DSV feature parameters to a spec class.

    The parameters are stored on the class (under ``_FEAT_PARAMS``) and read
    back by the getters below. ``err_mode`` selects MSE (single most
    significant error) or ALL; ``spec_name`` overrides the class name shown
    in error messages (see ``get_spec_name``).
    """

    def wrap(cls: Type) -> Type:
        return _process_class(cls, strict, any_keys_set, err_mode, spec_name)

    return wrap
61 |
62 |
def get_spec_name(spec) -> str:
    """Display name of *spec*: the dsv_feature ``spec_name`` override if set, else the class name."""
    # Single getattr with default, consistent with get_err_mode/is_strict below
    # (the original called hasattr + getattr twice).
    feat_params = getattr(spec, _FEAT_PARAMS, None)
    if feat_params and feat_params.spec_name:
        return feat_params.spec_name
    return spec.__name__
67 |
68 |
def get_err_mode(spec) -> ErrorMode:
    """Error mode configured on *spec*; defaults to ErrorMode.MSE when undecorated."""
    feat_params = getattr(spec, _FEAT_PARAMS, None)
    if feat_params is None:
        return ErrorMode.MSE
    return feat_params.err_mode
72 |
73 |
def is_strict(spec) -> bool:
    """Whether *spec* was decorated with strict=True."""
    feat_params = getattr(spec, _FEAT_PARAMS, None)
    if feat_params is None:
        return False
    return bool(feat_params.strict)
77 |
78 |
def get_any_keys_set(spec) -> Set[Tuple[str, ...]]:
    """Any-keys groups configured on *spec*, or an empty set when undecorated."""
    feat_params = getattr(spec, _FEAT_PARAMS, None)
    if feat_params is None:
        return set()
    return feat_params.any_keys_set
82 |
83 |
def repack_multirow(data, spec):
    """Wrap an iterable of rows so it validates as FOREACH rows of *spec*.

    Returns a (data, spec) pair where the data is keyed under 'dsv_multirow'
    and the spec applies *spec* to each element.
    """

    class _InternalMultiSpec:
        dsv_multirow = Checker([FOREACH], FOREACH=SPEC, SPEC=spec)

    return {'dsv_multirow': data}, _InternalMultiSpec
90 |
--------------------------------------------------------------------------------
/test/test_nested_spec.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from data_spec_validator.spec import (
4 | BOOL,
5 | DICT,
6 | DIGIT_STR,
7 | FLOAT,
8 | INT,
9 | NONE,
10 | SPEC,
11 | STR,
12 | Checker,
13 | dsv_feature,
14 | validate_data_spec,
15 | )
16 |
17 | from .utils import is_something_error
18 |
19 |
class TestNestedSpec(unittest.TestCase):
    """Validation of specs that nest other specs via the SPEC check."""

    def test_nested(self):
        """Three levels of nesting: matching data passes, type-mismatched data raises TypeError."""

        class NestedSpec:
            class ChildSpec1:
                class ChildSpec11:
                    f_11 = Checker([FLOAT])
                    d_11 = Checker([DICT])
                    b_11 = Checker([BOOL])

                f_1 = Checker([FLOAT])
                d_1 = Checker([DICT])
                b_1 = Checker([BOOL])
                s_1 = Checker([SPEC], SPEC=ChildSpec11)

            class ChildSpec2:
                s_2 = Checker([STR])

            class ChildSpec3:
                n_3 = Checker([NONE])
                ds_3 = Checker([DIGIT_STR])

            c1_f = Checker([SPEC], SPEC=ChildSpec1)
            int_f = Checker([INT])
            str_f = Checker([STR])
            c2_f = Checker([SPEC], SPEC=ChildSpec2)
            c3_f = Checker([SPEC], SPEC=ChildSpec3)

        # Fully matching payload, including the doubly-nested s_1 sub-spec.
        ok_data = dict(
            c1_f=dict(
                f_1=3.4,
                d_1=dict(a=1),
                b_1=True,
                s_1=dict(
                    f_11=1.1,
                    d_11=dict(b=[]),
                    b_11=False,
                ),
            ),
            int_f=9,
            str_f='first str',
            c2_f=dict(s_2='second str'),
            c3_f=dict(
                n_3=None,
                ds_3='99',
            ),
        )
        assert validate_data_spec(ok_data, NestedSpec)

        # Every field here has the wrong type for its checker.
        nok_data = dict(
            c1_f=3,
            int_f='9',
            str_f=3,
            c2_f=None,
            c3_f=[],
        )
        assert is_something_error(TypeError, validate_data_spec, nok_data, NestedSpec)

    def test_nested_error_field_name(self):
        """spec_name from dsv_feature replaces the class name in the error's field path."""

        class NestedSpec:
            class ChildSpec1:
                @dsv_feature(spec_name='NestedSpec.c1_f.s_1')
                class ChildSpec11:
                    f_11 = Checker([FLOAT])

                s_1 = Checker([SPEC], SPEC=ChildSpec11)

            c1_f = Checker([SPEC], SPEC=ChildSpec1)

        nok_data = dict(
            c1_f=dict(
                s_1=dict(
                    f_11=None,
                ),
            ),
        )

        with self.assertRaises(TypeError) as exc_info:
            validate_data_spec(nok_data, NestedSpec)

        exc_msg = str(exc_info.exception)
        self.assertEqual(exc_msg, 'field: NestedSpec.c1_f.s_1.f_11, reason: None is not a float')
101 |
--------------------------------------------------------------------------------
/test/utils.py:
--------------------------------------------------------------------------------
1 | from io import StringIO
2 |
3 |
def is_something_error(error, func, *args, **kwargs):
    """Tell whether calling *func* with the given arguments raises *error*."""
    raised = False
    try:
        func(*args, **kwargs)
    except error:
        raised = True
    return raised
10 |
11 |
def is_type_error(func, *args):
    """Return True when func(*args) raises TypeError, False otherwise."""
    try:
        func(*args)
    except TypeError:
        return True
    else:
        return False
18 |
19 |
def is_django_installed():
    """True when the django package can be imported in this environment."""
    try:
        import django  # noqa

        return True
    except ImportError:
        return False
26 |
27 |
def is_drf_installed():
    """True when djangorestframework can be imported in this environment."""
    try:
        import rest_framework  # noqa

        return True
    except ImportError:
        return False
34 |
35 |
def make_request(cls, path='/', method='GET', user=None, headers=None, data=None, qs=None, is_json=False):
    """Build a Django WSGI/ASGI request (or a DRF request wrapping one) for tests.

    cls selects the flavor: ASGIRequest is built from a scope dict, anything
    else starts from a WSGI environ; when cls is a DRF request class, the WSGI
    request is wrapped and cloned into it. Requires Django to be installed.
    """
    assert is_django_installed()

    from django.core.handlers.asgi import ASGIRequest
    from django.core.handlers.wsgi import WSGIRequest

    if cls is not ASGIRequest:
        kwargs = {'REQUEST_METHOD': method, 'PATH_INFO': path, 'wsgi.input': StringIO()}
        if qs:
            kwargs.update({'QUERY_STRING': qs})

        # Set content type in initial environ if data is provided
        if data and method in ['POST', 'PUT', 'PATCH', 'DELETE']:
            kwargs.update(
                {
                    'CONTENT_TYPE': 'application/json' if is_json else 'application/x-www-form-urlencoded',
                    'CONTENT_LENGTH': len(str(data)),
                }
            )

        req = WSGIRequest(kwargs)
    else:
        kwargs = {'path': path, 'method': method}
        if qs:
            kwargs.update({'query_string': qs})

        # Set content type in initial scope if data is provided
        if data and method in ['POST', 'PUT', 'PATCH', 'DELETE']:
            # NOTE(review): this rebinds the `headers` parameter, so a caller-supplied
            # `headers` dict is dropped for ASGI requests with a body — confirm intended.
            headers = kwargs.get('headers', [])
            headers.extend(
                [
                    [
                        b'content-type',
                        ('application/json' if is_json else 'application/x-www-form-urlencoded').encode(),
                    ],
                    [b'content-length', str(len(str(data))).encode()],
                ]
            )
            kwargs['headers'] = headers

        req = ASGIRequest(kwargs, StringIO())

    req.user = user

    if headers:
        req.META.update(headers)

    if data:
        if method == 'GET':
            setattr(req, 'GET', data)
        elif method in ['POST', 'PUT', 'PATCH', 'DELETE']:
            req.read()  # trigger RawPostDataException and force DRF to load data from req.POST
            if is_json:
                # Raw JSON body goes through _body; req.POST stays empty.
                req._body = data
                req.POST = {}
            else:
                req.POST = data

    if is_drf_installed() and cls is not WSGIRequest and cls is not ASGIRequest:
        from rest_framework.parsers import FormParser
        from rest_framework.request import clone_request

        return clone_request(cls(req, parsers=[FormParser]), method)
    return req
100 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/defines.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 | from enum import Enum
3 | from typing import Any, List, Type, Union
4 |
5 | # TYPE
6 | NONE = 'none'
7 | INT = 'int'
8 | FLOAT = 'float'
9 | NUMBER = 'number'
10 | DIGIT_STR = 'digit_str' # URL params cannot distinguish from strings and numbers
11 | STR = 'str'
12 | BOOL = 'bool'
13 | JSON = 'json'
14 | JSON_BOOL = 'json_bool'
15 | LIST = 'list'
16 | DICT = 'dict'
17 | DATE_OBJECT = 'date_obj'
18 | DATETIME_OBJECT = 'datetime_obj'
19 | SELF = 'self'
20 |
21 | # VALUE
22 | DATE = 'date'
23 | DATE_RANGE = 'date_range'
24 | AMOUNT = 'amount'
25 | AMOUNT_RANGE = 'amount_range'
26 | LENGTH = 'length'
27 | DECIMAL_PLACE = 'decimal_place'
28 | SPEC = 'spec'
29 | LIST_OF = 'list_of'
30 | ONE_OF = 'one_of'
31 | FOREACH = 'foreach'
32 | DUMMY = 'dummy'
33 | EMAIL = 'email'
34 | UUID = 'uuid'
35 | REGEX = 'regex'
36 |
37 | COND_EXIST = 'cond_exist'
38 |
39 | _TYPE = '_type'
40 |
41 | RAW_CHECK_TYPE = Union[str, Type[Any]]
42 |
43 |
class BaseValidator(metaclass=ABCMeta):
    """Interface every validator (built-in or custom) must implement."""

    @staticmethod
    @abstractmethod
    def validate(value, extra, data):
        """Validate *value*; implementations return an (ok, error) pair (see NotWrapper.validate)."""
        raise NotImplementedError
49 |
50 |
51 | # Wrapper prefix
52 | _wrapper_splitter = '-'
53 | _not_prefix = 'not'
54 |
55 |
class BaseWrapper:
    """Base for wrappers that decorate another validator's validate callable."""

    def __init__(self, wrapped_func):
        # The inner validate callable being wrapped (invoked by subclasses).
        self.wrapped_func = wrapped_func
59 |
60 |
def not_(check: str) -> str:
    """Build the negated form of *check*, e.g. 'int' -> 'not-int'."""
    return f'{_not_prefix}{_wrapper_splitter}{check}'
63 |
64 |
class ErrorMode(Enum):
    """How validation failures are reported: only the most significant error, or all of them."""

    MSE = 'most_significant'
    ALL = 'all'
68 |
69 |
class DSVError(Exception):
    """Aggregate exception bundling every error collected during validation."""

    def __init__(self, *errors: Exception):
        # Varargs of exception instances; annotation corrected from
        # List[Type[Exception]] (each positional arg is one Exception).
        self._errors = errors

    def __str__(self, *args, **kwargs):
        return repr(self._errors)
76 |
77 |
class ValidateResult:
    """Outcome of one check on one field; exposes spec/field/value/error read-only."""

    def __init__(self, spec: Type = None, field: str = None, value: Any = None, check: str = None, error=None):
        from .features import get_spec_name  # FIXME: refine the structure to avoid circular import

        # TODO: Output spec & check information when there's a debug message level for development.
        self.__spec = get_spec_name(spec) if spec else None
        self.__field = field
        self.__value = value
        # NOTE(review): `check` is stored but currently has no accessor (see TODO above).
        self.__check = check
        self.__error = error

    @property
    def spec(self) -> str:
        return self.__spec

    @property
    def field(self) -> str:
        return self.__field

    @property
    def value(self):
        return self.__value

    @property
    def error(self) -> Exception:
        return self.__error
104 |
105 |
class MsgLv(Enum):
    """Error-message verbosity: VAGUE hides failure reasons, DEFAULT shows them."""

    VAGUE = 'vague'
    DEFAULT = 'default'
109 |
110 |
111 | # TODO: Can make this a per-spec-check scope feature
112 | __message_level = MsgLv.DEFAULT
113 |
114 |
def get_msg_level() -> MsgLv:
    """Return the current global message level (configured via reset_msg_level)."""
    return __message_level
117 |
118 |
def reset_msg_level(vague: bool = False):
    """
    Setting vague=True, all error messages will be replaced to 'field: XXX not well-formatted'.
    Otherwise, the message is as usual showing the reason.
    """
    global __message_level
    __message_level = MsgLv.VAGUE if vague else MsgLv.DEFAULT
129 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/actions.py:
--------------------------------------------------------------------------------
1 | from typing import List, Tuple
2 |
3 | from .defines import DSVError, ErrorMode, MsgLv, ValidateResult, get_msg_level
4 | from .features import get_err_mode, repack_multirow
5 | from .utils import raise_if
6 | from .validators import SpecValidator, UnknownFieldValue
7 |
8 |
def _wrap_error_with_field_info(failure) -> Exception:
    """Produce an error for *failure* whose message carries spec/field context."""
    if get_msg_level() == MsgLv.VAGUE:
        return RuntimeError(f'field: {failure.field} not well-formatted')

    if isinstance(failure.value, UnknownFieldValue):
        return LookupError(f'field: {failure.field} missing')

    wrapped_msg = f'field: {failure.spec}.{failure.field}, reason: {failure.error}'
    # Preserve the original error type so callers can still catch it precisely.
    return type(failure.error)(wrapped_msg)
16 |
17 |
def _flatten_results(failures, errors=None):
    """Recursively walk nested failure tuples/lists, appending wrapped Exceptions to *errors*.

    *errors* must be a list; it is mutated in place.
    """
    raise_if(type(errors) != list, RuntimeError(f'{errors} not a list'))

    if type(failures) == tuple:
        # An (ok, results) pair — only the results half can carry failures.
        _flatten_results(failures[1], errors)
    elif type(failures) == list:
        for item in failures:
            _flatten_results(item, errors)
    elif isinstance(failures, ValidateResult):
        if issubclass(type(failures.error), Exception):
            error = _wrap_error_with_field_info(failures)
            errors.append(error)
            return
        # Nested-spec failure: the error field holds further results to flatten.
        _flatten_results(failures.error, errors)
32 |
33 |
34 | def _find_most_significant_error(errors: List[Exception]) -> Exception:
35 | # Build error list by error types
36 | err_map = {}
37 | for err in errors:
38 | if isinstance(err, ValueError):
39 | err_key = 'ValueError'
40 | elif isinstance(err, PermissionError):
41 | err_key = 'PermissionError'
42 | elif isinstance(err, TypeError):
43 | err_key = 'TypeError'
44 | elif isinstance(err, LookupError):
45 | err_key = 'LookupError'
46 | elif isinstance(err, KeyError):
47 | err_key = 'KeyError'
48 | else:
49 | err_key = 'RuntimeError'
50 | err_map.setdefault(err_key, []).append(err)
51 |
52 | # Severity, PermissionError > LookupError > TypeError > ValueError > RuntimeError.
53 | errors = (
54 | err_map.get('PermissionError', [])
55 | or err_map.get('KeyError', [])
56 | or err_map.get('LookupError', [])
57 | or err_map.get('TypeError', [])
58 | or err_map.get('ValueError', [])
59 | or err_map.get('RuntimeError', [])
60 | )
61 | # TODO: For better information, we can raise an error with all error messages at one shot
62 | main_error = errors[0]
63 | return main_error
64 |
65 |
66 | def _is_incorrect_multirow_spec(errors: List[Exception]) -> bool:
67 | return any('_InternalMultiSpec' in str(e) for e in errors)
68 |
69 |
def _extract_error(spec, failures: List[Tuple[bool, List[ValidateResult]]]) -> Exception:
    """Convert the nested validation failures of *spec* into a single exception to raise."""
    errors = []
    _flatten_results(failures, errors)
    err_mode = get_err_mode(spec)

    # A multirow spec fed with non-iterable data surfaces as _InternalMultiSpec errors.
    if _is_incorrect_multirow_spec(errors):
        msg = f'spec: {spec}, reason: incompatible data format for validation, an iterable object is needed'
        return ValueError(msg)

    if err_mode == ErrorMode.MSE:
        # MSE mode: report only the most significant error.
        return _find_most_significant_error(errors)
    # Otherwise collect every error into one DSVError.
    return DSVError(*errors)
82 |
83 |
def validate_data_spec(data, spec, **kwargs) -> bool:
    """
    Validate *data* against *spec*, using SpecValidator as the root validator.

    Keyword arguments:
        multirow: treat data as a list of rows, each row validated against spec.
        nothrow: return False on failure instead of raising.
    """
    if kwargs.get('multirow', False):
        _data, _spec = repack_multirow(data, spec)
    else:
        _data, _spec = data, spec

    ok, failures = SpecValidator.validate(_data, {SpecValidator.name: _spec}, None)
    if not ok and not kwargs.get('nothrow', False):
        raise _extract_error(spec, failures)
    return ok
94 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | 3.5.1
5 | -----
6 | - Fix payload loading issue when content type header contains but not exactly "application/json"
7 |
8 |
9 | 3.5.0
10 | -----
11 | - [feature] add `NUMBER` that accepts float and integer
12 |
13 |
14 | 3.4.1
15 | -----
16 | - Fix `dsv` decorator to work with `DSVError`.
17 | - Fix dsv_feature `spec_name` should not show empty value.
18 |
19 |
20 | 3.4.0
21 | -----
22 |
23 | Changes:
24 |
25 | - Drop support for Python 3.6
- [feature] `dsv` decorator now has identical behavior for Django and DRF.
27 | - [feature] new dsv_feature `spec_name` to customize the spec name in error messages.
28 |
29 | 3.3.0
30 | -----
31 |
32 | Changes:
33 |
- DEPRECATIONS: The decorators' support for Django versions 1.x and 2.x is dropped. The minimum supported version is Django 3.0.
35 | - [feature] `dsv` decorator support loading payload from Django WSGIRequest for Content-Type: application/json
36 |
37 | 3.2.0
38 | -----
39 |
40 | Changes:
41 |
42 | - [feature] `dsv` decorator support Django ASGIRequest class
- [feature] Support validating request data with non-python variable naming convention keys by the `alias` keyword in `Checker`
44 |
45 | i.e. `/some/endpoint?a.b=3&c.d=ok&array[]=1&array[]=2&array[]=3`
46 |
47 | 3.1.0
48 | -----
49 |
50 | Changes:
51 |
- [feature] `dsv` decorator can now be used in a Django-only environment (without DRF installed)
53 |
54 |
55 | 3.0.1
56 | -----
57 |
58 | Changes:
59 |
60 | - [fix] Cherry-pick `eac8af90e0313fd312edba1e7f18b3f537bc80ce` for features (combining query_params in POST/PUT/DELETE request)
61 |
62 |
63 | 3.0.0
64 | -----
65 |
66 | Changes:
67 |
68 | - [feature] Support class-based check
69 | - [feature] Support nested spec class
70 | - [feature] Support combining request query parameters with payload for POST/PUT/DELETE Request
71 | - Checker keyword "extra" is removed.
72 |
73 |
74 | 2.1.1
75 | -----
76 |
77 | Changes:
78 |
79 | - [fix] Export `FLOAT`, `DATE_OBJECT`, `DATETIME_OBJECT` check str
80 |
81 |
82 | 2.1.0
83 | -----
84 |
85 | Changes:
86 |
87 | - [feature] add `FLOAT`, `DATE_OBJECT`, `DATETIME_OBJECT` check to validate python built-in type. Derived instances are
88 | not considered valid and you can use custom spec in these cases.
89 | - Update README
90 |
91 |
92 | 2.0.0
93 | -----
94 |
95 | Changes:
96 |
97 | - [feature] add `multirow` option in `@dsv` and `def validate_data_spec` to validate list of SPEC naturally.
98 | - Update README
99 |
100 |
101 | 1.9.0
102 | -----
103 |
104 | Changes:
105 |
106 | - [feature] new dsv_feature `err_mode` to collect all validation errors into exception arguments.
107 | - [improvement] Spec name is now added before the field name in error message.
108 | - Check `KEY_COEXISTS`, `ANY_KEY_EXISTS` are deprecated.
109 | - Update README
110 |
111 |
112 | 1.8.0
113 | -----
114 |
115 | Changes:
116 |
117 | - [fix] COND_EXIST now works with other checks correctly.
118 | - [feature] Add new check `FOREACH`, used for any iterable.
119 | - [behavior-change] `LIST_OF` enforce LIST type validation as well
- [behavior-change] Use `warnings.warn` instead of `print`
121 | - [internal] More type hint
122 | - Postpone {KEY_COEXISTS, ANY_KEY_EXISTS} deprecation, will remove them in 1.9.0
123 | - Update README
124 |
125 |
126 | 1.7.0
127 | -----
128 |
129 | Changes:
130 |
131 | - Add new check COND_EXIST to support conditional key existence. More sample usage in test.
132 | - Add deprecating messages for Check: KEY_COEXISTS, ANY_KEY_EXISTS
133 | - Add test cases.
134 | - DEPRECATIONS: KEY_COEXISTS, ANY_KEY_EXISTS will be removed in 1.8.0
135 |
136 |
137 | 1.6.0
138 | -----
139 |
140 | Changes:
141 |
142 | - Support checks as keyword argument when building checker.
143 | - Add test cases.
144 |
145 |
146 | 1.5.0
147 | -----
148 |
149 | Changes:
150 |
151 | - Add type hints
152 | - Support strict mode to detect unexpected key/value parameters
153 |
154 |
155 | 1.4.0
156 | -----
157 |
158 | Changes:
159 |
160 | - Improve error message readability
161 | - Provide `reset_msg_level` function to change the displayed messages
162 | - Add `allow_none` option for `Checker` to support `None` value
163 |
164 | 1.3.1
165 | -----
166 |
167 | Changes:
168 |
169 | - Change email validator regular expression (https://html.spec.whatwg.org/multipage/input.html#valid-e-mail-address)
170 |
171 | 1.3.0
172 | -----
173 |
174 | Changes:
175 |
176 | - Fix package version
177 | - String type UUID now can pass UUIDValidator
178 |
179 | ...
180 |
--------------------------------------------------------------------------------
/test/test_class_type_spec.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import unittest
3 | import uuid
4 |
5 | from data_spec_validator.spec import FOREACH, LIST_OF, Checker, validate_data_spec
6 |
7 | from .utils import is_something_error
8 |
9 |
class TestBuiltinTypeSpec(unittest.TestCase):
    """Validate Checker with raw Python classes as checks (class-type checks).

    Each case builds a tiny spec class, then asserts that a well-typed payload
    passes validate_data_spec while an ill-typed payload raises TypeError.
    """

    def test_int(self):
        class IntSpec:
            int_field = Checker([int])

        ok_data = dict(int_field=3)
        assert validate_data_spec(ok_data, IntSpec)

        nok_data = dict(int_field='3')
        assert is_something_error(TypeError, validate_data_spec, nok_data, IntSpec)

    def test_float(self):
        class FloatSpec:
            float_field = Checker([float])

        ok_data = dict(float_field=3.0)
        assert validate_data_spec(ok_data, FloatSpec)

        # Exact type match is required; an int is not accepted as float.
        nok_data = dict(float_field=3)
        assert is_something_error(TypeError, validate_data_spec, nok_data, FloatSpec)

    def test_str(self):
        class StrSpec:
            str_field = Checker([str])

        ok_data = dict(str_field='3')
        assert validate_data_spec(ok_data, StrSpec)

        nok_data = dict(str_field=3)
        assert is_something_error(TypeError, validate_data_spec, nok_data, StrSpec)

    def test_none(self):
        class NoneSpec:
            none_field = Checker([type(None)])

        ok_data = dict(none_field=None)
        assert validate_data_spec(ok_data, NoneSpec)

        nok_data = dict(none_field=3)
        assert is_something_error(TypeError, validate_data_spec, nok_data, NoneSpec)

    def test_bool(self):
        class BoolSpec:
            bool_field = Checker([bool])

        ok_data = dict(bool_field=False)
        assert validate_data_spec(ok_data, BoolSpec)

        nok_data = dict(bool_field='True')
        assert is_something_error(TypeError, validate_data_spec, nok_data, BoolSpec)

    def test_list(self):
        class ListSpec:
            list_field = Checker([list])

        ok_data = dict(list_field=[1, 2, 3])
        assert validate_data_spec(ok_data, ListSpec)

        nok_data = dict(list_field=dict(a=2, b=4))
        assert is_something_error(TypeError, validate_data_spec, nok_data, ListSpec)

    def test_dict(self):
        class DictSpec:
            dict_field = Checker([dict])

        ok_data = dict(dict_field=dict(a=2, b=4))
        assert validate_data_spec(ok_data, DictSpec)

        nok_data = dict(dict_field=[1, 2, 3])
        assert is_something_error(TypeError, validate_data_spec, nok_data, DictSpec)

    def test_date_object(self):
        class DateObjSpec:
            date_object_field = Checker([datetime.date])

        ok_data = dict(date_object_field=datetime.date(2023, 2, 9))
        assert validate_data_spec(ok_data, DateObjSpec)

        # datetime subclasses date but must not satisfy an exact date check.
        nok_data = dict(date_object_field=datetime.datetime(2023, 2, 9, 12, 34))
        assert is_something_error(TypeError, validate_data_spec, nok_data, DateObjSpec)

    def test_datetime_object(self):
        class DatetimeObjSpec:
            datetime_object_field = Checker([datetime.datetime])

        ok_data = dict(datetime_object_field=datetime.datetime(2023, 2, 9, 12, 34))
        assert validate_data_spec(ok_data, DatetimeObjSpec)

        nok_data = dict(datetime_object_field=datetime.date(2023, 2, 9))
        assert is_something_error(TypeError, validate_data_spec, nok_data, DatetimeObjSpec)

    def test_uuid(self):
        class UuidSpec:
            uuid_field = Checker([uuid.UUID])

        uuid_inst = uuid.UUID('00000000-0000-0000-0000-000000000000')
        ok_data = dict(uuid_field=uuid_inst)
        assert validate_data_spec(ok_data, UuidSpec)

        nok_data = dict(uuid_field='z78ff51b-a354-4819-b2dd-bfaede3a8be5')
        assert is_something_error(TypeError, validate_data_spec, nok_data, UuidSpec)

    def test_iteration_of_types(self):
        class ListOfIntSpec:
            list_of_int_field = Checker([LIST_OF], LIST_OF=int)

        class ListOfStrSpec:
            list_of_str_field = Checker([LIST_OF], LIST_OF=str)

        # Renamed from ForeachIntSpec: the spec actually checks bool elements.
        class ForeachBoolSpec:
            foreach_bool_field = Checker([FOREACH], FOREACH=bool)

        ok_data = dict(list_of_int_field=[3])
        assert validate_data_spec(ok_data, ListOfIntSpec)

        nok_data = dict(list_of_int_field=['3'])
        assert is_something_error(TypeError, validate_data_spec, nok_data, ListOfIntSpec)

        ok_data = dict(list_of_str_field=['3'])
        assert validate_data_spec(ok_data, ListOfStrSpec)

        nok_data = dict(list_of_str_field=[True])
        assert is_something_error(TypeError, validate_data_spec, nok_data, ListOfStrSpec)

        ok_data = dict(foreach_bool_field=[False])
        assert validate_data_spec(ok_data, ForeachBoolSpec)

        nok_data = dict(foreach_bool_field=[3])
        assert is_something_error(TypeError, validate_data_spec, nok_data, ForeachBoolSpec)
139 |
--------------------------------------------------------------------------------
/test/test_decorator_drf.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | import unittest
3 | from unittest.mock import patch
4 |
5 | from parameterized import parameterized
6 |
7 | from data_spec_validator.decorator import dsv, dsv_request_meta
8 | from data_spec_validator.spec import DIGIT_STR, LIST_OF, ONE_OF, STR, Checker, ErrorMode, dsv_feature
9 |
10 | from .utils import is_drf_installed, make_request
11 |
try:
    from django.conf import settings
    from django.test import RequestFactory
    from django.views import View
    from rest_framework.exceptions import ParseError
    from rest_framework.request import Request, override_method

    # Configure Django settings once at import time so RequestFactory/View work
    # without a real project; any failure is tolerated because the
    # @unittest.skipUnless guard below skips the suite when DRF is unavailable.
    settings.configure()
except Exception:
    # To skip E402 module level import not at top of file
    pass
23 |
24 |
@unittest.skipUnless(is_drf_installed(), 'DRF is not installed')
class TestDSVDRF(unittest.TestCase):
    """Exercise the dsv / dsv_request_meta decorators against DRF Request objects."""

    def test_should_check_name_url_params(self):
        # arrange
        class _ViewSpec:
            named_arg = Checker([DIGIT_STR])

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, request, named_arg):
                pass

        factory = RequestFactory()
        req = Request(factory.request())
        view = _View()

        # action & assert
        view.decorated_func(req, named_arg='1')  # should pass validation

        with self.assertRaises(ParseError):
            view.decorated_func(req, named_arg='')

    def test_data_and_url_params_should_not_have_intersection(self):
        # arrange
        class _ViewSpec:
            pass

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, request, named_arg):
                pass

        factory = RequestFactory()
        wsgi_req = factory.request()
        # Same key in query data and in the named URL kwargs triggers the conflict check.
        wsgi_req.GET = {'named_arg': ''}
        req = Request(wsgi_req)
        view = _View()

        # action & assert
        with self.assertRaises(RuntimeError):
            view.decorated_func(req, named_arg='')

    @parameterized.expand(itertools.product([dsv, dsv_request_meta], ['GET', 'POST']))
    def test_data_and_path_named_param_should_combine_together(self, dsv_deco, method):
        # arrange
        payload = {'test_a': 'TEST A'}
        if dsv_deco == dsv:
            fake_request = make_request(Request, method=method, data=payload)
        elif dsv_deco == dsv_request_meta:
            # dsv_request_meta validates request.META, so carry the payload as headers.
            fake_request = make_request(Request, method=method, headers=payload)
        else:
            assert False

        kwargs = {'test_b': 'TEST_B'}

        class _ViewSpec:
            test_a = Checker([ONE_OF], ONE_OF='TEST A')
            test_b = Checker([ONE_OF], ONE_OF='TEST_B')

        class _View(View):
            @dsv_deco(_ViewSpec)
            def decorated_func(self, req, *_args, **_kwargs):
                pass

        view = _View(request=fake_request)
        view.decorated_func(fake_request, **kwargs)

    @parameterized.expand(['PUT', 'PATCH', 'DELETE'])
    def test_query_params_with_data(self, method):
        # arrange
        qs = 'q_a=3&q_b=true&d.o.t=dot&array[]=a1&array[]=a2&array[]=a3'
        payload = {'test_a': 'TEST A', 'test_f[]': [1, 2, 3]}

        fake_request = make_request(Request, method='POST', data=payload, qs=qs)

        kwargs = {'test_b': 'TEST_B', 'test_c.d.e': 'TEST C.D.E'}

        @dsv_feature(strict=True)
        class _ViewSpec:
            q_a = Checker([LIST_OF], LIST_OF=STR)
            q_b = Checker([LIST_OF], LIST_OF=STR)
            test_a = Checker([ONE_OF], ONE_OF='TEST A')
            test_b = Checker([ONE_OF], ONE_OF='TEST_B')
            test_c_d_e = Checker([ONE_OF], ONE_OF='TEST C.D.E', alias='test_c.d.e')
            test_f_array = Checker([LIST_OF], LIST_OF=int, alias='test_f[]')
            d_o_t = Checker([LIST_OF], LIST_OF=str, alias='d.o.t')
            array = Checker([LIST_OF], LIST_OF=str, alias='array[]')

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, req, *_args, **_kwargs):
                return True

        view = _View(request=fake_request)
        with override_method(view, fake_request, method) as request:
            assert view.decorated_func(request, **kwargs)

    def test_req_list_data_with_no_multirow_set(self):
        # arrange
        payload = [{'test_a': 'TEST A1'}, {'test_a': 'TEST A2'}, {'test_a': 'TEST A3'}]
        fake_request = make_request(Request, method='POST', data=payload)
        kwargs = {'test_b': 'TEST_B'}

        class _ViewSingleRowSpec:
            test_a = Checker([STR])

        class _View(View):
            @dsv(_ViewSingleRowSpec)
            def decorated_func(self, request, *_args, **_kwargs):
                pass

        view = _View(request=fake_request)
        view.decorated_func(fake_request, **kwargs)

    def test_req_list_data_with_multirow_true(self):
        # arrange
        payload = [{'test_a': 'TEST A1'}, {'test_a': 'TEST A2'}, {'test_a': 'TEST A3'}]
        fake_request = make_request(Request, method='POST', data=payload)
        kwargs = {'test_b': 'TEST_B'}

        class _ViewSingleRowSpec:
            test_a = Checker([STR])

        class _View(View):
            @dsv(_ViewSingleRowSpec, multirow=True)
            def decorated_func(self, request, *_args, **_kwargs):
                pass

        view = _View(request=fake_request)

        # Disable list-type auto-detection to prove multirow=True alone suffices.
        with patch('data_spec_validator.decorator.decorators._is_data_type_list', return_value=False):
            view.decorated_func(fake_request, **kwargs)

    def test_non_view_request(self):
        # arrange
        class _NonViewSpec:
            field_a = Checker([DIGIT_STR])

        class _NonView:
            @dsv(_NonViewSpec)
            def decorated_func(self, request, field_a):
                pass

        factory = RequestFactory()
        req = Request(factory.request())
        non_view = _NonView()

        # action & assert
        non_view.decorated_func(req, field_a='1')  # should pass validation

        fake_args = ['1', '2', 3]
        with self.assertRaises(Exception):
            non_view.decorated_func(fake_args, field_a='1')

    def test_json_response_content(self):
        # arrange
        @dsv_feature(err_mode=ErrorMode.ALL)
        class _ViewSpec:
            field_a = Checker([DIGIT_STR])

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, request, field_a):
                pass

        factory = RequestFactory()
        wsgi_req = factory.request()
        req = Request(wsgi_req)
        view = _View()

        # action & assert
        with self.assertRaises(Exception) as exc_info:
            view.decorated_func(req, field_a='hi')

        self.assertEqual(
            exc_info.exception.detail, {'messages': ["field: _ViewSpec.field_a, reason: 'hi' is not a digit str"]}
        )
202 |
203 |
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
206 |
--------------------------------------------------------------------------------
/data_spec_validator/decorator/decorators.py:
--------------------------------------------------------------------------------
1 | import json
2 | from functools import wraps
3 | from typing import Dict, List, Union
4 |
5 | from data_spec_validator.spec import DSVError, raise_if, validate_data_spec
6 |
7 | try:
8 | from django.core.handlers.asgi import ASGIRequest
9 | from django.core.handlers.wsgi import WSGIRequest
10 | from django.http import HttpResponseBadRequest, HttpResponseForbidden, JsonResponse, QueryDict
11 | from django.views.generic.base import View
12 | except ModuleNotFoundError as e:
13 | print(f'[DSV][WARNING] decorator: "dsv" cannot be used, {e}')
14 |
15 | _enabled_drf = False
16 | try:
17 | import rest_framework.exceptions as drf_exceptions
18 | from rest_framework.request import Request
19 |
20 | _enabled_drf = True
21 | except ModuleNotFoundError:
22 | print('[DSV][INFO] decorator: using "dsv" without djangorestframework')
23 |
24 |
class ValidationError(Exception):
    """Raised when spec validation fails on a value (maps to HTTP 400)."""

    def __init__(self, message):
        # Forward to Exception so str(err) and err.args stay meaningful.
        super().__init__(message)
        self.message = message
28 |
29 |
class PermissionDenied(Exception):
    """Raised when spec validation fails on a permission check (maps to HTTP 403)."""

    def __init__(self, message):
        # Forward to Exception so str(err) and err.args stay meaningful.
        super().__init__(message)
        self.message = message
33 |
34 |
class ParseError(Exception):
    """Raised when the request payload cannot be parsed/validated (maps to HTTP 400)."""

    def __init__(self, message):
        # Forward to Exception so str(err) and err.args stay meaningful.
        super().__init__(message)
        self.message = message
38 |
39 |
def _is_wsgi_request(obj):
    # Plain Django request served through WSGI.
    return isinstance(obj, WSGIRequest)
42 |
43 |
def _is_asgi_request(obj):
    # Plain Django request served through ASGI.
    return isinstance(obj, ASGIRequest)
46 |
47 |
def _is_drf_request(obj):
    # DRF Request; only possible when djangorestframework was importable.
    return _enabled_drf and isinstance(obj, Request)
50 |
51 |
def _is_request(obj):
    # A request is any of the supported Django/DRF request flavors.
    return any(check(obj) for check in (_is_wsgi_request, _is_asgi_request, _is_drf_request))
54 |
55 |
def _is_view(obj):
    """Return True when *obj* is a Django class-based view instance."""
    # isinstance is the idiomatic equivalent of issubclass(type(obj), View).
    return isinstance(obj, View)
58 |
59 |
def _combine_named_params(data, **kwargs):
    """
    Merge named URL parameters (kwargs) into the request data.

    Raises RuntimeError (via raise_if) when a named URL parameter collides with
    a key already present in the data payload.
    """

    def combine_params(_data, params):
        raise_if(bool(set(_data.keys()) & set(params.keys())), RuntimeError('Data and URL named param have conflict'))

        if isinstance(_data, QueryDict):
            # Rebuild a mutable QueryDict so multi-value semantics are preserved.
            qd = QueryDict(mutable=True)
            qd.update(_data)
            qd.update(params)
            return qd

        return {**_data, **params}

    # Named URL parameters should consider as params of the data spec.
    # isinstance instead of type(...) == list: list subclasses are handled too.
    if isinstance(data, list):
        # Multi-row payload: merge the params into every row.
        return [combine_params(datum, kwargs) for datum in data]
    return combine_params(data, kwargs)
78 |
79 |
def _extract_request_meta(req, **kwargs):
    """Return req.META merged with named URL parameters; reject unsupported request types."""
    # Same support matrix as _is_request (WSGI / ASGI / DRF requests).
    raise_if(
        not _is_request(req),
        RuntimeError(f'Unsupported req type, {type(req)}'),
    )
    return _combine_named_params(req.META, **kwargs)
86 |
87 |
def _extract_request_param_data(req, **kwargs):
    """
    Build the dict-like data to validate from a WSGI/ASGI/DRF request, merging
    query parameters, body payload and named URL parameters (kwargs).
    """
    is_wsgi_request = _is_wsgi_request(req)
    is_asgi_request = _is_asgi_request(req)
    is_request = _is_drf_request(req)
    raise_if(
        not is_wsgi_request and not is_asgi_request and not is_request,
        RuntimeError(f'Unsupported req type, {type(req)}'),
    )

    def _collect_data(method, req_qp, req_data) -> Dict:
        # GET requests carry everything in the query params.
        if method == 'GET':
            return req_qp
        else:
            if req_qp and issubclass(type(req_qp), dict) and type(req_data) is not list:
                return {**req_qp, **req_data}
            # TODO: Don't care about the query_params if it's not a dict or the payload is in list.
            return req_data

    def _get_dj_payload(request):
        # Accept any content type that contains "application/json" (e.g. with a charset suffix).
        if request.content_type and 'application/json' in request.content_type:
            try:
                return request.body and json.loads(request.body) or {}
            except Exception:
                raise ParseError('Unable to parse request body as JSON')
        return request.POST

    if is_wsgi_request or is_asgi_request:
        data = _collect_data(req.method, req.GET, _get_dj_payload(req))
    else:
        # DRF requests expose parsed payload/query via .data/.query_params.
        data = _collect_data(req.method, req.query_params, req.data)

    return _combine_named_params(data, **kwargs)
120 |
121 |
def _extract_request(*args):
    """Locate the request among positional args: first arg, its .request, or any later arg."""
    head = args[0]
    if _is_request(head):
        return head
    if hasattr(head, 'request') and _is_request(head.request):
        return head.request
    # Fallback: the first request-like object anywhere in the positional args.
    req = next((arg for arg in args if _is_request(arg)), None)
    raise_if(not req, RuntimeError('Unexpected usage'))
    return req
133 |
134 |
135 | def _is_data_type_list(data: Union[Dict, List]) -> bool:
136 | return type(data) == list
137 |
138 |
def _eval_is_multirow(multirow: bool, data: Union[Dict, List]) -> bool:
    """Decide whether the payload should be validated row-by-row."""
    # NOTE: is_multirow by evaluating list type will be deprecated, so the client must specify multirow=True
    # explicitly in the future.
    return multirow or _is_data_type_list(data)
143 |
144 |
def _do_validate(data, spec, multirow):
    """Run validate_data_spec and translate raised errors into decorator-level exceptions."""
    # Raise exceptions with message if validation failed.
    error = None
    try:
        is_multirow = _eval_is_multirow(multirow, data)
        validate_data_spec(data, spec, multirow=is_multirow)
    except ValueError as value_err:
        # Value-level failures map to a 400-style validation error.
        error = ValidationError(value_err.args)
    except PermissionError as perm_err:
        error = PermissionDenied(perm_err.args)
    except (LookupError, TypeError, RuntimeError, DSVError) as parse_err:
        error = ParseError(parse_err.args)

    # Raised outside the except blocks, keeping the traceback focused on the mapped error.
    if error:
        raise error
160 |
161 |
def _get_error_response(error, use_drf):
    """
    Return the error response based on the error type.
    If the attribute use_drf is True, Raise DRF's exception to let DRF's exception handler do something about it.
    """
    # error.message is the .args tuple of the original exception (see _do_validate).
    error_msg = {'messages': [str(err) for err in error.message]}

    if use_drf:
        err_map = {
            ValidationError: drf_exceptions.ValidationError,
            PermissionDenied: drf_exceptions.PermissionDenied,
            ParseError: drf_exceptions.ParseError,
        }
        raise err_map[error.__class__](error_msg)

    resp_map = {
        ValidationError: HttpResponseBadRequest,
        PermissionDenied: HttpResponseForbidden,
        ParseError: HttpResponseBadRequest,
    }

    # Plain Django path: answer with a JSON body and the matching HTTP status code.
    status_code = resp_map[error.__class__].status_code
    return JsonResponse(error_msg, status=status_code)
185 |
186 |
def dsv(spec, multirow=False):
    """
    Used at any function where view instance or request is the first argument.
    e.g. 1) APIView request method (get/post/put/patch/delete)
         2) |ListModelMixin.has_list_permission| & |RetrieveModelMixin.has_retrieve_permission| &
            |DestroyModelMixin.has_destroy_permission| & |UpdateModelMixin.has_update_permission| &
            & |CreateModelMixin.has_create_permission (NOTE: bulk_create must be False)|

    Args:
        spec: spec class the request data is validated against.
        multirow: validate the payload as a list of rows, each row against the spec.

    On validation failure, returns an error response (or raises a DRF exception when
    the request is a DRF Request); otherwise calls through to the wrapped function.
    """

    def wrapper(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            req = _extract_request(*args)
            data = _extract_request_param_data(req, **kwargs)

            try:
                _do_validate(data, spec, multirow)
            except (ValidationError, PermissionDenied, ParseError) as err:
                return _get_error_response(err, use_drf=_is_drf_request(req))

            return func(*args, **kwargs)

        return wrapped

    return wrapper
212 |
213 |
def dsv_request_meta(spec):
    """Like dsv, but validates the request's META (headers) instead of its payload."""

    def wrapper(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            req = _extract_request(*args)
            meta = _extract_request_meta(req, **kwargs)

            try:
                _do_validate(meta, spec, multirow=False)
            except (ValidationError, PermissionDenied, ParseError) as err:
                return _get_error_response(err, use_drf=_is_drf_request(req))

            return func(*args, **kwargs)

        return wrapped

    return wrapper
231 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/checks.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import functools
3 | from enum import Enum
4 | from functools import lru_cache, reduce
5 | from inspect import isclass
6 | from typing import Any, Dict, List, Optional, Tuple, Type, Union
7 |
8 | from .defines import (
9 | AMOUNT,
10 | AMOUNT_RANGE,
11 | BOOL,
12 | COND_EXIST,
13 | DATE,
14 | DATE_OBJECT,
15 | DATE_RANGE,
16 | DATETIME_OBJECT,
17 | DECIMAL_PLACE,
18 | DICT,
19 | DIGIT_STR,
20 | DUMMY,
21 | EMAIL,
22 | FLOAT,
23 | FOREACH,
24 | INT,
25 | JSON,
26 | JSON_BOOL,
27 | LENGTH,
28 | LIST,
29 | LIST_OF,
30 | NONE,
31 | NUMBER,
32 | ONE_OF,
33 | RAW_CHECK_TYPE,
34 | REGEX,
35 | SPEC,
36 | STR,
37 | UUID,
38 | BaseValidator,
39 | BaseWrapper,
40 | _wrapper_splitter,
41 | )
42 | from .utils import raise_if
43 |
44 | _TYPE = '_type_'
45 |
46 |
@lru_cache(1)
def _get_default_check_2_validator_map() -> Dict[str, BaseValidator]:
    """
    Build the mapping of built-in check names to validator singletons.

    Cached with lru_cache(1) so the instances are created once; the validators
    module is imported lazily to avoid a circular import.
    """
    from data_spec_validator.spec.validators import (
        AmountRangeValidator,
        AmountValidator,
        BoolValidator,
        CondExistValidator,
        DateObjectValidator,
        DateRangeValidator,
        DatetimeObjectValidator,
        DateValidator,
        DecimalPlaceValidator,
        DictValidator,
        DigitStrValidator,
        DummyValidator,
        EmailValidator,
        FloatValidator,
        ForeachValidator,
        IntValidator,
        JSONBoolValidator,
        JSONValidator,
        LengthValidator,
        ListOfValidator,
        ListValidator,
        NoneValidator,
        NumberValidator,
        OneOfValidator,
        RegexValidator,
        SpecValidator,
        StrValidator,
        TypeValidator,
        UUIDValidator,
    )

    return {
        INT: IntValidator(),
        FLOAT: FloatValidator(),
        NUMBER: NumberValidator(),
        STR: StrValidator(),
        DIGIT_STR: DigitStrValidator(),
        BOOL: BoolValidator(),
        DICT: DictValidator(),
        LIST: ListValidator(),
        NONE: NoneValidator(),
        JSON: JSONValidator(),
        JSON_BOOL: JSONBoolValidator(),
        DATE_OBJECT: DateObjectValidator(),
        DATETIME_OBJECT: DatetimeObjectValidator(),
        ONE_OF: OneOfValidator(),
        SPEC: SpecValidator(),
        LIST_OF: ListOfValidator(),
        LENGTH: LengthValidator(),
        AMOUNT: AmountValidator(),
        AMOUNT_RANGE: AmountRangeValidator(),
        DECIMAL_PLACE: DecimalPlaceValidator(),
        DATE: DateValidator(),
        DATE_RANGE: DateRangeValidator(),
        DUMMY: DummyValidator(),
        EMAIL: EmailValidator(),
        UUID: UUIDValidator(),
        REGEX: RegexValidator(),
        COND_EXIST: CondExistValidator(),
        FOREACH: ForeachValidator(),
        _TYPE: TypeValidator(),
    }
112 |
113 |
def _get_wrapper_cls_map() -> Dict[str, Type[BaseWrapper]]:
    """Map wrapper prefix names (e.g. NotWrapper.name) to their wrapper classes."""
    from .wrappers import NotWrapper

    return {
        NotWrapper.name: NotWrapper,
    }
120 |
121 |
def _get_check_2_validator_map() -> Dict[str, BaseValidator]:
    """Combine built-in validators with custom ones; custom entries win on key clashes."""
    from .custom_spec.defines import get_custom_check_2_validator_map

    return {**_get_default_check_2_validator_map(), **get_custom_check_2_validator_map()}
129 |
130 |
def get_validator(check: str) -> Union[BaseValidator, BaseWrapper]:
    """
    Resolve a check name to its validator, honoring an optional wrapper prefix
    separated by _wrapper_splitter (e.g. '<wrapper><splitter><CHECK>').
    """
    validator_map = _get_check_2_validator_map()

    found_idx = check.find(_wrapper_splitter)
    # found_idx == -1 makes check[0:] the whole name; unknown checks fall back to DUMMY.
    ori_validator = validator_map.get(check[found_idx + 1 :], validator_map[DUMMY])
    if found_idx > 0:
        # A wrapper prefix exists (splitter found past index 0): wrap the validate call.
        wrapper_cls_map = _get_wrapper_cls_map()
        wrapper_cls = wrapper_cls_map.get(check[:found_idx])
        wrapper = wrapper_cls(ori_validator.validate)
        return wrapper
    else:
        return ori_validator
143 |
144 |
class CheckerOP(Enum):
    """Logical operator applied over a Checker's checks."""

    ALL = 'all'
    # All checks will be validated even when the OP is 'any'
    ANY = 'any'
149 |
150 |
151 | class Checker:
    def __init__(
        self,
        raw_checks: List[RAW_CHECK_TYPE],
        optional: bool = False,
        allow_none: bool = False,
        op: CheckerOP = CheckerOP.ALL,
        alias: Optional[str] = None,
        **kwargs,
    ):
        """
        raw_checks: list of str/class (Check)
        optional: boolean
            Set optional to True, the validation process will be passed if the field is absent
        allow_none: boolean
            Set allow_none to True, the field value can be None
        op: CheckerOP
            Logical operator combining the checks (ALL: every check must pass; ANY: one suffices)
        alias: str or None
            A string that represents the field name which will be used when extracting values from data payload
        kwargs: dict
            Per-check arguments with upper-cased keywords (e.g. LIST_OF=int); see _build_extra
        """
        self.checks, class_check_type = self._sanitize_checks(raw_checks)

        self._op = op
        self._optional = optional
        self._allow_none = allow_none
        self._alias = alias

        # Validate kwargs (upper-cased, no forbidden names) before building extras.
        self._ensure(kwargs)
        self.extra = self._build_extra(class_check_type, kwargs)
181 |
182 | @staticmethod
183 | def _sanitize_checks(raw_checks: List[RAW_CHECK_TYPE]) -> Tuple[List[str], Optional[Type[Any]]]:
184 | class_type_check = None
185 |
186 | def _is_checkable(elem: Any) -> bool:
187 | return isclass(elem) or type(elem) is str
188 |
189 | def _purify_check(rc: RAW_CHECK_TYPE) -> Union[str, Type[Any]]:
190 | nonlocal class_type_check
191 | if isclass(rc):
192 | raise_if(not _is_checkable(rc), TypeError(f'A qualified CHECK is required, but got {rc}'))
193 | class_type_check = rc
194 | return _TYPE
195 | return rc
196 |
197 | def _convert_class_check(acc: List, raw_check: RAW_CHECK_TYPE) -> List[str]:
198 | raise_if(not _is_checkable(raw_check), TypeError(f'A qualified CHECK is required, but got {raw_check}'))
199 | acc.append(_purify_check(raw_check))
200 | return acc
201 |
202 | ensured = functools.reduce(_convert_class_check, raw_checks, [])
203 | return ensured, class_type_check
204 |
205 | @staticmethod
206 | def _build_extra(class_type_check: Optional[Type[Any]], check_kwargs: Dict[str, Any]) -> Dict[str, Any]:
207 | temp = {_TYPE: class_type_check} if class_type_check else {}
208 | all_keys = set(_get_check_2_validator_map().keys())
209 |
210 | for arg_k, arg_v in check_kwargs.items():
211 | lower_arg_k = arg_k.lower()
212 | if lower_arg_k in all_keys:
213 | temp.update({lower_arg_k: arg_v})
214 |
215 | extra = copy.deepcopy(temp)
216 | for arg_k, arg_v in temp.items():
217 | if arg_k in {LIST_OF, FOREACH}:
218 | if isclass(arg_v) and _TYPE not in extra:
219 | extra.update({arg_k: _TYPE})
220 | extra.update({_TYPE: arg_v})
221 | return extra
222 |
223 | def _ensure(self, check_kwargs: Dict):
224 | def __ensure_upper_case(_kwargs):
225 | non_upper = list(filter(lambda k: not k.isupper(), _kwargs.keys()))
226 | raise_if(bool(non_upper), TypeError(f'Keyword must be upper-cased: {non_upper}'))
227 |
228 | def __ensure_no_repeated_forbidden(_kwargs: Dict):
229 | blacklist = {'optional', 'allow_none', 'op'}
230 |
231 | def _check_in_blacklist(acc: set, key: str):
232 | if key.lower() in blacklist:
233 | acc.add(key)
234 | return acc
235 |
236 | forbidden = list(reduce(_check_in_blacklist, _kwargs.keys(), set()))
237 | forbidden.sort()
238 | raise_if(bool(forbidden), TypeError(f'Forbidden keyword arguments: {", ".join(forbidden)}'))
239 |
240 | raise_if(
241 | self._optional and len(self.checks) == 0, ValueError('Require at least 1 check when set optional=True')
242 | )
243 |
244 | __ensure_upper_case(check_kwargs)
245 | __ensure_no_repeated_forbidden(check_kwargs)
246 |
247 | @property
248 | def allow_none(self) -> bool:
249 | return self._allow_none
250 |
251 | @property
252 | def allow_optional(self) -> bool:
253 | return self._optional
254 |
255 | @property
256 | def is_op_any(self) -> bool:
257 | return self._op == CheckerOP.ANY
258 |
259 | @property
260 | def is_op_all(self) -> bool:
261 | return self._op == CheckerOP.ALL
262 |
263 | @property
264 | def alias(self) -> str:
265 | return self._alias
266 |
--------------------------------------------------------------------------------
/test/test_decorator_dj.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | import json
3 | import unittest
4 | from unittest.mock import patch
5 |
6 | from parameterized import parameterized, parameterized_class
7 |
8 | from data_spec_validator.decorator import dsv, dsv_request_meta
9 | from data_spec_validator.decorator.decorators import ParseError
10 | from data_spec_validator.spec import DIGIT_STR, LIST_OF, ONE_OF, STR, Checker, ErrorMode, dsv_feature
11 |
12 | from .utils import is_django_installed, make_request
13 |
try:
    from django.conf import settings
    from django.core.handlers.asgi import ASGIRequest
    from django.core.handlers.wsgi import WSGIRequest
    from django.http import HttpResponse, JsonResponse
    from django.test import RequestFactory
    from django.views import View

    # Configure Django with default settings so RequestFactory/View can be used
    # without a project; the tests below are skipped entirely when Django is absent.
    settings.configure()
except Exception:
    # To skip E402 module level import not at top of file
    pass
26 |
27 |
@parameterized_class(('request_class',), [(ASGIRequest,), (WSGIRequest,)])
@unittest.skipUnless(is_django_installed(), 'Django is not installed')
class TestDSVDJ(unittest.TestCase):
    """Tests for the dsv/dsv_request_meta decorators on plain Django views.

    Parameterized so every test runs once with an ASGIRequest and once with a
    WSGIRequest, exposed as ``self.request_class``.
    """

    def test_decorated_func_returns_error_response(self):
        """A spec violation converts the decorated view's result into a 400."""
        # arrange
        class _ViewSpec:
            named_arg = Checker([DIGIT_STR])

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, request, named_arg):
                return HttpResponse(status=200)

        factory = RequestFactory()
        req = factory.request()
        view = _View()

        # action
        resp_valid = view.decorated_func(req, named_arg='1')
        resp_invalid = view.decorated_func(req, named_arg='')

        # assert
        self.assertEqual(resp_valid.status_code, 200)
        self.assertEqual(resp_invalid.status_code, 400)

    def test_should_check_name_url_params(self):
        """Named URL parameters are validated against the spec."""
        # arrange
        class _ViewSpec:
            named_arg = Checker([DIGIT_STR])

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, request, named_arg):
                return HttpResponse(status=200)

        factory = RequestFactory()
        wsgi_req = factory.request()
        view = _View()

        # action & assert
        view.decorated_func(wsgi_req, named_arg='1')  # should pass validation

        resp = view.decorated_func(wsgi_req, named_arg='')
        self.assertIsInstance(resp, JsonResponse)
        self.assertEqual(resp.status_code, 400)

    def test_data_and_url_params_should_not_have_intersection(self):
        """A key present in both query data and URL kwargs raises RuntimeError."""
        # arrange
        class _ViewSpec:
            pass

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, request, named_arg):
                pass

        factory = RequestFactory()
        wsgi_req = factory.request()
        wsgi_req.GET = {'named_arg': ''}
        view = _View()

        # action & assert
        with self.assertRaises(RuntimeError):
            view.decorated_func(wsgi_req, named_arg='')

    @parameterized.expand(itertools.product([dsv, dsv_request_meta], ['GET', 'POST']))
    def test_data_and_path_named_param_should_combine_together(self, dsv_deco, method):
        """Request payload (or META for dsv_request_meta) merges with path kwargs."""
        # arrange
        payload = {'test_a': 'TEST A'}
        if dsv_deco == dsv:
            fake_request = make_request(self.request_class, method=method, data=payload)
        elif dsv_deco == dsv_request_meta:
            fake_request = make_request(self.request_class, method=method, headers=payload)
        else:
            assert False

        kwargs = {'test_b': 'TEST_B'}

        class _ViewSpec:
            test_a = Checker([ONE_OF], ONE_OF='TEST A')
            test_b = Checker([ONE_OF], ONE_OF='TEST_B')

        class _View(View):
            @dsv_deco(_ViewSpec)
            def decorated_func(self, req, *_args, **_kwargs):
                pass

        view = _View(request=fake_request)
        view.decorated_func(fake_request, **kwargs)

    @parameterized.expand(itertools.product(['POST', 'PUT', 'PATCH', 'DELETE'], [True, False]))
    def test_query_params_with_data(self, method, is_json):
        """Query string, body (form or JSON) and path kwargs all reach a strict spec."""
        # arrange
        qs = 'q_a=3&q_b=true&d.o.t=dot&array[]=a1&array[]=a2&array[]=a3'
        payload = {'test_a': 'TEST A', 'test_f[]': [1, 2, 3]}

        if is_json:
            payload = json.dumps(payload).encode('utf-8')
        fake_request = make_request(self.request_class, method=method, data=payload, qs=qs, is_json=is_json)

        kwargs = {'test_b': 'TEST_B', 'test_c.d.e': 'TEST C.D.E'}

        @dsv_feature(strict=True)
        class _ViewSpec:
            q_a = Checker([LIST_OF], LIST_OF=STR)
            q_b = Checker([LIST_OF], LIST_OF=STR)
            test_a = Checker([ONE_OF], ONE_OF='TEST A')
            test_b = Checker([ONE_OF], ONE_OF='TEST_B')
            test_c_d_e = Checker([ONE_OF], ONE_OF='TEST C.D.E', alias='test_c.d.e')
            test_f_array = Checker([LIST_OF], LIST_OF=int, alias='test_f[]')
            d_o_t = Checker([LIST_OF], LIST_OF=str, alias='d.o.t')
            array = Checker([LIST_OF], LIST_OF=str, alias='array[]')

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, req, *_args, **_kwargs):
                return True

        view = _View(request=fake_request)
        assert view.decorated_func(fake_request, **kwargs)

    @parameterized.expand(['POST', 'PUT', 'PATCH', 'DELETE'])
    def test_query_params_with_data_in_invalid_json_format(self, method):
        """A malformed JSON body raises ParseError before validation."""
        payload = 'invalid json data'

        fake_request = make_request(self.request_class, method=method, data=payload, is_json=True)

        class _ViewSpec:
            pass

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, req, *_args, **_kwargs):
                return True

        view = _View(request=fake_request)
        with self.assertRaises(ParseError):
            assert view.decorated_func(fake_request)

    def test_req_list_data_with_no_multirow_set(self):
        """A list payload with the default multirow setting passes without error."""
        # arrange
        payload = [{'test_a': 'TEST A1'}, {'test_a': 'TEST A2'}, {'test_a': 'TEST A3'}]
        fake_request = make_request(self.request_class, method='POST', data=payload)
        kwargs = {'test_b': 'TEST_B'}

        class _ViewSingleRowSpec:
            test_a = Checker([STR])

        class _View(View):
            @dsv(_ViewSingleRowSpec)
            def decorated_func(self, request, *_args, **_kwargs):
                pass

        view = _View(request=fake_request)
        view.decorated_func(fake_request, **kwargs)

    def test_req_list_data_with_multirow_true(self):
        """multirow=True with a list payload; list detection is patched off here."""
        # arrange
        payload = [{'test_a': 'TEST A1'}, {'test_a': 'TEST A2'}, {'test_a': 'TEST A3'}]
        fake_request = make_request(self.request_class, method='POST', data=payload)
        kwargs = {'test_b': 'TEST_B'}

        class _ViewSingleRowSpec:
            test_a = Checker([STR])

        class _View(View):
            @dsv(_ViewSingleRowSpec, multirow=True)
            def decorated_func(self, request, *_args, **_kwargs):
                pass

        view = _View(request=fake_request)

        with patch('data_spec_validator.decorator.decorators._is_data_type_list', return_value=False):
            view.decorated_func(fake_request, **kwargs)

    def test_non_view_request(self):
        """dsv also works on non-View methods whose 2nd argument is a request."""
        # arrange
        class _NonViewSpec:
            field_a = Checker([DIGIT_STR])

        class _NonView:
            @dsv(_NonViewSpec)
            def decorated_func(self, request, field_a):
                pass

        factory = RequestFactory()
        wsgi_req = factory.request()
        non_view = _NonView()

        # action & assert
        non_view.decorated_func(wsgi_req, field_a='1')  # should pass validation

        fake_args = ['1', '2', 3]
        with self.assertRaises(Exception):
            non_view.decorated_func(fake_args, field_a='1')

    def test_json_response_content(self):
        """With ErrorMode.ALL, the 400 JSON body carries every failure message."""
        # arrange
        @dsv_feature(err_mode=ErrorMode.ALL)
        class _ViewSpec:
            named_arg = Checker([DIGIT_STR])

        class _View(View):
            @dsv(_ViewSpec)
            def decorated_func(self, request, named_arg):
                return HttpResponse(status=200)

        factory = RequestFactory()
        req = factory.request()
        view = _View()

        # action
        resp_valid = view.decorated_func(req, named_arg='1')
        resp_invalid = view.decorated_func(req, named_arg='hi')

        # assert
        self.assertIsInstance(resp_valid, HttpResponse)
        self.assertEqual(resp_valid.status_code, 200)

        self.assertIsInstance(resp_invalid, JsonResponse)
        self.assertEqual(resp_invalid.status_code, 400)
        self.assertEqual(
            json.loads(resp_invalid.content),
            {'messages': ["field: _ViewSpec.named_arg, reason: 'hi' is not a digit str"]},
        )
253 |
254 |
# Allow running this test module directly (e.g. python test/test_decorator_dj.py).
if __name__ == '__main__':
    unittest.main()
257 |
--------------------------------------------------------------------------------
/DEPLOYMENT.md:
--------------------------------------------------------------------------------
1 | # Deployment Guide
2 |
3 | This document describes the process for deploying a new version of `data-spec-validator` to PyPI.
4 |
5 | ## Deployment Methods
6 |
7 | There are two ways to deploy to PyPI:
8 |
9 | 1. **Automated via GitHub Actions** (Recommended) - Automatically publishes when you create a GitHub release
10 | 2. **Manual deployment** - Using `twine` directly from your local machine
11 |
12 | ## Method 1: Automated Deployment via GitHub Actions (Recommended)
13 |
14 | ### Initial Setup
15 |
16 | #### 1. Generate PyPI API Tokens
17 |
18 | **For Production PyPI:**
19 |
1. Go to <https://pypi.org/manage/account/token/>
21 | 2. Click "Add API token"
22 | 3. Token name: `data-spec-validator-github-actions` (or any name you prefer)
23 | 4. Scope: Select "Project: data-spec-validator" (recommended) or "Entire account"
24 | 5. Click "Add token"
25 | 6. **Copy the token** (starts with `pypi-`) - you won't see it again!
26 |
27 | **For Test PyPI (optional, for manual testing):**
28 |
1. Go to <https://test.pypi.org/manage/account/token/>
30 | 2. Follow the same steps as above
31 | 3. Copy the token
32 |
33 | #### 2. Add Tokens to GitHub Secrets
34 |
1. Go to repository settings: <https://github.com/hardcoretech/data-spec-validator/settings/secrets/actions>
36 | 2. Click **"New repository secret"**
37 | 3. For production PyPI:
38 | - Name: `PYPI_API_TOKEN`
39 | - Secret: Paste your PyPI token (the `pypi-...` string)
40 | - Click "Add secret"
41 | 4. For Test PyPI (if you want manual testing):
42 | - Click "New repository secret" again
43 | - Name: `TEST_PYPI_API_TOKEN`
44 | - Secret: Paste your Test PyPI token
45 | - Click "Add secret"
46 |
47 | ### Deployment Process
48 |
49 | #### Option A: Automatic on Release (Production)
50 |
51 | 1. **Prepare the release** (see "Prepare the Release" section below)
52 | 2. **Create a GitHub Release:**
   - Go to <https://github.com/hardcoretech/data-spec-validator/releases/new>
54 | - Create a new tag: `vX.Y.Z` (e.g., `v3.5.1`)
55 | - Set release title: `vX.Y.Z`
56 | - Copy the changelog entry for this version into the description
57 | - Click "Publish release"
58 | 3. **GitHub Actions will automatically:**
59 | - Build the package
60 | - Upload to production PyPI
   - You can monitor progress at <https://github.com/hardcoretech/data-spec-validator/actions>
62 |
63 | #### Option B: Manual Trigger (Test or Production)
64 |
65 | You can manually trigger the workflow to test deployment or deploy to production:
66 |
67 | 1. **Go to Actions tab:**
   - Visit <https://github.com/hardcoretech/data-spec-validator/actions/workflows/publish.yml>
69 | 2. **Click "Run workflow"**
70 | 3. **Select target:**
71 | - Choose `testpypi` to test deployment (requires `TEST_PYPI_API_TOKEN` secret)
72 | - Choose `pypi` to deploy to production PyPI
73 | 4. **Click "Run workflow"**
74 | 5. Monitor the workflow execution in the Actions tab
75 |
76 | ## Method 2: Manual Deployment
77 |
78 | ### Prerequisites
79 |
80 | - Python 3.8 or higher
81 | - `twine` installed (`pip install twine`)
82 | - PyPI account with maintainer permissions for `data-spec-validator`
83 | - PyPI API token configured
84 |
85 | ### Initial Setup
86 |
87 | #### 1. Install Required Tools
88 |
89 | ```bash
90 | pip install build twine
91 | ```
92 |
93 | #### 2. Configure PyPI Credentials
94 |
95 | **Option 1: Environment Variables (Recommended)**
96 |
97 | ```bash
98 | # Set credentials as environment variables
99 | export TWINE_USERNAME=__token__
100 | export TWINE_PASSWORD=your-pypi-token-here
101 |
102 | # Twine will automatically use these
103 | twine upload dist/*
104 | ```
105 |
106 | **Option 2: Using .pypirc file**
107 |
108 | Create `.pypirc` in the project root (for multiple profiles):
109 |
110 | ```ini
111 | [distutils]
112 | index-servers =
113 | pypi
114 | testpypi
115 |
116 | [pypi]
117 | repository = https://upload.pypi.org/legacy/
118 | username = __token__
119 | password =
120 |
121 | [testpypi]
122 | repository = https://test.pypi.org/legacy/
123 | username = __token__
124 | password =
125 | ```
126 |
127 | **Important:** Never commit `.pypirc` to version control (it's already in `.gitignore`).
128 |
129 | **Option 3: Pass credentials directly**
130 |
131 | ```bash
132 | twine upload dist/* -u __token__ -p "${PYPI_TOKEN}"
133 | ```
134 |
135 | ### Manual Deployment Steps
136 |
137 | #### 1. Prepare the Release
138 |
139 | 1. **Fetch the latest develop branch:**
140 |
141 | ```bash
142 | git fetch origin develop
143 | git checkout develop
144 | git pull origin develop
145 | ```
146 |
147 | 2. **Update the version number:**
148 | - Edit `data_spec_validator/__version__.py`
149 | - Update the version following [Semantic Versioning](https://semver.org/):
150 | - MAJOR version for incompatible API changes
151 | - MINOR version for backwards-compatible functionality additions
152 | - PATCH version for backwards-compatible bug fixes
153 |
154 | 3. **Update the CHANGELOG:**
155 | - Edit `CHANGELOG.md`
156 | - Add a new section at the top with the new version number
157 | - Document all changes since the last release:
158 | - `[feature]` for new features
159 | - `[fix]` for bug fixes
160 | - `[improvement]` for enhancements
161 | - `[behavior-change]` for breaking changes
162 | - `[internal]` for internal refactoring
163 |
164 | 4. **Commit the version bump:**
165 |
166 | ```bash
167 | git add data_spec_validator/__version__.py CHANGELOG.md
168 | git commit -m "bump version to X.Y.Z"
169 | git push origin develop
170 | ```
171 |
172 | #### 2. Build and Deploy
173 |
174 | **Install build tools:**
175 |
176 | ```bash
177 | pip install build twine
178 | ```
179 |
180 | **Build the package:**
181 |
182 | ```bash
183 | # Clean old builds
184 | rm -rf dist/ build/ *.egg-info
185 |
186 | # Build source distribution and wheel
187 | python -m build
188 | ```
189 |
190 | **Upload to Test PyPI (for testing):**
191 |
192 | ```bash
193 | twine upload --repository testpypi dist/*
194 | # Or use a specific profile from .pypirc:
twine upload --config-file .pypirc -r testpypi dist/*
196 | ```
197 |
198 | **Upload to Production PyPI:**
199 |
200 | ```bash
201 | twine upload dist/*
202 | # Or use a specific profile from .pypirc:
203 | twine upload --config-file .pypirc -r pypi dist/*
204 | ```
205 |
206 | **Verify the upload:**
207 |
208 | ```bash
209 | # For Test PyPI
210 | pip install --index-url https://test.pypi.org/simple/ --no-deps data-spec-validator
211 |
212 | # For Production PyPI
213 | pip install --upgrade data-spec-validator
214 | python -c "from data_spec_validator.__version__ import __version__; print(__version__)"
215 | ```
216 |
217 | #### 3. Post-Deployment
218 |
219 | 1. **Create a Git tag:**
220 |
221 | ```bash
222 | git tag vX.Y.Z
223 | git push origin vX.Y.Z
224 | ```
225 |
226 | 2. **Create a GitHub Release:**
   - Go to <https://github.com/hardcoretech/data-spec-validator/releases/new>
228 | - Select the tag you just created
229 | - Copy the changelog entry for this version
230 | - Publish the release
231 |
232 | ## Configuration Files
233 |
234 | - **setup.py**: Package metadata and dependencies
235 | - **pyproject.toml**: Build system requirements (Black configuration)
236 | - **setup.cfg**: Flake8 and isort configuration
237 | - **.pypirc**: PyPI credentials (NOT committed to git)
238 | - **data_spec_validator/__version__.py**: Current version number
239 |
240 | ## Troubleshooting
241 |
242 | ### "Package already exists" error
243 |
244 | - This means the version number is already uploaded to PyPI
245 | - Increment the version number and try again
246 | - PyPI does not allow re-uploading the same version
247 |
248 | ### Authentication errors
249 |
250 | - Verify `.pypirc` credentials are correct
251 | - Ensure you have maintainer permissions for the package
- Generate a new API token from <https://pypi.org/manage/account/token/>
253 |
254 | ### Test PyPI 403 Forbidden error
255 |
256 | - Test PyPI and production PyPI use separate accounts and API tokens
257 | - The version may already exist on Test PyPI (each version can only be uploaded once)
258 | - If testing is needed, increment to a dev version (e.g., `3.5.2.dev1`) for Test PyPI
259 | - Alternatively, skip Test PyPI and deploy directly to production if you're confident in the release
260 |
261 | ### Missing twine error
262 |
263 | - Install twine: `pip install twine`
264 | - Ensure twine is available in your PATH
265 |
266 | ### Build errors
267 |
268 | - Ensure all dependencies are installed: `pip install -r requirements-dev.txt`
269 | - Check that `setup.py` is valid: `python setup.py check`
270 |
271 | ## Quick Reference
272 |
273 | ### Automated Deployment via GitHub Actions (Recommended)
274 |
275 | #### Automatic on Release
276 |
277 | ```bash
278 | # 1. Update version and changelog
279 | git checkout develop
280 | git pull origin develop
281 |
282 | # Edit data_spec_validator/__version__.py
283 | # Edit CHANGELOG.md
284 |
285 | git add data_spec_validator/__version__.py CHANGELOG.md
286 | git commit -m "bump version to X.Y.Z"
287 | git push origin develop
288 |
289 | # 2. Create GitHub release
290 | # Go to: https://github.com/hardcoretech/data-spec-validator/releases/new
291 | # Tag: vX.Y.Z
292 | # Title: vX.Y.Z
293 | # Description: Copy changelog entry
294 | # Click "Publish release"
295 | #
296 | # GitHub Actions will automatically publish to PyPI
297 | ```
298 |
299 | #### Manual Trigger (Test or Production)
300 |
301 | ```bash
302 | # 1. Update version and changelog (same as above)
303 | git checkout develop
304 | git pull origin develop
305 |
306 | # Edit data_spec_validator/__version__.py
307 | # Edit CHANGELOG.md
308 |
309 | git add data_spec_validator/__version__.py CHANGELOG.md
310 | git commit -m "bump version to X.Y.Z"
311 | git push origin develop
312 |
313 | # 2. Manually trigger GitHub Action
314 | # Go to: https://github.com/hardcoretech/data-spec-validator/actions/workflows/publish.yml
315 | # Click "Run workflow"
316 | # Select branch: develop
317 | # Select environment: testpypi or pypi
318 | # Click "Run workflow"
319 | #
320 | # Monitor at: https://github.com/hardcoretech/data-spec-validator/actions
321 | ```
322 |
323 | ### Manual Deployment
324 |
325 | ```bash
326 | # 1. Update version and changelog
327 | git checkout develop
328 | git pull origin develop
329 |
330 | # Edit data_spec_validator/__version__.py
331 | # Edit CHANGELOG.md
332 |
333 | git add data_spec_validator/__version__.py CHANGELOG.md
334 | git commit -m "bump version to X.Y.Z"
335 | git push origin develop
336 |
337 | # 2. Build and deploy
338 | pip install build twine
339 | rm -rf dist/ build/ *.egg-info
340 | python -m build
341 | twine upload dist/*
342 |
343 | # 3. Tag the release
344 | git tag vX.Y.Z
345 | git push origin vX.Y.Z
346 |
347 | # 4. Create GitHub release at:
348 | # https://github.com/hardcoretech/data-spec-validator/releases/new
349 | ```
350 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # data-spec-validator
2 |
3 | ## Why
4 | * To get rid of code snippet like these (... cumbersome and tedious validation)
5 | ``` python
6 | def do_something(params):
7 | val_a_must_int = params.get('a', 0)
8 | val_b_must_be_non_empty_list = params.get('b', [])
9 | # if key c presents, value c must be a date string between '2000-01-01' to '2020-01-01'
10 | val_c_might_be_none = params.get('c', None)
11 |
12 | # check type
13 | if type(val_a_must_int) != int:
14 | raise XXX
15 |
16 | # check type & value
    if type(val_b_must_be_non_empty_list) != list or len(val_b_must_be_non_empty_list) == 0:
18 | raise XXX
19 |
20 | # if value exists, check its value
21 | if val_c_might_be_none is not None:
        date_c = datetime.strptime(val_c_might_be_none, '%Y-%m-%d')
23 | date_20000101 = datetime.date(2000, 1, 1)
24 | date_20200101 = datetime.date(2020, 1, 1)
25 | if not (date_20000101 <= date_c <= date_20200101):
26 | raise XXX
27 | ...
28 | # do something actually
29 | ```
30 |
31 | ## Installation
32 | - Basic usage:
33 | ```shell
34 | pip install data-spec-validator
35 | ```
- Advanced usage (decorator)
37 | 1. The decorator function `dsv` may depend on `Django` (support v3.0 or later) or `djangorestframework`.
38 | ```shell
39 | pip install data-spec-validator[decorator-dj] # Django Only
40 | pip install data-spec-validator[decorator] # Django Rest Framework
41 | ```
42 |
43 | ## Quick Example
44 | * Do `validate_data_spec` directly wherever you like
45 | ```python
46 | from data_spec_validator.spec import INT, DIGIT_STR, ONE_OF, LIST_OF, Checker, CheckerOP, validate_data_spec
47 |
48 | class SomeSpec:
49 | field_a = Checker([INT])
50 | field_b = Checker([DIGIT_STR], optional=True)
51 | field_c = Checker([DIGIT_STR, INT], op=CheckerOP.ANY)
    field_d_array = Checker([LIST_OF], LIST_OF=int, alias='field_d[]', optional=True)
53 |
54 | some_data = dict(field_a=4, field_b='3', field_c=1, field_dont_care=[5,6])
55 | validate_data_spec(some_data, SomeSpec) # return True
56 |
57 | some_data = dict(field_a=4, field_c='1')
58 | validate_data_spec(some_data, SomeSpec) # return True
59 |
60 | some_data = {
61 | 'field_a': 4,
62 | 'field_c': 1,
63 | 'field_d[]': [5, 6],
64 | }
65 | validate_data_spec(some_data, SomeSpec) # return True
66 |
67 | some_data = dict(field_a='4', field_c='1')
68 | validate_data_spec(some_data, SomeSpec) # raise Exception
69 |
70 | some_data = dict(field_a='4', field_c='1')
71 | validate_data_spec(some_data, SomeSpec, nothrow=True) # return False
72 |
73 | class AnotherSpec:
74 | field = Checker([ONE_OF], ONE_OF=[1, '2', [3, 4], {'5': 6}])
75 |
76 | another_data = dict(field=[3, 4])
77 | validate_data_spec(another_data, AnotherSpec) # return True
78 |
79 | another_data = dict(field='4')
80 | validate_data_spec(another_data, AnotherSpec) # raise Exception
81 | ```
82 |
83 | * Multiple rows data
84 | ```python
85 | from data_spec_validator.spec import INT, STR, Checker, validate_data_spec
86 |
87 | class SingleSpec:
88 | f_a = Checker([INT])
89 | f_b = Checker([STR])
90 |
91 | multirow_data = [dict(f_a=1, f_b='1'), dict(f_a=2, f_b='2'), dict(f_a=3, f_b='3')]
92 | validate_data_spec(multirow_data, SingleSpec, multirow=True) # return True
93 |
94 | ```
95 |
96 |
97 | ---
98 | ## Supported checks & sample usages (see `test_spec.py`/`test_class_type_spec.py` for more cases)
99 |
100 | ### INT
`int_field = Checker([INT])` or `Checker([int])`
102 |
103 | ### FLOAT
104 | `float_field = Checker([FLOAT])` or `Checker([float])`
105 |
106 | ### NUMBER
107 | `number_field = Checker([NUMBER])`
108 |
109 | ### STR
110 | `str_field = Checker([STR])` or `Checker([str])`
111 |
112 | ### DIGIT_STR
113 | `digi_str_field = Checker([DIGIT_STR])`
114 |
115 | ### BOOL
116 | `bool_field = Checker([BOOL])` or `Checker([bool])`
117 |
118 | ### DICT
119 | `dict_field = Checker([DICT])` or `Checker([dict])`
120 |
121 | ### LIST
122 | `list_field = Checker([LIST])` or `Checker([list])`
123 |
124 | ### DATE_OBJECT
125 | `date_obj_field = Checker([DATE_OBJECT])` or `Checker([datetime.date])`
126 |
127 | ### DATETIME_OBJECT
128 | `datetime_obj_field = Checker([DATETIME_OBJECT])` or `Checker([datetime.datetime])`
129 |
130 | ### NONE
131 | `none_field = Checker([NONE])` or `Checker([type(None)])`
132 |
133 | ### JSON
134 | `json_field = Checker([JSON])`
135 |
136 | ### JSON_BOOL
137 | `json_bool_field = Checker([JSON_BOOL])`
138 |
139 | ### ONE_OF
140 | `one_of_field = Checker([ONE_OF], ONE_OF=['a', 'b', 'c'])`
141 |
142 | ### SPEC
143 | `spec_field = Checker([SPEC], SPEC=SomeSpecClass)`
144 |
145 | ### LIST_OF: Enforce LIST type validation as well
146 | `list_of_int_field = Checker([LIST_OF], LIST_OF=INT)`
147 |
148 | `list_of_spec_field = Checker([LIST_OF], LIST_OF=SomeSpecClass)`
149 |
150 | ### LENGTH
151 | `length_field = Checker([LENGTH], LENGTH=dict(min=3, max=5))`
152 |
153 | ### AMOUNT
154 | `amount_field = Checker([AMOUNT])`
155 |
156 | ### AMOUNT_RANGE
157 | `amount_range_field = Checker([AMOUNT_RANGE], AMOUNT_RANGE=dict(min=-2.1, max=3.8))`
158 |
159 | ### DECIMAL_PLACE
160 | `decimal_place_field = Checker([DECIMAL_PLACE], DECIMAL_PLACE=4)`
161 |
162 | ### DATE
163 | `date_field = Checker([DATE])`
164 |
165 | ### DATE_RANGE
166 | `date_range_field = Checker([DATE_RANGE], DATE_RANGE=dict(min='2000-01-01', max='2010-12-31'))`
167 |
168 | ### EMAIL
169 | `email_field = Checker([EMAIL])`
170 |
171 | ### UUID
172 | `uuid_field = Checker([UUID])` or `Checker([uuid.UUID])`
173 |
174 | ### REGEX
175 | `re_field = Checker([REGEX], REGEX=dict(pattern=r'^The'))`
176 |
177 | `re_field = Checker([REGEX], REGEX=dict(pattern=r'watch out', method='match'))`
178 |
179 | ### COND_EXIST
180 | If a exists, c must not exist, if b exists, a must exist, if c exists, a must not exist.
181 |
182 | Practically, `optional=True` will be configured in the most use cases, FMI, see `test/test_spec.py`
183 |
184 | `a = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))`
185 |
186 | `b = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a']))`
187 |
188 | `c = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))`
189 |
190 | ### Self-defined class type
191 | ```
192 | class SomeClass:
193 | pass
194 |
195 | a = Checker([SomeClass])
196 | ```
197 |
198 | ---
199 |
200 | * Decorate a method with `dsv`, the method must meet one of the following requirements.
201 | 1) It's a view's member function, and the view has a WSGIRequest(`django.core.handlers.wsgi.WSGIRequest`) attribute.
202 | 2) It's a view's member function, and the 2nd argument of the method is a `rest_framework.request.Request` instance.
203 | 3) It's already decorated with `rest_framework.decorators import api_view`, the 1st argument is a `rest_framework.request.Request`
204 | ```python
205 | from rest_framework.decorators import api_view
206 | from rest_framework.views import APIView
207 |
208 | from data_spec_validator.decorator import dsv
209 | from data_spec_validator.spec import UUID, EMAIL, Checker
210 |
211 | class SomeViewSpec:
212 | param_a = Checker([UUID])
213 | param_b = Checker([EMAIL])
214 |
215 | class SomeView(APIView):
216 | @dsv(SomeViewSpec)
217 | def get(self, request):
218 | pass
219 |
220 | @api_view(('POST',))
221 | @dsv(SomeViewSpec)
222 | def customer_create(request):
223 | pass
224 |
225 | @api_view(('POST',))
226 | @dsv(SomeViewSpec, multirow=True) # For type(request.POST) is list
227 | def customer_create(request):
228 | pass
229 | ```
230 |
231 | * Decorate another method with `dsv_request_meta` can help you validate the META in request header.
232 | ---
233 |
234 | ### Register Custom Spec Check & Validator
235 | - Define custom CHECK constant (`gt_check` in this case) and write custom Validator(`GreaterThanValidator` in this case)
236 | ```python
237 | gt_check = 'gt_check'
238 | from data_spec_validator.spec.defines import BaseValidator
239 | class GreaterThanValidator(BaseValidator):
240 | name = gt_check
241 |
242 | @staticmethod
243 | def validate(value, extra, data):
244 | criteria = extra.get(GreaterThanValidator.name)
245 | return value > criteria, ValueError(f'{value} is not greater than {criteria}')
246 | ```
247 | - Register custom check & validator into data_spec_validator
248 | ```python
249 | from data_spec_validator.spec import custom_spec, Checker, validate_data_spec
250 | custom_spec.register(dict(gt_check=GreaterThanValidator()))
251 |
252 | class GreaterThanSpec:
253 | key = Checker(['gt_check'], GT_CHECK=10)
254 |
255 | ok_data = dict(key=11)
256 | validate_data_spec(ok_data, GreaterThanSpec) # return True
257 |
258 | nok_data = dict(key=9)
validate_data_spec(nok_data, GreaterThanSpec) # raise Exception
260 | ```
261 | ---
262 | ### Message Level
263 |
264 | - 2 modes (**Default** v.s. **Vague**), can be switched by calling `reset_msg_level(vague=True)`
265 | ```python
266 | # In default mode, any exception happens, there will be a reason in the message
267 | "field: XXX, reason: '3' is not a integer"
268 |
269 | # In vague mode, any exception happens, a general message is shown
270 | "field: XXX not well-formatted"
271 | ```
272 | ---
273 | ### Feature: Strict Mode
274 |
275 | - A spec class decorated with `dsv_feature(strict=True)` detects unexpected key/value in data
276 | ```python
277 | from data_spec_validator.spec import Checker, validate_data_spec, dsv_feature, BOOL
278 |
279 | @dsv_feature(strict=True)
280 | class StrictSpec:
281 | a = Checker([BOOL])
282 |
283 | ok_data = dict(a=True)
284 | validate_data_spec(ok_data, StrictSpec) # return True
285 |
286 | nok_data = dict(a=True, b=1)
287 | validate_data_spec(nok_data, StrictSpec) # raise Exception
288 | ```
289 | ---
290 | ### Feature: Any Keys Set
291 |
292 | - A spec class decorated with e.g. `dsv_feature(any_keys_set={...})` means that at least one key among a keys tuple from the set must exist.
293 | ```python
294 | from data_spec_validator.spec import Checker, validate_data_spec, dsv_feature, INT
295 |
296 | @dsv_feature(any_keys_set={('a', 'b'), ('c', 'd')})
297 | class _AnyKeysSetSpec:
298 | a = Checker([INT], optional=True)
299 | b = Checker([INT], optional=True)
300 | c = Checker([INT], optional=True)
301 | d = Checker([INT], optional=True)
302 |
303 | validate_data_spec(dict(a=1, c=1, d=1), _AnyKeysSetSpec)
304 | validate_data_spec(dict(a=1, c=1), _AnyKeysSetSpec)
305 | validate_data_spec(dict(a=1, d=1), _AnyKeysSetSpec)
306 | validate_data_spec(dict(b=1, c=1, d=1), _AnyKeysSetSpec)
307 | validate_data_spec(dict(b=1, c=1), _AnyKeysSetSpec)
308 | validate_data_spec(dict(b=1, d=1), _AnyKeysSetSpec)
309 | validate_data_spec(dict(a=1, b=1, c=1), _AnyKeysSetSpec)
310 | validate_data_spec(dict(a=1, b=1, d=1), _AnyKeysSetSpec)
311 | validate_data_spec(dict(a=1, b=1, c=1, d=1), _AnyKeysSetSpec)
312 |
313 | validate_data_spec(dict(a=1), _AnyKeysSetSpec) # raise exception
314 | validate_data_spec(dict(b=1), _AnyKeysSetSpec) # raise exception
315 | validate_data_spec(dict(c=1), _AnyKeysSetSpec) # raise exception
316 | validate_data_spec(dict(d=1), _AnyKeysSetSpec) # raise exception
317 | validate_data_spec(dict(e=1), _AnyKeysSetSpec) # raise exception
318 | ```
319 | ---
320 | ### Feature: Error Mode, i.e. ErrorMode.ALL, ErrorMode.MSE(default behavior)
321 | NOTE 1: `ErrorMode.MSE` stands for MOST-SIGNIFICANT-ERROR
322 |
323 | NOTE 2: The validation results respect to the ErrorMode feature config on the **OUTER-MOST** spec. All nested specs
324 | follow the **OUTER-MOST** spec configuration, for more reference, see `test_spec.py:test_err_mode`
325 | ```python
326 | from data_spec_validator.spec import Checker, validate_data_spec, dsv_feature, LENGTH, STR, AMOUNT, ErrorMode, INT, DIGIT_STR
327 |
328 | @dsv_feature(err_mode=ErrorMode.ALL)
329 | class _ErrModeAllSpec:
330 | a = Checker([INT])
331 | b = Checker([DIGIT_STR])
332 | c = Checker([LENGTH, STR, AMOUNT], LENGTH=dict(min=3, max=5))
333 |
334 | nok_data = dict(
335 | a=True,
336 | b='abc',
337 | c='22',
338 | )
339 |
340 | validate_data_spec(nok_data, _ErrModeAllSpec) # raise DSVError
341 | """
342 | A DSVError is raised with 3 errors in args.
343 | (TypeError('field: _ErrModeAllSpec.a, reason: True is not an integer',),
344 | TypeError("field: _ErrModeAllSpec.b, reason: 'abc' is not a digit str",),
345 | ValueError("field: _ErrModeAllSpec.c, reason: Length of '22' must be between 3 and 5",))
346 |
347 | """
348 | ```
349 | ---
350 | ### Feature: Self-defined Spec name in error message
351 | ```python
352 | from data_spec_validator.spec import Checker, dsv_feature, validate_data_spec, INT
353 |
354 | @dsv_feature(spec_name='CustomSpecName')
355 | class _MySpec:
356 | a = Checker([INT])
357 |
358 | nok_data = dict(
359 | a='abc',
360 | )
361 |
362 | validate_data_spec(nok_data, _MySpec)
363 | """
364 | TypeError: field: CustomSpecName.a, reason: 'abc' is not an integer
365 | """
366 | ```
367 |
368 | ---
369 | ## Test
370 | ```bash
371 | python -m unittest test/*.*
372 | ```
373 |
--------------------------------------------------------------------------------
/data_spec_validator/spec/validators.py:
--------------------------------------------------------------------------------
1 | import copy
2 | import datetime
3 | import json
4 | import re
5 | import uuid
6 | from dataclasses import dataclass
7 | from decimal import Decimal
8 | from functools import lru_cache
9 | from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, Union
10 |
11 | import dateutil.parser
12 |
13 | from .checks import (
14 | _TYPE,
15 | AMOUNT,
16 | AMOUNT_RANGE,
17 | BOOL,
18 | COND_EXIST,
19 | DATE,
20 | DATE_OBJECT,
21 | DATE_RANGE,
22 | DATETIME_OBJECT,
23 | DECIMAL_PLACE,
24 | DICT,
25 | DIGIT_STR,
26 | DUMMY,
27 | EMAIL,
28 | FLOAT,
29 | FOREACH,
30 | INT,
31 | JSON,
32 | JSON_BOOL,
33 | LENGTH,
34 | LIST,
35 | LIST_OF,
36 | NONE,
37 | NUMBER,
38 | ONE_OF,
39 | REGEX,
40 | SPEC,
41 | STR,
42 | UUID,
43 | Checker,
44 | get_validator,
45 | )
46 | from .defines import SELF, BaseValidator, ValidateResult
47 | from .features import get_any_keys_set, is_strict
48 | from .utils import raise_if
49 |
# Internal marker injected into a checker's `extra` dict when an optional
# COND_EXIST field is allowed to be absent from the data.
_ALLOW_UNKNOWN = 'ALLOW_UNKNOWN'
# Checks that run at spec level, i.e. even when the field's value is missing.
_SPEC_WISE_CHECKS = [COND_EXIST]
52 |
53 |
class UnknownFieldValue:
    # Sentinel type returned when a field key is absent from the incoming data.
    # Callers compare against the cached singleton from get_unknown_field_value().
    message = 'This field cannot be found in this SPEC'
56 |
57 |
@dataclass(frozen=True)
class FieldKey:
    # Name of the Checker attribute on the spec class.
    spec_field: str
    # Key looked up in the validated data dict (honors the checker's alias);
    # falls back to spec_field when no alias is set.
    data_field: Optional[str] = None
62 |
63 |
@lru_cache(1)
def get_unknown_field_value() -> UnknownFieldValue:
    """Return the process-wide UnknownFieldValue sentinel (cached singleton)."""
    return UnknownFieldValue()
67 |
68 |
def _extract_value(checks: list, data: dict, field_key: FieldKey):
    """Fetch the raw value for a field, or the unknown-field sentinel if absent.

    For Django QueryDict-like objects, every query value is stored in a list
    under its key; the spec maker signals a list field via LIST_OF, in which
    case the whole list is fetched with getlist() instead of get().
    """
    sentinel = get_unknown_field_value()
    wants_list = LIST_OF in checks and hasattr(data, 'getlist')
    getter = data.getlist if wants_list else data.get
    return getter(field_key.data_field, sentinel)
78 |
79 |
def _makeup_internals_to_extra(spec: Type, checks: List[str], raw_extra: Dict, allow_optional: bool) -> Dict:
    """Clone the checker's extra dict and resolve internal placeholders.

    A SELF spec reference is replaced by the enclosing spec class, and the
    _ALLOW_UNKNOWN marker is injected for optional COND_EXIST fields so the
    field may legitimately be absent from the data.
    """
    extra = copy.deepcopy(raw_extra)
    if extra.get(SpecValidator.name) == SELF:
        extra[SpecValidator.name] = spec

    if allow_optional and COND_EXIST in checks:
        extra[_ALLOW_UNKNOWN] = True
    return extra
88 |
89 |
def _pass_optional(allow_optional: bool, checks: List[str], value: Any) -> bool:
    """Return True when a missing field may be skipped entirely.

    The field must be absent, the checker must mark it optional, and no
    COND_EXIST rule may still need to run (COND_EXIST is spec-wise and must
    see absent fields).
    """
    # BUGFIX(robustness): compare against the cached sentinel by identity.
    # `==` delegated to the value's own __eq__, which arbitrary user data
    # (e.g. numpy arrays) can hijack or break; `is` is exact and cheap.
    return value is get_unknown_field_value() and allow_optional and COND_EXIST not in checks
92 |
93 |
94 | def _pass_none(allow_none: bool, value: Any) -> bool:
95 | return value is None and allow_none
96 |
97 |
def _pass_unknown(_extra: Dict, value: Any) -> bool:
    """Return True when the field is absent from the data but a COND_EXIST
    checker explicitly allowed it (via the _ALLOW_UNKNOWN marker)."""
    # BUGFIX(robustness): identity check against the cached sentinel; `==`
    # invoked the value's own __eq__, which user-supplied data can subvert.
    return value is get_unknown_field_value() and _ALLOW_UNKNOWN in _extra
100 |
101 |
def _validate_field(data, field_key: FieldKey, spec) -> Tuple[bool, List[ValidateResult]]:
    """Validate one field of `spec` against `data`.

    Returns (ok, failures): ok reflects the checker's combining op
    (op_any: fail only when every check failed; op_all: fail on any failure),
    and failures carries the ValidateResult of each check that did not pass.
    """
    checker = getattr(spec, field_key.spec_field)

    checks = checker.checks
    allow_optional = checker.allow_optional
    allow_none = checker.allow_none

    value = _extract_value(checks, data, field_key)
    extra = _makeup_internals_to_extra(spec, checks, checker.extra, allow_optional)

    results = []

    if _pass_optional(allow_optional, checks, value):
        # Skip all the other checks' validations
        return True, []
    elif _pass_none(allow_none, value):
        # Skip all the other checks' validations
        return True, []
    else:

        def _do_validate(_acc_results: List, _spec: Any, _check: str, _value: Any, _data: Dict, _extra: Dict) -> None:
            # Run one check's validator and append (ok, ValidateResult) to the accumulator.
            validator = get_validator(_check)
            try:
                ok, error = validator.validate(_value, _extra, _data)
            except AttributeError as ae:
                if _check == LIST_OF:
                    # During list_of check, the target should be one kind of spec.
                    ok, error = False, TypeError(f'{repr(_value)} is not a spec of {_spec}, detail: {repr(ae)}')
                else:
                    ok, error = False, RuntimeError(f'{repr(ae)}')
            except NotImplementedError:
                # DummyValidator raises this on purpose; let it propagate.
                raise
            except Exception as e:
                # For any unwell-handled case, go this way for now.
                ok, error = False, RuntimeError(f'{repr(e)}')
            _acc_results.append((ok, ValidateResult(spec, field_key.data_field, _value, _check, error)))

        # Spec-wise checks (e.g. COND_EXIST) must run even when the value is absent.
        spec_wise_checks = set(filter(lambda c: c in _SPEC_WISE_CHECKS, checks))
        field_wise_checks = set(checks) - spec_wise_checks

        for chk in spec_wise_checks:
            _do_validate(results, spec, chk, value, data, extra)

        # Field-wise checks are skipped when the field is absent but COND_EXIST allowed it.
        if not _pass_unknown(extra, value):
            for chk in field_wise_checks:
                _do_validate(results, spec, chk, value, data, extra)

    nok_results = [rs for (ok, rs) in results if not ok]
    if checker.is_op_any and len(nok_results) == len(checks):
        return False, nok_results
    if checker.is_op_all and nok_results:
        return False, nok_results
    return True, []
155 |
156 |
def _validate_spec_features(data, data_fields, spec) -> Tuple[bool, List[ValidateResult]]:
    """Run spec-level feature checks (strict mode, any_keys_set) before field checks.

    Returns (False, [failure]) on the first feature violation, otherwise
    (True, [empty ValidateResult]).
    """
    data_keys = set(data.keys())

    if is_strict(spec):
        # Strict mode: the data may not carry keys outside the spec's fields.
        unexpected = data_keys - set(data_fields)
        if unexpected:
            err = ValueError(f'Unexpected field keys({unexpected}) found in strict mode spec')
            return False, [ValidateResult(spec, str(unexpected), data, 'strict', err)]

    # any_keys_set: for every tuple of keys, at least one must be present.
    for keys in get_any_keys_set(spec) or ():
        if data_keys.isdisjoint(keys):
            err = KeyError('At least one of these fields must exist')
            return False, [ValidateResult(spec, ", ".join(keys), data, 'any_keys_set', err)]

    return True, [ValidateResult()]
174 |
175 |
def _validate_spec_fields(data, field_keys: List[FieldKey], spec) -> List[Tuple[bool, List[ValidateResult]]]:
    """Validate every field of the spec and return the per-field results."""
    return [_validate_field(data, key, spec) for key in field_keys]
179 |
180 |
class DummyValidator(BaseValidator):
    # Placeholder validator for the DUMMY check; it must never actually run.
    name = DUMMY

    @staticmethod
    def validate(value, extra, data):
        # NotImplementedError is deliberately re-raised (not swallowed) by
        # the dispatch loop in _validate_field.
        raise NotImplementedError
187 |
188 |
class TypeValidator(BaseValidator):
    """Check that the value's concrete type equals the type configured in extra."""

    name = _TYPE

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        expected = extra.get(TypeValidator.name)
        # Exact type match on purpose: subclasses (e.g. bool for int) must fail.
        if type(value) is expected:
            return True, ''
        return False, TypeError(f'{repr(value)} is not in type: {expected}')
198 |
199 |
class IntValidator(BaseValidator):
    """Check that the value is exactly an int (bool does not count)."""

    name = INT

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        if type(value) is int:
            return True, ''
        return False, TypeError(f'{repr(value)} is not an integer')
208 |
209 |
class FloatValidator(BaseValidator):
    """Check that the value is exactly a float (an int does not count)."""

    name = FLOAT

    @staticmethod
    def validate(value, extra, data):
        if type(value) is float:
            return True, ''
        return False, TypeError(f'{repr(value)} is not a float')
218 |
219 |
class NumberValidator(BaseValidator):
    """Check that the value is exactly an int or a float (bool does not count)."""

    name = NUMBER

    @staticmethod
    def validate(value, extra, data):
        if type(value) in (int, float):
            return True, ''
        return False, TypeError(f'{repr(value)} is not a number')
228 |
229 |
class StrValidator(BaseValidator):
    """Check that the value is exactly a str."""

    name = STR

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        if type(value) is str:
            return True, ''
        return False, TypeError(f'{repr(value)} is not a string')
238 |
239 |
class NoneValidator(BaseValidator):
    """Check that the value is None."""

    name = NONE

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        if value is None:
            return True, ''
        return False, TypeError(f'{repr(value)} is not None')
248 |
249 |
class BoolValidator(BaseValidator):
    """Check that the value is exactly a bool."""

    name = BOOL

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        if type(value) is bool:
            return True, ''
        return False, TypeError(f'{repr(value)} is not a boolean')
258 |
259 |
class JSONValidator(BaseValidator):
    """Check that the value is a string parseable as JSON."""

    name = JSON

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        try:
            json.loads(value)
        except Exception as exc:
            return False, TypeError(f'{repr(value)} is not a json object, {exc.__str__()}')
        return True, ''
270 |
271 |
class JSONBoolValidator(BaseValidator):
    """Check that the value parses as JSON and yields exactly a boolean."""

    name = JSON_BOOL

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        try:
            parsed = json.loads(value)
        except Exception as exc:
            return False, TypeError(f'{repr(value)} is not a json object, {exc.__str__()}')
        if type(parsed) is bool:
            return True, ''
        return False, TypeError(f'{repr(value)} is not a json boolean')
283 |
284 |
class ListValidator(BaseValidator):
    """Check that the value is exactly a list."""

    name = LIST

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        if type(value) is list:
            return True, ''
        return False, TypeError(f'{repr(value)} is not a list')
293 |
294 |
class DictValidator(BaseValidator):
    """Check that the value is exactly a dict."""

    name = DICT

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        ok = type(value) is dict
        # BUGFIX: previously a TypeError instance was returned even on success;
        # every sibling validator returns '' when ok.
        info = '' if ok else TypeError(f'{repr(value)} is not a dict')
        return ok, info
303 |
304 |
class DateObjectValidator(BaseValidator):
    """Check that the value is exactly a datetime.date.

    Exact type match on purpose: a datetime.datetime (a date subclass)
    must fail this check.
    """

    name = DATE_OBJECT

    @staticmethod
    def validate(value, extra, data):
        if type(value) is datetime.date:
            return True, ''
        return False, TypeError(f'{repr(value)} is not a date object')
313 |
314 |
class DatetimeObjectValidator(BaseValidator):
    """Check that the value is exactly a datetime.datetime (a plain date fails)."""

    name = DATETIME_OBJECT

    @staticmethod
    def validate(value, extra, data):
        if type(value) is datetime.datetime:
            return True, ''
        return False, TypeError(f'{repr(value)} is not a datetime object')
323 |
324 |
class AmountValidator(BaseValidator):
    """Check that the value is convertible to float (int, float, numeric str)."""

    name = AMOUNT

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        try:
            float(value)
            return True, ''
        except (TypeError, ValueError):
            # BUGFIX: float() raises TypeError (not ValueError) for None, lists,
            # etc.; previously that escaped and surfaced as an opaque RuntimeError
            # from the generic handler in _validate_field.
            return False, ValueError(f'Cannot convert {repr(value)} to float')
335 |
336 |
class AmountRangeValidator(BaseValidator):
    """Check that float(value) lies within the configured 'min'/'max' bounds."""

    name = AMOUNT_RANGE

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        bounds = extra.get(AmountRangeValidator.name)
        # The config must be a dict carrying at least one of 'min'/'max'.
        raise_if(
            type(bounds) != dict or ('min' not in bounds and 'max' not in bounds),
            RuntimeError(f'Invalid checker configuration: {extra}'),
        )

        lower_bound = bounds.get('min', float('-inf'))
        upper_bound = bounds.get('max', float('inf'))

        if lower_bound <= float(value) <= upper_bound:
            return True, ''
        return False, ValueError(f'Amount: {repr(value)} must be between {lower_bound} and {upper_bound}')
354 |
355 |
class LengthValidator(BaseValidator):
    """Check that len(value) falls within the configured 'min'/'max' bounds.

    'min' defaults to 0 and may not be negative; omitting 'max' means no
    upper bound.
    """

    name = LENGTH

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        length_info = extra.get(LengthValidator.name)
        raise_if(
            type(length_info) != dict or ('min' not in length_info and 'max' not in length_info),
            RuntimeError(f'Invalid checker configuration: {extra}'),
        )

        lower_bound, upper_bound = length_info.get('min', 0), length_info.get('max')
        raise_if(
            lower_bound < 0,
            RuntimeError('Lower boundary cannot less than 0 for length validator'),
        )

        # BUGFIX: `if upper_bound:` treated an explicit max=0 as "no upper bound",
        # so LENGTH=dict(max=0) silently accepted any length. Only a missing
        # 'max' key (None) means unbounded.
        if upper_bound is not None:
            ok = lower_bound <= len(value) <= upper_bound
            info = '' if ok else ValueError(f'Length of {repr(value)} must be between {lower_bound} and {upper_bound}')
        else:
            ok = lower_bound <= len(value)
            info = '' if ok else ValueError(f'Length of {repr(value)} must be greater than or equal to {lower_bound}')
        return ok, info
380 |
381 |
class SpecValidator(BaseValidator):
    # Validates a nested dict value against another spec class (possibly SELF).
    name = SPEC

    @staticmethod
    def _extract_field_keys(spec) -> List[FieldKey]:
        """Collect a FieldKey for every Checker attribute declared on the spec class."""
        raise_if(type(spec) != type, RuntimeError(f'{spec} should be a spec class'))

        fields = []
        for f_name, checker in spec.__dict__.items():
            if isinstance(checker, Checker):
                # data_field honors the checker's alias, so the data key may
                # differ from the spec attribute name.
                key = FieldKey(spec_field=f_name, data_field=checker.alias if checker.alias else f_name)
                fields.append(key)

        return fields

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, List[Tuple[bool, List[ValidateResult]]]]:
        """Validate `value` (a dict) against the spec class carried in extra.

        Spec-level feature checks (strict / any_keys_set) run first and
        short-circuit; otherwise every field is validated and only the
        failing results are returned.
        """
        target_spec = extra.get(SpecValidator.name)

        field_keys = SpecValidator._extract_field_keys(target_spec)

        result = _validate_spec_features(value, [fk.data_field for fk in field_keys], target_spec)
        if not result[0]:
            return False, [result]

        results = _validate_spec_fields(value, field_keys, target_spec)
        failures = [r for r in results if not r[0]]

        ok = len(failures) == 0
        return ok, failures
412 |
413 |
class ListOfValidator(BaseValidator):
    """Check that the value is a list whose every element passes the nested check."""

    name = LIST_OF

    @staticmethod
    def validate(values, extra, data) -> Tuple[bool, Union[Exception, str]]:
        if type(values) != list:
            # BUGFIX: the original message read "Must a be in type: list" (garbled).
            return False, TypeError('Must be in type: list')

        check = extra.get(ListOfValidator.name)
        validator = get_validator(check)
        for value in values:
            ok, error = validator.validate(value, extra, data)
            if not ok:
                # Early return on the first failing element.
                return False, error
        return True, ''
430 |
431 |
class OneOfValidator(BaseValidator):
    """Check that the value is a member of the configured options."""

    name = ONE_OF

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        options = extra.get(OneOfValidator.name)
        if value in options:
            return True, ''
        return False, ValueError(f'{repr(value)} is not one of {options}')
441 |
442 |
class ForeachValidator(BaseValidator):
    """Apply the configured nested check to every element of an iterable."""

    name = FOREACH

    @staticmethod
    def validate(values: Iterable, extra: Dict, data: Dict) -> Tuple[bool, Union[Exception, str]]:
        validator = get_validator(extra.get(ForeachValidator.name))
        for item in values:
            ok, error = validator.validate(item, extra, data)
            if not ok:
                # Early return on the first failing element.
                return False, error
        return True, ''
456 |
457 |
class DecimalPlaceValidator(BaseValidator):
    """Check that the value has at most the configured number of decimal places."""

    name = DECIMAL_PLACE

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        max_places = extra.get(DecimalPlaceValidator.name)
        # Go through str() so the Decimal reflects the value as written.
        exponent = Decimal(str(value)).as_tuple().exponent
        actual_places = -exponent if exponent < 0 else 0
        if actual_places <= max_places:
            return True, ''
        return False, ValueError(f'Expect decimal places({max_places}) for value: {value!r}, but got {actual_places}')
470 |
471 |
class DateValidator(BaseValidator):
    """Check that the value parses as a date string (any format dateutil accepts)."""

    name = DATE

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        try:
            dateutil.parser.parse(value).date()
        except ValueError:
            return False, ValueError(f'Unexpected date format: {repr(value)}')
        return True, ''
482 |
483 |
class DateRangeValidator(BaseValidator):
    """Check that a date string falls within the configured 'min'/'max' range.

    Bounds default to 1970-01-01 / 2999-12-31 and must be given as strings.
    """

    name = DATE_RANGE

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        range_info = extra.get(DateRangeValidator.name)
        raise_if(
            type(range_info) != dict or ('min' not in range_info and 'max' not in range_info),
            RuntimeError(f'Invalid checker configuration: {extra}'),
        )

        min_date_str = range_info.get('min', '1970-01-01')
        max_date_str = range_info.get('max', '2999-12-31')
        raise_if(
            type(min_date_str) != str or type(max_date_str) != str,
            RuntimeError(f'Invalid checker configuration(must be str): {extra}'),
        )

        lower = dateutil.parser.parse(min_date_str).date()
        upper = dateutil.parser.parse(max_date_str).date()
        target = dateutil.parser.parse(value).date()
        if lower <= target <= upper:
            return True, ''
        return False, ValueError(f'{repr(value)} is not in range {min_date_str} ~ {max_date_str}')
508 |
509 |
class DigitStrValidator(BaseValidator):
    """Check that the value is a str consisting solely of digits."""

    name = DIGIT_STR

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        if type(value) is str and value.isdigit():
            return True, ''
        return False, TypeError(f'{repr(value)} is not a digit str')
518 |
519 |
class EmailValidator(BaseValidator):
    """Check that the value is a string matching the WHATWG e-mail pattern."""

    name = EMAIL

    # https://html.spec.whatwg.org/multipage/input.html#valid-e-mail-address
    regex = r'[a-zA-Z0-9.!#$%&\'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$'

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        # BUGFIX: coerce to a real bool. Previously `ok` could be a re.Match
        # object, violating the annotated Tuple[bool, ...] contract (truthiness
        # was the same, but the leaked Match surprised downstream consumers).
        ok = type(value) is str and re.fullmatch(EmailValidator.regex, value) is not None
        info = '' if ok else ValueError(f'{repr(value)} is not a valid email address')
        return ok, info
531 |
532 |
class UUIDValidator(BaseValidator):
    """Check that the value is a uuid.UUID instance or a string parseable as one."""

    name = UUID

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        try:
            if not isinstance(value, uuid.UUID):
                uuid.UUID(value)
            return True, ''
        except Exception as e:
            # BUGFIX: `{e.__str__}` rendered the bound-method repr
            # ("<built-in method __str__ of ...>") instead of the message.
            return False, ValueError(f'{repr(value)} is not an UUID object: {e.__str__()}')
544 |
545 |
class RegexValidator(BaseValidator):
    """Check that the value matches the configured regex.

    extra[REGEX] is a dict with 'pattern' (default '') and 'method'
    ('search' by default; 'match' and 'fullmatch' also supported).
    """

    name = REGEX

    # Supported match strategies, dispatched by the 'method' config key.
    _MATCH_FUNCS = {'match': re.match, 'fullmatch': re.fullmatch, 'search': re.search}

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        regex_param = extra.get(RegexValidator.name, {})
        pattern = regex_param.get('pattern', '')
        match_method = regex_param.get('method', 'search')
        # Echo the effective method in the error payload, even when defaulted.
        error_regex_param = regex_param.copy()
        error_regex_param['method'] = match_method

        match_func = RegexValidator._MATCH_FUNCS.get(match_method)
        if match_func is None:
            raise RuntimeError(f'unsupported match method: {match_method}')

        # BUGFIX: coerce to a real bool; previously `ok` could be a re.Match
        # object, violating the annotated Tuple[bool, ...] contract.
        ok = type(value) is str and match_func(pattern, value) is not None
        info = '' if ok else ValueError(f'{repr(value)} does not match "{error_regex_param}"')
        return ok, info
569 |
570 |
class CondExistValidator(BaseValidator):
    """Conditional-existence check.

    The field may require other keys to be present (params['WITH']) or absent
    (params['WITHOUT']) in the same data dict. A missing field fails unless
    the checker marked it optional (_ALLOW_UNKNOWN injected into extra).
    """

    name = COND_EXIST

    @staticmethod
    def validate(value, extra, data) -> Tuple[bool, Union[Exception, str]]:
        allow_unknown = extra.get(_ALLOW_UNKNOWN, False)
        params = extra.get(CondExistValidator.name, {})
        must_with_keys = params.get('WITH', [])
        must_without_keys = params.get('WITHOUT', [])

        if isinstance(value, UnknownFieldValue) and not allow_unknown:
            return False, LookupError('must exist')

        ok = True
        msg = ''
        if not isinstance(value, UnknownFieldValue):
            if must_with_keys and not all(key in data for key in must_with_keys):
                ok = False
                msg = f'{", ".join(must_with_keys)} must exist'

            # BUGFIX: only overwrite the message when the WITHOUT rule itself
            # fails. Previously, a prior WITH failure (ok already False) made
            # this branch report "... must not exist" even though every
            # WITHOUT key was correctly absent.
            if must_without_keys and not all(key not in data for key in must_without_keys):
                ok = False
                msg = f'{", ".join(must_without_keys)} must not exist'

        info = '' if ok else KeyError(msg)
        return ok, info
596 |
--------------------------------------------------------------------------------
/test/test_spec.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import unittest
3 | import uuid
4 | from datetime import date
5 | from itertools import chain
6 |
7 | from data_spec_validator.spec import (
8 | AMOUNT,
9 | AMOUNT_RANGE,
10 | BOOL,
11 | COND_EXIST,
12 | DATE,
13 | DATE_OBJECT,
14 | DATE_RANGE,
15 | DATETIME_OBJECT,
16 | DECIMAL_PLACE,
17 | DICT,
18 | DIGIT_STR,
19 | EMAIL,
20 | FLOAT,
21 | FOREACH,
22 | INT,
23 | JSON,
24 | JSON_BOOL,
25 | LENGTH,
26 | LIST,
27 | LIST_OF,
28 | NONE,
29 | NUMBER,
30 | ONE_OF,
31 | REGEX,
32 | SELF,
33 | SPEC,
34 | STR,
35 | UUID,
36 | Checker,
37 | CheckerOP,
38 | DSVError,
39 | ErrorMode,
40 | dsv_feature,
41 | not_,
42 | reset_msg_level,
43 | validate_data_spec,
44 | )
45 | from data_spec_validator.spec.validators import BaseValidator
46 |
47 | from .utils import is_something_error, is_type_error
48 |
49 |
50 | class TestSpec(unittest.TestCase):
51 | def test_int(self):
52 | class IntSpec:
53 | int_field = Checker([INT])
54 |
55 | ok_data = dict(int_field=3)
56 | assert validate_data_spec(ok_data, IntSpec)
57 |
58 | nok_data = dict(int_field='3')
59 | assert is_something_error(TypeError, validate_data_spec, nok_data, IntSpec)
60 |
61 | def test_float(self):
62 | class FloatSpec:
63 | float_field = Checker([FLOAT])
64 |
65 | ok_data = dict(float_field=3.0)
66 | assert validate_data_spec(ok_data, FloatSpec)
67 |
68 | nok_data = dict(float_field=3)
69 | assert is_something_error(TypeError, validate_data_spec, nok_data, FloatSpec)
70 |
71 | def test_number(self):
72 | class NumberSpec:
73 | number_field = Checker([NUMBER])
74 |
75 | ok_data = dict(number_field=3.0)
76 | assert validate_data_spec(ok_data, NumberSpec)
77 |
78 | ok_data = dict(number_field=3)
79 | assert validate_data_spec(ok_data, NumberSpec)
80 |
81 | nok_data = dict(number_field='3.0')
82 | assert is_something_error(TypeError, validate_data_spec, nok_data, NumberSpec)
83 |
84 | def test_str(self):
85 | class StrSpec:
86 | str_field = Checker([STR])
87 |
88 | ok_data = dict(str_field='3')
89 | assert validate_data_spec(ok_data, StrSpec)
90 |
91 | nok_data = dict(str_field=3)
92 | assert is_something_error(TypeError, validate_data_spec, nok_data, StrSpec)
93 |
94 | def test_none(self):
95 | class NoneSpec:
96 | none_field = Checker([NONE])
97 |
98 | ok_data = dict(none_field=None)
99 | assert validate_data_spec(ok_data, NoneSpec)
100 |
101 | nok_data = dict(none_field=3)
102 | assert is_something_error(TypeError, validate_data_spec, nok_data, NoneSpec)
103 |
104 | def test_allow_none(self):
105 | class AllowNoneSpec:
106 | maybe_none_field = Checker([INT], allow_none=True)
107 |
108 | ok_data = dict(maybe_none_field=3)
109 | assert validate_data_spec(ok_data, AllowNoneSpec)
110 |
111 | ok_data = dict(maybe_none_field=None)
112 | assert validate_data_spec(ok_data, AllowNoneSpec)
113 |
114 | nok_data = dict(maybe_none_field='3')
115 | assert is_something_error(TypeError, validate_data_spec, nok_data, AllowNoneSpec)
116 |
117 | def test_bool(self):
118 | class BoolSpec:
119 | bool_field = Checker([BOOL])
120 |
121 | ok_data = dict(bool_field=False)
122 | assert validate_data_spec(ok_data, BoolSpec)
123 |
124 | nok_data = dict(bool_field='True')
125 | assert is_something_error(TypeError, validate_data_spec, nok_data, BoolSpec)
126 |
127 | def test_self(self):
128 | class SelfSpec:
129 | next_field = Checker([SPEC], optional=True, SPEC=SELF)
130 | children = Checker([LIST_OF], optional=True, LIST_OF=SPEC, SPEC=SELF)
131 |
132 | ok_data = dict(
133 | next_field=dict(
134 | next_field=dict(
135 | next_field=dict(),
136 | ),
137 | ),
138 | children=[
139 | dict(
140 | next_field=dict(next_field=dict()),
141 | ),
142 | dict(
143 | next_field=dict(),
144 | ),
145 | dict(
146 | children=[dict()],
147 | ),
148 | ],
149 | )
150 | assert validate_data_spec(ok_data, SelfSpec)
151 |
152 | nok_data = dict(next_field=dict(next_field=0))
153 | assert is_something_error(Exception, validate_data_spec, nok_data, SelfSpec)
154 |
155 | def test_list(self):
156 | class ListSpec:
157 | list_field = Checker([LIST])
158 |
159 | ok_data = dict(list_field=[1, 2, 3])
160 | assert validate_data_spec(ok_data, ListSpec)
161 |
162 | nok_data = dict(list_field=dict(a=2, b=4))
163 | assert is_something_error(TypeError, validate_data_spec, nok_data, ListSpec)
164 |
165 | def test_dict(self):
166 | class DictSpec:
167 | dict_field = Checker([DICT])
168 |
169 | ok_data = dict(dict_field=dict(a=2, b=4))
170 | assert validate_data_spec(ok_data, DictSpec)
171 |
172 | nok_data = dict(dict_field=[1, 2, 3])
173 | assert is_something_error(TypeError, validate_data_spec, nok_data, DictSpec)
174 |
175 | def test_date_object(self):
176 | class DateObjSpec:
177 | date_object_field = Checker([DATE_OBJECT])
178 |
179 | ok_data = dict(date_object_field=datetime.date(2023, 2, 9))
180 | assert validate_data_spec(ok_data, DateObjSpec)
181 |
182 | nok_data = dict(date_object_field=datetime.datetime(2023, 2, 9, 12, 34))
183 | assert is_something_error(TypeError, validate_data_spec, nok_data, DateObjSpec)
184 |
185 | def test_datetime_object(self):
186 | class DatetimeObjSpec:
187 | datetime_object_field = Checker([DATETIME_OBJECT])
188 |
189 | ok_data = dict(datetime_object_field=datetime.datetime(2023, 2, 9, 12, 34))
190 | assert validate_data_spec(ok_data, DatetimeObjSpec)
191 |
192 | nok_data = dict(datetime_object_field=datetime.date(2023, 2, 9))
193 | assert is_something_error(TypeError, validate_data_spec, nok_data, DatetimeObjSpec)
194 |
195 | def test_optional(self):
196 | class OptionalSpec:
197 | optional_field = Checker([STR], optional=True)
198 |
199 | ok_data = dict(whatever_field='dont_care')
200 | assert validate_data_spec(ok_data, OptionalSpec)
201 |
202 | def test_amount(self):
203 | class AmountSpec:
204 | amount_field = Checker([AMOUNT])
205 |
206 | ok_data = dict(amount_field='3.1415')
207 | assert validate_data_spec(ok_data, AmountSpec)
208 |
209 | ok_data = dict(amount_field=5566)
210 | assert validate_data_spec(ok_data, AmountSpec)
211 |
212 | nok_data = dict(amount_field='abc')
213 | assert is_something_error(ValueError, validate_data_spec, nok_data, AmountSpec)
214 |
215 | def test_amount_range(self):
216 | class AmountRangeSpec:
217 | amount_range_field = Checker([AMOUNT_RANGE], AMOUNT_RANGE=dict(min=-2.1, max=3.8))
218 |
219 | ok_data = dict(
220 | amount_range_field='3.8',
221 | )
222 | assert validate_data_spec(ok_data, AmountRangeSpec)
223 |
224 | ok_data = dict(amount_range_field=-2.1)
225 | assert validate_data_spec(ok_data, AmountRangeSpec)
226 |
227 | nok_data = dict(amount_range_field='-2.2')
228 | assert is_something_error(ValueError, validate_data_spec, nok_data, AmountRangeSpec)
229 |
230 | nok_data = dict(amount_range_field='3.81')
231 | assert is_something_error(ValueError, validate_data_spec, nok_data, AmountRangeSpec)
232 |
233 | def test_length(self):
234 | class LengthSpec:
235 | length_field = Checker([LENGTH], LENGTH=dict(min=3, max=5))
236 |
237 | ok_data = dict(length_field='3.2')
238 | assert validate_data_spec(ok_data, LengthSpec)
239 |
240 | ok_data = dict(length_field='3.141')
241 | assert validate_data_spec(ok_data, LengthSpec)
242 |
243 | nok_data = dict(length_field='ah')
244 | assert is_something_error(ValueError, validate_data_spec, nok_data, LengthSpec)
245 |
246 | nok_data = dict(length_field='exceed')
247 | assert is_something_error(ValueError, validate_data_spec, nok_data, LengthSpec)
248 |
249 | # assert error message
250 | with self.assertRaises(ValueError) as e:
251 | validate_data_spec(dict(length_field='ah'), LengthSpec)
252 |
253 | expected_error_msg = "field: LengthSpec.length_field, reason: Length of 'ah' must be between 3 and 5"
254 | self.assertEqual(str(e.exception), expected_error_msg)
255 |
256 | def test_length__without_upper_limit(self):
257 | class LengthSpec:
258 | length_field = Checker([LENGTH], LENGTH=dict(min=3))
259 |
260 | # assert error message
261 | with self.assertRaises(ValueError) as e:
262 | validate_data_spec(dict(length_field='ah'), LengthSpec)
263 |
264 | expected_error_msg = "field: LengthSpec.length_field, reason: Length of 'ah' must be greater than or equal to 3"
265 | self.assertEqual(str(e.exception), expected_error_msg)
266 |
267 | def test_decimal_place(self):
268 | class DecimalPlaceSpec:
269 | decimal_place_field = Checker([DECIMAL_PLACE], DECIMAL_PLACE=4)
270 |
271 | ok_data = dict(decimal_place_field=3.123)
272 | assert validate_data_spec(ok_data, DecimalPlaceSpec)
273 |
274 | ok_data = dict(decimal_place_field=3.1234)
275 | assert validate_data_spec(ok_data, DecimalPlaceSpec)
276 |
277 | nok_data = dict(decimal_place_field=3.12345)
278 | assert is_something_error(ValueError, validate_data_spec, nok_data, DecimalPlaceSpec)
279 |
280 | def test_date(self):
281 | class DateStrSpec:
282 | date_field = Checker([DATE])
283 |
284 | ok_data = dict(date_field='2000-01-31')
285 | assert validate_data_spec(ok_data, DateStrSpec)
286 |
287 | ok_data = dict(date_field='1-31-2000')
288 | assert validate_data_spec(ok_data, DateStrSpec)
289 |
290 | ok_data = dict(date_field='20200101')
291 | assert validate_data_spec(ok_data, DateStrSpec)
292 |
293 | nok_data = dict(date_field='202011')
294 | assert is_something_error(ValueError, validate_data_spec, nok_data, DateStrSpec)
295 |
296 | def test_date_range(self):
297 | class DateStrRangeSpec:
298 | date_range_field = Checker(
299 | [DATE_RANGE],
300 | DATE_RANGE=dict(min='2000-01-01', max='2010-12-31'),
301 | )
302 |
303 | ok_data = dict(date_range_field='2000-1-1')
304 | assert validate_data_spec(ok_data, DateStrRangeSpec)
305 |
306 | ok_data = dict(date_range_field='2005-12-31')
307 | assert validate_data_spec(ok_data, DateStrRangeSpec)
308 |
309 | ok_data = dict(date_range_field='2010-12-31')
310 | assert validate_data_spec(ok_data, DateStrRangeSpec)
311 |
312 | nok_data = dict(date_range_field='1999-12-31')
313 | assert is_something_error(ValueError, validate_data_spec, nok_data, DateStrRangeSpec)
314 |
315 | def test_nested_spec(self):
316 | class LeafSpec:
317 | int_field = Checker([INT])
318 | str_field = Checker([STR])
319 | bool_field = Checker([BOOL])
320 |
321 | class MidLeafSpec:
322 | int_field = Checker([INT])
323 | str_field = Checker([STR])
324 | leaf_field = Checker([SPEC], SPEC=LeafSpec)
325 |
326 | class RootSpec:
327 | int_field = Checker([INT])
328 | mid_leaf_field = Checker([SPEC], SPEC=MidLeafSpec)
329 | bool_field = Checker([BOOL])
330 |
331 | ok_data = dict(
332 | int_field=1,
333 | mid_leaf_field=dict(
334 | int_field=2,
335 | str_field='2',
336 | leaf_field=dict(
337 | int_field=3,
338 | str_field='3',
339 | bool_field=True,
340 | ),
341 | ),
342 | bool_field=False,
343 | )
344 | assert validate_data_spec(ok_data, RootSpec)
345 |
346 | nok_data = dict(
347 | int_field=1,
348 | mid_leaf_field=dict(
349 | int_field=2,
350 | wrong_name_mid_field='2',
351 | leaf_field=dict(
352 | int_field=3,
353 | str_field='3',
354 | bool_field=True,
355 | ),
356 | ),
357 | bool_field=False,
358 | )
359 | assert is_something_error(LookupError, validate_data_spec, nok_data, RootSpec)
360 |
361 | nok_data = dict(
362 | int_field=1,
363 | mid_leaf_field=dict(
364 | int_field=2,
365 | str_field='2',
366 | leaf_field=dict(
367 | int_field=3,
368 | str_field='3',
369 | wrong_name_leaf_field=True,
370 | ),
371 | ),
372 | bool_field=False,
373 | )
374 | assert is_something_error(LookupError, validate_data_spec, nok_data, RootSpec)
375 |
376 | def test_list_of(self):
377 | class ChildSpec:
378 | int_field = Checker([INT])
379 | bool_field = Checker([BOOL])
380 |
381 | class ParentSpec:
382 | list_of_spec_field = Checker([LIST_OF], LIST_OF=SPEC, SPEC=ChildSpec)
383 |
384 | ok_data = dict(
385 | list_of_spec_field=[
386 | dict(int_field=1, bool_field=False),
387 | dict(int_field=2, bool_field=True),
388 | dict(int_field=3, bool_field=False),
389 | ]
390 | )
391 | assert validate_data_spec(ok_data, ParentSpec)
392 |
393 | nok_data = dict(
394 | list_of_spec_field=[
395 | dict(int_field=1, bool_field=False),
396 | 2,
397 | ]
398 | )
399 | assert is_something_error(TypeError, validate_data_spec, nok_data, ParentSpec)
400 |
401 | class ListOfIntSpec:
402 | list_of_int_field = Checker([LIST_OF], LIST_OF=INT)
403 |
404 | ok_data = dict(list_of_int_field=[1, 2, 3])
405 | assert validate_data_spec(ok_data, ListOfIntSpec)
406 |
407 | nok_with_non_list_data = dict(list_of_int_field={1: 1, 2: 2, 3: 3})
408 | assert is_something_error(TypeError, validate_data_spec, nok_with_non_list_data, ListOfIntSpec)
409 |
410 | nok_data = dict(list_of_int_field=[1, 2, '3'])
411 | assert is_something_error(TypeError, validate_data_spec, nok_data, ListOfIntSpec)
412 |
413 | def test_foreach(self):
414 | class ChildSpec:
415 | int_field = Checker([INT])
416 | bool_field = Checker([BOOL])
417 |
418 | class ParentSpec:
419 | foreach_spec_field = Checker([FOREACH], FOREACH=SPEC, SPEC=ChildSpec)
420 |
421 | ok_data = dict(
422 | foreach_spec_field=(
423 | dict(int_field=1, bool_field=False),
424 | dict(int_field=2, bool_field=True),
425 | dict(int_field=3, bool_field=False),
426 | )
427 | )
428 | assert validate_data_spec(ok_data, ParentSpec)
429 |
430 | class ForeachIntSpec:
431 | foreach_spec_field = Checker([FOREACH], FOREACH=INT)
432 |
433 | ok_data = dict(foreach_spec_field=(1, 2, 3))
434 | assert validate_data_spec(ok_data, ForeachIntSpec)
435 | ok_data = dict(foreach_spec_field=[1, 2, 3])
436 | assert validate_data_spec(ok_data, ForeachIntSpec)
437 | ok_data = dict(foreach_spec_field={1, 2, 3})
438 | assert validate_data_spec(ok_data, ForeachIntSpec)
439 | ok_data = dict(foreach_spec_field={1: 1, 2: 2, 3: 3})
440 | assert validate_data_spec(ok_data, ForeachIntSpec)
441 |
442 | def test_one_of(self):
443 | class OneOfSpec:
444 | one_of_spec_field = Checker([ONE_OF], ONE_OF=[1, '2', [3, 4], {'5': 6}])
445 |
446 | ok_data = dict(one_of_spec_field=1)
447 | assert validate_data_spec(ok_data, OneOfSpec)
448 |
449 | ok_data = dict(one_of_spec_field='2')
450 | assert validate_data_spec(ok_data, OneOfSpec)
451 |
452 | ok_data = dict(one_of_spec_field=[3, 4])
453 | assert validate_data_spec(ok_data, OneOfSpec)
454 |
455 | ok_data = dict(one_of_spec_field={'5': 6})
456 | assert validate_data_spec(ok_data, OneOfSpec)
457 |
458 | nok_data = dict(one_of_spec_field=6)
459 | assert is_something_error(ValueError, validate_data_spec, nok_data, OneOfSpec)
460 |
461 | def test_json(self):
462 | class JsonSpec:
463 | json_spec_field = Checker([JSON])
464 |
465 | for value in chain.from_iterable(
466 | (
467 | ('-1', '0', '3.14', '2.718e-4'), # Numbers
468 | ('"Hello"', "\"World\""), # Strings
469 | ('false', 'true'), # Booleans
470 | ('[]', '[0, 1, 2]', '[1, "+", 1, "=", 2]'), # Arrays
471 | ('{}', '{"foo":"bar"}', '{"sheldon":["says","bazinga"]}'), # Objects
472 | ('null',), # null
473 | )
474 | ):
475 | ok_data = dict(json_spec_field=value)
476 | assert validate_data_spec(ok_data, JsonSpec), value
477 |
478 | for value in chain.from_iterable(
479 | (
480 | ('0123', '0xFFFF'), # Numbers
481 | ('Hello', "'World'"), # Strings
482 | ('False', 'TRUE'), # Booleans
483 | ('(1, 2, 3)',), # Arrays
484 | ('{foo:"bar"}', '{"foo":"bar",}'), # Objects
485 | ('none', 'None', 'undefined'), # null
486 | )
487 | ):
488 | nok_data = dict(json_spec_field=value)
489 | assert is_something_error(TypeError, validate_data_spec, nok_data, JsonSpec), value
490 |
491 | def test_json_bool(self):
492 | class JsonBoolSpec:
493 | json_bool_spec_field = Checker([JSON_BOOL])
494 |
495 | ok_data = dict(json_bool_spec_field='true')
496 | assert validate_data_spec(ok_data, JsonBoolSpec)
497 |
498 | ok_data = dict(json_bool_spec_field='false')
499 | assert validate_data_spec(ok_data, JsonBoolSpec)
500 |
501 | nok_data = dict(json_bool_spec_field=True)
502 | assert is_something_error(TypeError, validate_data_spec, nok_data, JsonBoolSpec)
503 |
504 | nok_data = dict(json_bool_spec_field='False')
505 | assert is_something_error(TypeError, validate_data_spec, nok_data, JsonBoolSpec)
506 |
507 | nok_data = dict(json_bool_spec_field='FALSE')
508 | assert is_something_error(TypeError, validate_data_spec, nok_data, JsonBoolSpec)
509 |
510 | def test_op_all(self):
511 | class AllSpec:
512 | all_field = Checker([LENGTH, STR, AMOUNT], LENGTH=dict(min=3, max=5))
513 |
514 | ok_data = dict(all_field='1.234')
515 | assert validate_data_spec(ok_data, AllSpec)
516 |
517 | ok_data = dict(all_field='12345')
518 | assert validate_data_spec(ok_data, AllSpec)
519 |
520 | nok_data = dict(all_field='123456')
521 | assert is_something_error(ValueError, validate_data_spec, nok_data, AllSpec)
522 |
523 | def test_op_any(self):
524 | class AnySpec:
525 | any_field = Checker([INT, STR], optional=True, op=CheckerOP.ANY)
526 |
527 | ok_data = dict(any_field=1)
528 | assert validate_data_spec(ok_data, AnySpec)
529 |
530 | ok_data = dict(any_field='1')
531 | assert validate_data_spec(ok_data, AnySpec)
532 |
533 | ok_data = dict(any_unexist_field=1)
534 | assert validate_data_spec(ok_data, AnySpec)
535 |
536 | nok_data = dict(any_field=True)
537 | assert is_something_error(TypeError, validate_data_spec, nok_data, AnySpec)
538 |
539 | def test_email(self):
540 | class EmailSpec:
541 | email_field = Checker([EMAIL])
542 |
543 | ok_data = dict(email_field='foo@bar.com')
544 | assert validate_data_spec(ok_data, EmailSpec)
545 |
546 | ok_data = dict(email_field='foo.bar@test.org')
547 | assert validate_data_spec(ok_data, EmailSpec)
548 |
549 | ok_data = dict(email_field='foo+bar@hc.co.uk')
550 | assert validate_data_spec(ok_data, EmailSpec)
551 |
552 | ok_data = dict(email_field='ABC@DEF.COM')
553 | assert validate_data_spec(ok_data, EmailSpec)
554 |
555 | ok_data = dict(email_field='_ab_C@example.com')
556 | assert validate_data_spec(ok_data, EmailSpec)
557 |
558 | ok_data = dict(email_field='-AB-c@example.com')
559 | assert validate_data_spec(ok_data, EmailSpec)
560 |
561 | ok_data = dict(email_field='3aBc@example.com')
562 | assert validate_data_spec(ok_data, EmailSpec)
563 |
564 | nok_data = dict(email_field="example.com")
565 | assert is_something_error(ValueError, validate_data_spec, nok_data, EmailSpec)
566 |
567 | nok_data = dict(email_field="john@doe.")
568 | assert is_something_error(ValueError, validate_data_spec, nok_data, EmailSpec)
569 |
570 | nok_data = dict(email_field="john@.doe")
571 | assert is_something_error(ValueError, validate_data_spec, nok_data, EmailSpec)
572 |
573 | nok_data = dict(email_field="say@hello.world!")
574 | assert is_something_error(ValueError, validate_data_spec, nok_data, EmailSpec)
575 |
576 | def test_regex_validator(self):
577 | # ^, $
578 | class SimpleRegexSpec1:
579 | re_field = Checker([REGEX], REGEX=dict(pattern=r'^The'))
580 |
581 | # Just test SINGLE ONE regex spec for convenience
582 | class NoExtraSimpleRegexSpec1:
583 | re_field = Checker([REGEX], REGEX=dict(pattern=r'^The'))
584 |
585 | class SimpleRegexSpec2:
586 | re_field = Checker([REGEX], REGEX=dict(pattern=r'of the world$'))
587 |
588 | class SimpleRegexSpec3:
589 | re_field = Checker([REGEX], REGEX=dict(pattern=r'^abc$'))
590 |
591 | class SimpleRegexSpec4:
592 | re_field = Checker([REGEX], REGEX=dict(pattern=r'notice'))
593 |
594 | ok_data = dict(re_field='The')
595 | assert validate_data_spec(ok_data, SimpleRegexSpec1)
596 | assert validate_data_spec(ok_data, NoExtraSimpleRegexSpec1)
597 | nok_data = dict(re_field='That cat is cute')
598 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec1)
599 | assert is_something_error(ValueError, validate_data_spec, nok_data, NoExtraSimpleRegexSpec1)
600 | nok_data = dict(re_field='I am the king of dogs')
601 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec1)
602 | assert is_something_error(ValueError, validate_data_spec, nok_data, NoExtraSimpleRegexSpec1)
603 |
604 | ok_data = dict(re_field='of the world')
605 | assert validate_data_spec(ok_data, SimpleRegexSpec2)
606 | nok_data = dict(re_field='I am the king of the world.')
607 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec2)
608 |
609 | ok_data = dict(re_field='abc')
610 | assert validate_data_spec(ok_data, SimpleRegexSpec3)
611 | nok_data = dict(re_field='adcd')
612 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec3)
613 | nok_data = dict(re_field='adc')
614 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec3)
615 |
616 | ok_data = dict(re_field='Did you notice that')
617 | assert validate_data_spec(ok_data, SimpleRegexSpec4)
618 | nok_data = dict(re_field='coffee, not iced please')
619 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec4)
620 |
621 | # ?, +, *,
622 | class SimpleRegexSpec5:
623 | re_field = Checker([REGEX], REGEX=dict(pattern=r'ab*'))
624 |
625 | class SimpleRegexSpec6:
626 | re_field = Checker([REGEX], REGEX=dict(pattern=r'ab+'))
627 |
628 | class SimpleRegexSpec7:
629 | re_field = Checker([REGEX], REGEX=dict(pattern=r'ab?'))
630 |
631 | class SimpleRegexSpec8:
632 | re_field = Checker([REGEX], REGEX=dict(pattern=r'a?b+$'))
633 |
634 | ok_data = dict(re_field='ac')
635 | assert validate_data_spec(ok_data, SimpleRegexSpec5)
636 | ok_data = dict(re_field='ab')
637 | assert validate_data_spec(ok_data, SimpleRegexSpec5)
638 | ok_data = dict(re_field='abbc')
639 | assert validate_data_spec(ok_data, SimpleRegexSpec5)
640 | nok_data = dict(re_field='b')
641 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec5)
642 |
643 | ok_data = dict(re_field='ab')
644 | assert validate_data_spec(ok_data, SimpleRegexSpec6)
645 | ok_data = dict(re_field='abbc')
646 | assert validate_data_spec(ok_data, SimpleRegexSpec6)
647 | nok_data = dict(re_field='ac')
648 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec6)
649 |
650 | ok_data = dict(re_field='ac')
651 | assert validate_data_spec(ok_data, SimpleRegexSpec7)
652 | ok_data = dict(re_field='ab')
653 | assert validate_data_spec(ok_data, SimpleRegexSpec7)
654 | ok_data = dict(re_field='abbc')
655 | assert validate_data_spec(ok_data, SimpleRegexSpec7)
656 | nok_data = dict(re_field='bc')
657 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec7)
658 |
659 | ok_data = dict(re_field='ab')
660 | assert validate_data_spec(ok_data, SimpleRegexSpec8)
661 | ok_data = dict(re_field='abb')
662 | assert validate_data_spec(ok_data, SimpleRegexSpec8)
663 | ok_data = dict(re_field='b')
664 | assert validate_data_spec(ok_data, SimpleRegexSpec8)
665 | ok_data = dict(re_field='bb')
666 | assert validate_data_spec(ok_data, SimpleRegexSpec8)
667 | nok_data = dict(re_field='aac')
668 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec8)
669 | nok_data = dict(re_field='ba')
670 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec8)
671 |
672 | # {}
673 | class SimpleRegexSpec9:
674 | re_field = Checker([REGEX], REGEX=dict(pattern=r'ab{2}'))
675 |
676 | class SimpleRegexSpec10:
677 | re_field = Checker([REGEX], REGEX=dict(pattern=r'ab{3,5}'))
678 |
679 | ok_data = dict(re_field='abb')
680 | assert validate_data_spec(ok_data, SimpleRegexSpec9)
681 | ok_data = dict(re_field='abcabbc')
682 | assert validate_data_spec(ok_data, SimpleRegexSpec9)
683 | nok_data = dict(re_field='ab')
684 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec9)
685 |
686 | ok_data = dict(re_field='abbb')
687 | assert validate_data_spec(ok_data, SimpleRegexSpec10)
688 | ok_data = dict(re_field='abbabbbb')
689 | assert validate_data_spec(ok_data, SimpleRegexSpec10)
690 | nok_data = dict(re_field='abbabb')
691 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec10)
692 |
693 | # |, ()
694 | class SimpleRegexSpec11:
695 | re_field = Checker([REGEX], REGEX=dict(pattern=r'hello|world'))
696 |
697 | class SimpleRegexSpec12:
698 | re_field = Checker([REGEX], REGEX=dict(pattern=r'(a|bc)de'))
699 |
700 | class SimpleRegexSpec13:
701 | re_field = Checker([REGEX], REGEX=dict(pattern=r'(a|b)*c'))
702 |
703 | ok_data = dict(re_field='hello, hi')
704 | assert validate_data_spec(ok_data, SimpleRegexSpec11)
705 | ok_data = dict(re_field='new world')
706 | assert validate_data_spec(ok_data, SimpleRegexSpec11)
707 | nok_data = dict(re_field='hell, word')
708 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec11)
709 |
710 | ok_data = dict(re_field='ade')
711 | assert validate_data_spec(ok_data, SimpleRegexSpec12)
712 | ok_data = dict(re_field='bcde')
713 | assert validate_data_spec(ok_data, SimpleRegexSpec12)
714 | nok_data = dict(re_field='adbce')
715 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec12)
716 |
717 | ok_data = dict(re_field='c')
718 | assert validate_data_spec(ok_data, SimpleRegexSpec13)
719 | ok_data = dict(re_field='acb')
720 | assert validate_data_spec(ok_data, SimpleRegexSpec13)
721 | ok_data = dict(re_field='ebcd')
722 | assert validate_data_spec(ok_data, SimpleRegexSpec13)
723 | nok_data = dict(re_field='ab')
724 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec13)
725 |
726 | # ., []
727 | class SimpleRegexSpec14:
728 | re_field = Checker([REGEX], REGEX=dict(pattern=r'a.[0-9]'))
729 |
730 | class SimpleRegexSpec15:
731 | re_field = Checker([REGEX], REGEX=dict(pattern=r'^.{3}$'))
732 |
733 | class SimpleRegexSpec16:
734 | re_field = Checker([REGEX], REGEX=dict(pattern=r'[a-c]'))
735 |
736 | class SimpleRegexSpec17:
737 | re_field = Checker([REGEX], REGEX=dict(pattern=r'[0-9]%'))
738 |
739 | class SimpleRegexSpec18:
740 | re_field = Checker([REGEX], REGEX=dict(pattern=r',[a-zA-Z0-9]$'))
741 |
742 | ok_data = dict(re_field='a33')
743 | assert validate_data_spec(ok_data, SimpleRegexSpec14)
744 | ok_data = dict(re_field='a.0')
745 | assert validate_data_spec(ok_data, SimpleRegexSpec14)
746 | ok_data = dict(re_field='a@9')
747 | assert validate_data_spec(ok_data, SimpleRegexSpec14)
748 | nok_data = dict(re_field='a8')
749 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec14)
750 | nok_data = dict(re_field='a.a')
751 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec14)
752 |
753 | ok_data = dict(re_field=',3c')
754 | assert validate_data_spec(ok_data, SimpleRegexSpec15)
755 | nok_data = dict(re_field='12')
756 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec15)
757 | nok_data = dict(re_field='abcd')
758 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec15)
759 |
760 | ok_data = dict(re_field='12a3c')
761 | assert validate_data_spec(ok_data, SimpleRegexSpec16)
762 | ok_data = dict(re_field='ab')
763 | assert validate_data_spec(ok_data, SimpleRegexSpec16)
764 | nok_data = dict(re_field='de')
765 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec16)
766 |
767 | ok_data = dict(re_field='18%')
768 | assert validate_data_spec(ok_data, SimpleRegexSpec17)
769 | nok_data = dict(re_field='a%')
770 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec17)
771 |
772 | ok_data = dict(re_field=',1')
773 | assert validate_data_spec(ok_data, SimpleRegexSpec18)
774 | ok_data = dict(re_field=',G')
775 | assert validate_data_spec(ok_data, SimpleRegexSpec18)
776 | nok_data = dict(re_field=',end')
777 | assert is_something_error(ValueError, validate_data_spec, nok_data, SimpleRegexSpec18)
778 |
779 | def test_regex_match_method_validator(self):
780 | class SearchRegexSpec:
781 | re_field = Checker([REGEX], REGEX=dict(pattern=r'watch out'))
782 |
783 | class MatchRegexSpec:
784 | re_field = Checker([REGEX], REGEX=dict(pattern=r'watch out', method='match'))
785 |
786 | class FullmatchRegexSpec:
787 | re_field = Checker([REGEX], REGEX=dict(pattern=r'watch out', method='fullmatch'))
788 |
789 | ok_data = dict(re_field='someone tell me to watch out.')
790 | assert validate_data_spec(ok_data, SearchRegexSpec)
791 | nok_data = dict(re_field='someone tell me')
792 | assert is_something_error(ValueError, validate_data_spec, nok_data, SearchRegexSpec)
793 |
794 | ok_data = dict(re_field='watch out, it is close!')
795 | assert validate_data_spec(ok_data, MatchRegexSpec)
796 | nok_data = dict(re_field='someone tell me to watch out.')
797 | assert is_something_error(ValueError, validate_data_spec, nok_data, MatchRegexSpec)
798 |
799 | ok_data = dict(re_field='watch out')
800 | assert validate_data_spec(ok_data, FullmatchRegexSpec)
801 | nok_data = dict(re_field='watch out, it is close!')
802 | assert is_something_error(ValueError, validate_data_spec, nok_data, FullmatchRegexSpec)
803 |
804 | def test_uuid(self):
805 | class UuidSpec:
806 | uuid_field = Checker([UUID])
807 |
808 | uuid_inst = uuid.UUID('00000000-0000-0000-0000-000000000000')
809 | ok_data = dict(uuid_field=uuid_inst)
810 | assert validate_data_spec(ok_data, UuidSpec)
811 |
812 | ok_data = dict(uuid_field='92d88ec0-a1f2-439a-b3c0-9e36db8b0b75')
813 | assert validate_data_spec(ok_data, UuidSpec)
814 |
815 | ok_data = dict(uuid_field='{4700bb68-09b5-4c4f-a403-773c12ee166e}')
816 | assert validate_data_spec(ok_data, UuidSpec)
817 |
818 | ok_data = dict(uuid_field='urn:uuid:a4be2b64-caf3-4a00-a924-7ea848471e6c')
819 | assert validate_data_spec(ok_data, UuidSpec)
820 |
821 | nok_data = dict(uuid_field='z78ff51b-a354-4819-b2dd-bfaede3a8be5')
822 | assert is_something_error(ValueError, validate_data_spec, nok_data, UuidSpec)
823 |
824 | def test_not_checker(self):
825 | class NonBoolSpec:
826 | key = Checker([not_(BOOL)])
827 |
828 | class ListOfNonBoolSpec:
829 | keys = Checker([LIST_OF], LIST_OF=not_(BOOL))
830 |
831 | ok_data = dict(key=1)
832 | assert validate_data_spec(ok_data, NonBoolSpec)
833 |
834 | ok_data = dict(key='1')
835 | assert validate_data_spec(ok_data, NonBoolSpec)
836 |
837 | nok_data = dict(key=True)
838 | assert is_something_error(TypeError, validate_data_spec, nok_data, NonBoolSpec)
839 |
840 | ok_data = dict(keys=['1', 2, date(2000, 1, 1)])
841 | assert validate_data_spec(ok_data, ListOfNonBoolSpec)
842 |
843 | nok_data = dict(keys=['1', True, date(2000, 1, 1)])
844 | assert is_something_error(TypeError, validate_data_spec, nok_data, ListOfNonBoolSpec)
845 |
846 | def test_strict_mode(self):
847 | @dsv_feature(strict=True)
848 | class _LeafStrictSpec:
849 | d = Checker([BOOL])
850 |
851 | class _LeafNonStrictSpec:
852 | e = Checker([BOOL])
853 |
854 | class _MiddleSpec:
855 | c = Checker([BOOL])
856 | leaf_strict = Checker([LIST_OF], LIST_OF=SPEC, SPEC=_LeafStrictSpec)
857 | leaf_non_strict = Checker([SPEC], SPEC=_LeafNonStrictSpec)
858 |
859 | @dsv_feature(strict=True)
860 | class _RootStrictSpec:
861 | a = Checker([BOOL])
862 | middle = Checker([SPEC], SPEC=_MiddleSpec)
863 |
864 | ok_data = dict(
865 | a=True,
866 | middle=dict(
867 | c=False,
868 | leaf_strict=[dict(d=True), dict(d=False)],
869 | leaf_non_strict=dict(e=True, f=False),
870 | g=True,
871 | ),
872 | )
873 | assert validate_data_spec(ok_data, _RootStrictSpec)
874 |
875 | nok_data_root = dict(
876 | a=True,
877 | middle=dict(
878 | c=False,
879 | leaf_strict=[dict(d=True), dict(d=False)],
880 | leaf_non_strict=dict(e=True, f=False),
881 | g=True,
882 | ),
883 | unexpected_field=False,
884 | )
885 | assert is_something_error(ValueError, validate_data_spec, nok_data_root, _RootStrictSpec)
886 |
887 | nok_data_leaf = dict(
888 | a=True,
889 | middle=dict(
890 | c=False,
891 | leaf_strict=[dict(d=True), dict(d=False, unexpected_field=False)],
892 | leaf_non_strict=dict(e=True, f=False),
893 | g=True,
894 | ),
895 | )
896 | assert is_something_error(ValueError, validate_data_spec, nok_data_leaf, _RootStrictSpec)
897 |
898 | def test_any_keys_set(self):
899 | @dsv_feature(any_keys_set={('a', 'b')})
900 | class _AnyKeysSetEmptyFieldsSpec:
901 | pass
902 |
903 | assert validate_data_spec(dict(a=1, b=1), _AnyKeysSetEmptyFieldsSpec)
904 | assert validate_data_spec(dict(a=1), _AnyKeysSetEmptyFieldsSpec)
905 | assert validate_data_spec(dict(b=1), _AnyKeysSetEmptyFieldsSpec)
906 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _AnyKeysSetEmptyFieldsSpec)
907 |
908 | @dsv_feature(any_keys_set={('a', 'b')})
909 | class _AnyKeysSetSpec:
910 | a = Checker([INT], optional=True)
911 | b = Checker([INT], optional=True)
912 |
913 | assert validate_data_spec(dict(a=1, b=1), _AnyKeysSetSpec)
914 | assert validate_data_spec(dict(a=1), _AnyKeysSetSpec)
915 | assert validate_data_spec(dict(b=1), _AnyKeysSetSpec)
916 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _AnyKeysSetSpec)
917 |
918 | @dsv_feature(any_keys_set={('a', 'b'), ('c', 'd')})
919 | class _AnyKeysSetSpec:
920 | a = Checker([INT], optional=True)
921 | b = Checker([INT], optional=True)
922 | c = Checker([INT], optional=True)
923 | d = Checker([INT], optional=True)
924 |
925 | assert validate_data_spec(dict(a=1, c=1, d=1), _AnyKeysSetSpec)
926 | assert validate_data_spec(dict(a=1, c=1), _AnyKeysSetSpec)
927 | assert validate_data_spec(dict(a=1, d=1), _AnyKeysSetSpec)
928 | assert validate_data_spec(dict(b=1, c=1, d=1), _AnyKeysSetSpec)
929 | assert validate_data_spec(dict(b=1, c=1), _AnyKeysSetSpec)
930 | assert validate_data_spec(dict(b=1, d=1), _AnyKeysSetSpec)
931 | assert validate_data_spec(dict(a=1, b=1, c=1), _AnyKeysSetSpec)
932 | assert validate_data_spec(dict(a=1, b=1, d=1), _AnyKeysSetSpec)
933 | assert validate_data_spec(dict(a=1, b=1, c=1, d=1), _AnyKeysSetSpec)
934 |
935 | assert is_something_error(LookupError, validate_data_spec, dict(a=1), _AnyKeysSetSpec)
936 | assert is_something_error(LookupError, validate_data_spec, dict(b=1), _AnyKeysSetSpec)
937 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _AnyKeysSetSpec)
938 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _AnyKeysSetSpec)
939 | assert is_something_error(LookupError, validate_data_spec, dict(e=1), _AnyKeysSetSpec)
940 |
    def test_err_mode(self):
        """err_mode=ErrorMode.ALL collects every field failure into one DSVError,
        instead of raising only a single error per validation run."""
        @dsv_feature(err_mode=ErrorMode.ALL)
        class _ErrModeAllSpec:
            a = Checker([INT])
            b = Checker([DIGIT_STR])
            c = Checker([LENGTH, STR, AMOUNT], LENGTH=dict(min=3, max=5))

        # All three fields are invalid: a is a bool, b is not digits, c is too short.
        nok_data = dict(
            a=True,
            b='abc',
            c='22',
        )

        with self.assertRaises(DSVError) as context:
            validate_data_spec(nok_data, _ErrModeAllSpec)
        # One collected error per failing field.
        assert len(context.exception.args) == 3

        def _get_nested_err_mode_spec(mode):
            # Build a 3-level spec tree where only the root sets err_mode, to
            # exercise how the mode interacts with nested SPEC validation.
            @dsv_feature()
            class LeafSpec:
                int_f = Checker([INT])
                str_f = Checker([STR])
                bool_f = Checker([BOOL])

            class MidLeafSpec:
                int_f = Checker([INT])
                str_f = Checker([STR])
                leaf_f = Checker([SPEC], SPEC=LeafSpec)

            @dsv_feature(err_mode=mode)
            class RootSpec:
                int_f = Checker([INT])
                ml_f = Checker([SPEC], SPEC=MidLeafSpec)
                bool_f = Checker([BOOL])

            return RootSpec

        # Five invalid values spread across all three nesting levels.
        nok_data2 = dict(
            int_f='a',
            ml_f=dict(
                int_f=3.3,
                str_f='ok',
                leaf_f=dict(
                    int_f=1,
                    str_f=True,
                    bool_f='non-bool',
                ),
            ),
            bool_f='22',
        )

        # ALL mode: the DSVError aggregates all five failures, nested ones included.
        with self.assertRaises(DSVError) as context:
            validate_data_spec(nok_data2, _get_nested_err_mode_spec(ErrorMode.ALL))
        assert len(context.exception.args) == 5

        # MSE mode: a single plain exception (here TypeError) with one message.
        with self.assertRaises(TypeError) as context:
            validate_data_spec(nok_data2, _get_nested_err_mode_spec(ErrorMode.MSE))
        assert len(context.exception.args) == 1
999 |
1000 | def test_conditional_existence(self):
1001 | """
1002 | The existence cases of a, b, c. 2 * 2 * 2 = 8 cases.
1003 | dict(a=1, b=1, c=1)
1004 | dict(a=1, b=1)
1005 | dict(a=1, c=1)
1006 | dict(b=1, c=1)
1007 | dict(a=1)
1008 | dict(b=1)
1009 | dict(c=1)
1010 | dict(d=1)
1011 | """
1012 | # ==========================
1013 |
1014 | class _CondExistAOBOCOSpec:
1015 | a = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
1016 | b = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1017 | c = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))
1018 |
1019 | assert validate_data_spec(dict(a=1, b=1), _CondExistAOBOCOSpec)
1020 | assert validate_data_spec(dict(a=1), _CondExistAOBOCOSpec)
1021 | assert validate_data_spec(dict(c=1), _CondExistAOBOCOSpec)
1022 | assert validate_data_spec(dict(d=1), _CondExistAOBOCOSpec)
1023 |
1024 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistAOBOCOSpec)
1025 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistAOBOCOSpec)
1026 | assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistAOBOCOSpec)
1027 | assert is_something_error(KeyError, validate_data_spec, dict(b=1), _CondExistAOBOCOSpec)
1028 | # ==========================
1029 |
1030 | class _CondExistABOCOSpec:
1031 | a = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['c']))
1032 | b = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1033 | c = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))
1034 |
1035 | assert validate_data_spec(dict(a=1, b=1), _CondExistABOCOSpec)
1036 | assert validate_data_spec(dict(a=1), _CondExistABOCOSpec)
1037 |
1038 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistABOCOSpec)
1039 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistABOCOSpec)
1040 | assert is_something_error(LookupError, validate_data_spec, dict(b=1, c=1), _CondExistABOCOSpec)
1041 | assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistABOCOSpec)
1042 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistABOCOSpec)
1043 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistABOCOSpec)
1044 | # ==========================
1045 |
1046 | class _CondExistAOBCOSpec:
1047 | a = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
1048 | b = Checker([COND_EXIST], COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1049 | c = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))
1050 |
1051 | assert validate_data_spec(dict(a=1, b=1), _CondExistAOBCOSpec)
1052 |
1053 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistAOBCOSpec)
1054 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistAOBCOSpec)
1055 | assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistAOBCOSpec)
1056 | assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistAOBCOSpec)
1057 | assert is_something_error(KeyError, validate_data_spec, dict(b=1), _CondExistAOBCOSpec)
1058 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistAOBCOSpec)
1059 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistAOBCOSpec)
1060 | # ==========================
1061 |
1062 | class _CondExistAOBOCSpec:
1063 | a = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
1064 | b = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1065 | c = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['a']))
1066 |
1067 | assert validate_data_spec(dict(c=1), _CondExistAOBOCSpec)
1068 |
1069 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistAOBOCSpec)
1070 | assert is_something_error(LookupError, validate_data_spec, dict(a=1, b=1), _CondExistAOBOCSpec)
1071 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistAOBOCSpec)
1072 | assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistAOBOCSpec)
1073 | assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistAOBOCSpec)
1074 | assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistAOBOCSpec)
1075 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistAOBOCSpec)
1076 | # ==========================
1077 |
1078 | class _CondExistABCOSpec:
1079 | a = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['c']))
1080 | b = Checker([COND_EXIST], COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1081 | c = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))
1082 |
1083 | assert validate_data_spec(dict(a=1, b=1), _CondExistABCOSpec)
1084 |
1085 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistABCOSpec)
1086 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistABCOSpec)
1087 | assert is_something_error(LookupError, validate_data_spec, dict(b=1, c=1), _CondExistABCOSpec)
1088 | assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistABCOSpec)
1089 | assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistABCOSpec)
1090 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistABCOSpec)
1091 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistABCOSpec)
1092 | # ==========================
1093 |
1094 | class _CondExistABOCSpec:
1095 | a = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['c']))
1096 | b = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1097 | c = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['a']))
1098 |
1099 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistABOCSpec)
1100 | assert is_something_error(LookupError, validate_data_spec, dict(a=1, b=1), _CondExistABOCSpec)
1101 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistABOCSpec)
1102 | assert is_something_error(LookupError, validate_data_spec, dict(b=1, c=1), _CondExistABOCSpec)
1103 | assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistABOCSpec)
1104 | assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistABOCSpec)
1105 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistABOCSpec)
1106 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistABOCSpec)
1107 | # ==========================
1108 |
1109 | class _CondExistAOBCSpec:
1110 | a = Checker([COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
1111 | b = Checker([COND_EXIST], COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1112 | c = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['a']))
1113 |
1114 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistAOBCSpec)
1115 | assert is_something_error(LookupError, validate_data_spec, dict(a=1, b=1), _CondExistAOBCSpec)
1116 | assert is_something_error(LookupError, validate_data_spec, dict(a=1, c=1), _CondExistAOBCSpec)
1117 | assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistAOBCSpec)
1118 | assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistAOBCSpec)
1119 | assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistAOBCSpec)
1120 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistAOBCSpec)
1121 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistAOBCSpec)
1122 | # ==========================
1123 |
1124 | class _CondExistABCSpec:
1125 | a = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['c']))
1126 | b = Checker([COND_EXIST], COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
1127 | c = Checker([COND_EXIST], COND_EXIST=dict(WITHOUT=['a']))
1128 |
1129 | assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistABCSpec)
1130 | assert is_something_error(LookupError, validate_data_spec, dict(a=1, b=1), _CondExistABCSpec)
1131 | assert is_something_error(LookupError, validate_data_spec, dict(a=1, c=1), _CondExistABCSpec)
1132 | assert is_something_error(LookupError, validate_data_spec, dict(b=1, c=1), _CondExistABCSpec)
1133 | assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistABCSpec)
1134 | assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistABCSpec)
1135 | assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistABCSpec)
1136 | assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistABCSpec)
1137 |
    def test_optional_conditional_existence_other_check_fail(self):
        """
        Same 8 existence combinations as the plain COND_EXIST tests, but every
        field also carries a STR check while all payload values are ints — so
        combinations that satisfy the existence rules now fail with TypeError
        from the STR check instead of validating.

        The existence cases of a, b, c. 2 * 2 * 2 = 8 cases.
        dict(a=1, b=1, c=1)
        dict(a=1, b=1)
        dict(a=1, c=1)
        dict(b=1, c=1)
        dict(a=1)
        dict(b=1)
        dict(c=1)
        dict(d=1)
        """

        # ==========================
        class _CondExistOtherFailAOBOCOSpec:
            a = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
            b = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
            c = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))

        assert validate_data_spec(dict(d=1), _CondExistOtherFailAOBOCOSpec)
        # Spec Wise
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistOtherFailAOBOCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistOtherFailAOBOCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistOtherFailAOBOCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(b=1), _CondExistOtherFailAOBOCOSpec)
        # Field Wise
        assert is_something_error(TypeError, validate_data_spec, dict(a=1, b=1), _CondExistOtherFailAOBOCOSpec)
        assert is_something_error(TypeError, validate_data_spec, dict(a=1), _CondExistOtherFailAOBOCOSpec)
        assert is_something_error(TypeError, validate_data_spec, dict(c=1), _CondExistOtherFailAOBOCOSpec)

        # ==========================
        class _CondExistOtherFailABOCOSpec:
            a = Checker([STR, COND_EXIST], COND_EXIST=dict(WITHOUT=['c']))
            b = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
            c = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))

        # Spec Wise
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistOtherFailABOCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistOtherFailABOCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1, c=1), _CondExistOtherFailABOCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistOtherFailABOCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistOtherFailABOCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistOtherFailABOCOSpec)
        # Field Wise
        assert is_something_error(TypeError, validate_data_spec, dict(a=1, b=1), _CondExistOtherFailABOCOSpec)
        assert is_something_error(TypeError, validate_data_spec, dict(a=1), _CondExistOtherFailABOCOSpec)

        # ==========================
        class _CondExistOtherFailAOBCOSpec:
            a = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
            b = Checker([STR, COND_EXIST], COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
            c = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))

        # Spec Wise
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistOtherFailAOBCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistOtherFailAOBCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistOtherFailAOBCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistOtherFailAOBCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(b=1), _CondExistOtherFailAOBCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistOtherFailAOBCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistOtherFailAOBCOSpec)
        # Field Wise
        assert is_something_error(TypeError, validate_data_spec, dict(a=1, b=1), _CondExistOtherFailAOBCOSpec)

        # ==========================
        class _CondExistOtherFailAOBOCSpec:
            a = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
            b = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
            c = Checker([STR, COND_EXIST], COND_EXIST=dict(WITHOUT=['a']))

        # Spec Wise
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistOtherFailAOBOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1, b=1), _CondExistOtherFailAOBOCSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistOtherFailAOBOCSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistOtherFailAOBOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistOtherFailAOBOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistOtherFailAOBOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistOtherFailAOBOCSpec)
        # Field Wise
        assert is_something_error(TypeError, validate_data_spec, dict(c=1), _CondExistOtherFailAOBOCSpec)
        # ==========================

        class _CondExistOtherFailABCOSpec:
            a = Checker([STR, COND_EXIST], COND_EXIST=dict(WITHOUT=['c']))
            b = Checker([STR, COND_EXIST], COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
            c = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['a']))

        # Spec Wise
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistOtherFailABCOSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistOtherFailABCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1, c=1), _CondExistOtherFailABCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistOtherFailABCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistOtherFailABCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistOtherFailABCOSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistOtherFailABCOSpec)
        # Field Wise
        assert is_something_error(TypeError, validate_data_spec, dict(a=1, b=1), _CondExistOtherFailABCOSpec)

        # ==========================
        class _CondExistOtherFailABOCSpec:
            a = Checker([STR, COND_EXIST], COND_EXIST=dict(WITHOUT=['c']))
            b = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
            c = Checker([STR, COND_EXIST], COND_EXIST=dict(WITHOUT=['a']))

        # Spec Wise (all 8 combinations fail the existence rules, so the STR
        # check is never the failing one here — no field-wise cases)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistOtherFailABOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1, b=1), _CondExistOtherFailABOCSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, c=1), _CondExistOtherFailABOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1, c=1), _CondExistOtherFailABOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistOtherFailABOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistOtherFailABOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistOtherFailABOCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistOtherFailABOCSpec)
        # ==========================

        class _CondExistOtherFailAOBCSpec:
            a = Checker([STR, COND_EXIST], optional=True, COND_EXIST=dict(WITHOUT=['c']))
            b = Checker([STR, COND_EXIST], COND_EXIST=dict(WITH=['a'], WITHOUT=['c']))
            c = Checker([STR, COND_EXIST], COND_EXIST=dict(WITHOUT=['a']))

        # Spec Wise (all 8 combinations fail the existence rules)
        assert is_something_error(KeyError, validate_data_spec, dict(a=1, b=1, c=1), _CondExistOtherFailAOBCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1, b=1), _CondExistOtherFailAOBCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1, c=1), _CondExistOtherFailAOBCSpec)
        assert is_something_error(KeyError, validate_data_spec, dict(b=1, c=1), _CondExistOtherFailAOBCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(a=1), _CondExistOtherFailAOBCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(b=1), _CondExistOtherFailAOBCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(c=1), _CondExistOtherFailAOBCSpec)
        assert is_something_error(LookupError, validate_data_spec, dict(d=1), _CondExistOtherFailAOBCSpec)
        # ==========================
1267 |
1268 | def test_alias(self):
1269 | class AliasSpec:
1270 | dot_field = Checker([int], alias='dot.field')
1271 | array_field = Checker([LIST_OF], LIST_OF=str, alias='array_field[]')
1272 |
1273 | ok_data = {
1274 | 'dot.field': 3,
1275 | 'array_field[]': ['a', 'b'],
1276 | }
1277 |
1278 | assert validate_data_spec(ok_data, AliasSpec)
1279 |
1280 | nok_data = {
1281 | 'dot_field': 3,
1282 | 'array_field': ['a', 'b'],
1283 | }
1284 | assert is_something_error(LookupError, validate_data_spec, nok_data, AliasSpec)
1285 |
1286 |
class TestCustomSpec(unittest.TestCase):
    """Tests for registering user-defined validators via ``custom_spec.register``."""

    def test_incorrect_validator_class(self):
        """Registering a validator that is not a BaseValidator subclass raises TypeError."""
        some_check = 'some_check'

        class InvalidClassValidator:
            name = some_check

            @staticmethod
            def validate(value, extra, data):
                return True, ValueError(f'{value} is not expected')

        from data_spec_validator.spec import custom_spec

        assert is_something_error(TypeError, custom_spec.register, dict(some_check=InvalidClassValidator()))

    def test_validator_been_overwritten(self):
        """Registering a second validator under an already-registered check name raises TypeError."""
        duplicate_check = 'd_check'

        class AValidator(BaseValidator):
            name = duplicate_check

            @staticmethod
            def validate(value, extra, data):
                return False, ValueError('a value error')

        class BValidator(BaseValidator):
            name = duplicate_check

            @staticmethod
            def validate(value, extra, data):
                return False, TypeError('a type error')

        from data_spec_validator.spec import custom_spec

        custom_spec.register(dict(duplicate_check=AValidator()))
        # BUG FIX: the return value of is_type_error() was previously discarded,
        # so this test passed even if re-registering raised nothing.
        assert is_type_error(custom_spec.register, dict(duplicate_check=BValidator()))

    def test_custom_validator(self):
        """A registered custom check is usable in a spec via its upper-cased keyword."""
        gt_check = 'gt_check'

        class GreaterThanValidator(BaseValidator):
            name = gt_check

            @staticmethod
            def validate(value, extra, data):
                # Criteria is carried in the Checker's GT_CHECK keyword argument.
                criteria = extra.get(GreaterThanValidator.name)
                return value > criteria, ValueError(f'{value} is not greater than {criteria}')

        from data_spec_validator.spec import custom_spec

        custom_spec.register(dict(gt_check=GreaterThanValidator()))

        class GreaterThanSpec:
            key = Checker([gt_check], GT_CHECK=10)

        ok_data = dict(key=11)
        assert validate_data_spec(ok_data, GreaterThanSpec)

        nok_data = dict(key=10)
        assert is_something_error(ValueError, validate_data_spec, nok_data, GreaterThanSpec)
1347 |
1348 |
class TestCheckKeyword(unittest.TestCase):
    """Casing and blacklist rules for Checker's extra keyword arguments."""

    def test_check_keyword_must_upper_case(self):
        # Fully upper-cased keywords are accepted, whatever their value types.
        assert Checker([STR], WHAT_EVER=True, MUST_BE_UPPER={'1': 1, '2': 2}, CASE=[1, 2])

        # One lower-case character in a keyword is enough to be rejected.
        with self.assertRaises(TypeError):
            Checker([STR], WHAT_eVER=True)

    def test_blacklist_check_keyword(self):
        # Each reserved keyword is rejected on its own.
        for banned in (dict(ALLOW_NONE=True), dict(OP='SOME_OP'), dict(OPTIONAL=True)):
            with self.assertRaises(TypeError):
                Checker([STR], **banned)

        # Multiple reserved keywords are reported together in the error message.
        with self.assertRaises(TypeError) as ctx:
            Checker([ONE_OF], op=CheckerOP.ANY, OP='SOME_OP', ONE_OF=[1, 2], ALLOW_NONE=True)
        self.assertEqual('Forbidden keyword arguments: ALLOW_NONE, OP', str(ctx.exception))
1369 |
1370 |
class TestMessageLevel(unittest.TestCase):
    """Error-message verbosity is controlled by reset_msg_level(vague=...)."""

    def test_vague_message(self):
        def _get_int_spec():
            class IntSpec:
                int_field = Checker([INT])

            return IntSpec

        nok_data = dict(int_field='3')

        # BUG FIX: the original try/except put the assertion inside `except`,
        # so the test passed vacuously when validate_data_spec did not raise.
        # assertRaises now guarantees an exception occurs; `finally` restores
        # the default message level even if the vague branch fails.
        reset_msg_level(vague=True)
        try:
            with self.assertRaises(Exception) as ctx:
                validate_data_spec(nok_data, _get_int_spec())
            assert str(ctx.exception).find('well-formatted') >= 0
        finally:
            reset_msg_level()

        with self.assertRaises(Exception) as ctx:
            validate_data_spec(nok_data, _get_int_spec())
        assert str(ctx.exception).find('reason') >= 0
1391 |
1392 |
class TestMultipleRowSpec(unittest.TestCase):
    """validate_data_spec(..., multirow=True) validates a list of rows against one spec."""

    def test_multirow_spec(self):
        def _get_singlerow_spec():
            class SingleRowSpec:
                i_field = Checker([INT])
                s_field = Checker([STR])

            return SingleRowSpec

        ok_data = [dict(i_field=1, s_field='1'), dict(i_field=2, s_field='2'), dict(i_field=3, s_field='3')]
        assert validate_data_spec(ok_data, _get_singlerow_spec(), multirow=True)

        # BUG FIX: the return value of is_something_error() was previously
        # discarded, so a failing type check went unnoticed; assert it now.
        nok_data = [dict(i_field=1, s_field=1), dict(i_field=2, s_field='2')]
        assert is_something_error(TypeError, validate_data_spec, nok_data, _get_singlerow_spec(), multirow=True)

        # Non-list data with multirow=True raises ValueError naming the spec.
        with self.assertRaises(ValueError) as ctx:
            nok_data = dict(i_field=1, s_field='1')
            validate_data_spec(nok_data, _get_singlerow_spec(), multirow=True)
        assert 'SingleRowSpec' in str(ctx.exception)
1412 |
1413 |
# Allow running this test module directly (``python test_spec.py``).
if __name__ == '__main__':
    unittest.main()
1416 |
--------------------------------------------------------------------------------