├── LICENSE ├── README.md ├── jsf ├── tests │ ├── __init__.py │ ├── data │ │ ├── null.json │ │ ├── boolean.json │ │ ├── empty.json │ │ ├── allof.json │ │ ├── string-enum.json │ │ ├── string.json │ │ ├── string-max-min-length.json │ │ ├── anyof.json │ │ ├── number-exclusive-float.json │ │ ├── number.json │ │ ├── enum.json │ │ ├── array-fixed-int.json │ │ ├── integer.json │ │ ├── number-exclusive.json │ │ ├── unique-items-array.json │ │ ├── oneof.json │ │ ├── array.json │ │ ├── const.json │ │ ├── object-with-optionals.json │ │ ├── oneof_allof.json │ │ ├── array-fixed-str.json │ │ ├── object-enum.json │ │ ├── object-pattern-properties.json │ │ ├── oneof_recursive.json │ │ ├── tuple.json │ │ ├── ordered-refs.json │ │ ├── unordered-refs.json │ │ ├── object-with-examples.json │ │ ├── recursion.json │ │ ├── external-ref-common.json │ │ ├── object.json │ │ ├── array-dicts.json │ │ ├── type-list.json │ │ ├── object_recursive.json │ │ ├── string-content-encoding.json │ │ ├── external-ref.json │ │ ├── type-list-null.json │ │ ├── string-content-type.json │ │ ├── anyof_object.json │ │ ├── oneof_object.json │ │ ├── allof-complex.json │ │ ├── inner-ref.json │ │ ├── complex_recursive.json │ │ ├── string-format.json │ │ ├── object_no_properties.json │ │ └── custom.json │ ├── conftest.py │ ├── BUILD │ ├── test_utils.py │ ├── test_cli.py │ ├── test_model_gen.py │ ├── test_nullable_types_gen.py │ ├── test_parser.py │ └── test_default_fake.py ├── schema_types │ ├── BUILD │ ├── string_utils │ │ ├── BUILD │ │ ├── content_type │ │ │ ├── BUILD │ │ │ ├── image__jpeg.py │ │ │ ├── image__webp.py │ │ │ ├── application__gzip.py │ │ │ ├── application__zip.py │ │ │ ├── text__plain.py │ │ │ ├── application__jwt.py │ │ │ └── __init__.py │ │ └── content_encoding.py │ ├── null.py │ ├── allof.py │ ├── boolean.py │ ├── anyof.py │ ├── oneof.py │ ├── __init__.py │ ├── enum.py │ ├── _tuple.py │ ├── number.py │ ├── array.py │ ├── object.py │ ├── base.py │ └── string.py ├── __init__.py ├── 3rdparty │ └── 
python │ │ ├── pytest-requirements.txt │ │ ├── mypy-requirements.txt │ │ ├── requirements.txt │ │ ├── BUILD │ │ └── pytest.lock ├── cli.py ├── LICENSE ├── BUILD ├── README.md └── parser.py ├── docs ├── help │ ├── get-help.md │ ├── index.md │ └── contributing.md ├── about │ ├── alternatives.md │ ├── about.md │ └── code-of-conduct.md ├── intro-to-json-schema.md ├── user-guide │ ├── advanced.md │ └── first-steps.md ├── BUILD ├── Dockerfile ├── assets │ ├── imgs │ │ ├── ui-1.png │ │ ├── ui-2.png │ │ ├── ui-3.png │ │ ├── ui-4.png │ │ └── index.png │ └── css │ │ └── styles.css ├── requirements.txt ├── features.md └── index.md ├── examples ├── fastapi │ ├── requirements.txt │ ├── BUILD │ ├── Dockerfile │ ├── api.py │ ├── docker-compose.yaml │ ├── README.md │ └── model.py └── flatfile │ ├── requirements.txt │ ├── output.xlsx │ ├── output.parquet │ ├── BUILD │ ├── output.csv │ ├── schema.json │ ├── output.jsonl │ ├── output.json │ ├── README.md │ └── main.py ├── .flake8 ├── .vscode └── settings.json ├── Makefile ├── Dockerfile ├── docker-compose.yaml ├── .github ├── dependabot.yml └── workflows │ ├── mkdoc-gh-pages.yaml │ ├── python-publish.yaml │ └── python-package.yaml ├── pyproject.toml ├── pants.toml ├── SECURITY.md ├── .gitignore └── mkdocs.yml /LICENSE: -------------------------------------------------------------------------------- 1 | jsf/LICENSE -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | jsf/README.md -------------------------------------------------------------------------------- /jsf/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/help/get-help.md: -------------------------------------------------------------------------------- 1 | ## Coming soon ... 
-------------------------------------------------------------------------------- /jsf/schema_types/BUILD: -------------------------------------------------------------------------------- 1 | python_sources() 2 | -------------------------------------------------------------------------------- /docs/about/alternatives.md: -------------------------------------------------------------------------------- 1 | ## Coming soon ... -------------------------------------------------------------------------------- /docs/intro-to-json-schema.md: -------------------------------------------------------------------------------- 1 | ## Coming soon ... -------------------------------------------------------------------------------- /docs/user-guide/advanced.md: -------------------------------------------------------------------------------- 1 | ## Coming soon ... -------------------------------------------------------------------------------- /docs/user-guide/first-steps.md: -------------------------------------------------------------------------------- 1 | ## Coming soon ... 
-------------------------------------------------------------------------------- /jsf/__init__.py: -------------------------------------------------------------------------------- 1 | from jsf.parser import JSF 2 | -------------------------------------------------------------------------------- /jsf/tests/data/null.json: -------------------------------------------------------------------------------- 1 | { "type": "null" } 2 | -------------------------------------------------------------------------------- /docs/BUILD: -------------------------------------------------------------------------------- 1 | python_requirements( 2 | name="reqs", 3 | ) 4 | -------------------------------------------------------------------------------- /examples/fastapi/requirements.txt: -------------------------------------------------------------------------------- 1 | fastapi 2 | uvicorn 3 | jsf -------------------------------------------------------------------------------- /jsf/schema_types/string_utils/BUILD: -------------------------------------------------------------------------------- 1 | python_sources(name="src") -------------------------------------------------------------------------------- /docs/about/about.md: -------------------------------------------------------------------------------- 1 | About JSF, its design, inspiration and more. 
🤓 -------------------------------------------------------------------------------- /jsf/tests/data/boolean.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "boolean" 3 | } 4 | -------------------------------------------------------------------------------- /jsf/tests/data/empty.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Any valid JSON" 3 | } -------------------------------------------------------------------------------- /docs/help/index.md: -------------------------------------------------------------------------------- 1 | Help and get help, contribute, get involved. 🤝 2 | -------------------------------------------------------------------------------- /jsf/schema_types/string_utils/content_type/BUILD: -------------------------------------------------------------------------------- 1 | python_sources(name="src") -------------------------------------------------------------------------------- /examples/flatfile/requirements.txt: -------------------------------------------------------------------------------- 1 | jsf 2 | pandas 3 | openpyxl 4 | PyArrow 5 | jsonlines -------------------------------------------------------------------------------- /jsf/3rdparty/python/pytest-requirements.txt: -------------------------------------------------------------------------------- 1 | typer>=0.7.0 2 | pyjwt 3 | pytest-cov 4 | -------------------------------------------------------------------------------- /docs/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11 2 | ADD requirements.txt . 
3 | RUN pip install -r requirements.txt -------------------------------------------------------------------------------- /docs/assets/imgs/ui-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghandic/jsf/HEAD/docs/assets/imgs/ui-1.png -------------------------------------------------------------------------------- /docs/assets/imgs/ui-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghandic/jsf/HEAD/docs/assets/imgs/ui-2.png -------------------------------------------------------------------------------- /docs/assets/imgs/ui-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghandic/jsf/HEAD/docs/assets/imgs/ui-3.png -------------------------------------------------------------------------------- /docs/assets/imgs/ui-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghandic/jsf/HEAD/docs/assets/imgs/ui-4.png -------------------------------------------------------------------------------- /docs/assets/imgs/index.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghandic/jsf/HEAD/docs/assets/imgs/index.png -------------------------------------------------------------------------------- /examples/fastapi/BUILD: -------------------------------------------------------------------------------- 1 | python_sources() 2 | 3 | python_requirements( 4 | name="reqs", 5 | ) 6 | -------------------------------------------------------------------------------- /examples/flatfile/output.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghandic/jsf/HEAD/examples/flatfile/output.xlsx -------------------------------------------------------------------------------- 
/examples/flatfile/output.parquet: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghandic/jsf/HEAD/examples/flatfile/output.parquet -------------------------------------------------------------------------------- /jsf/3rdparty/python/mypy-requirements.txt: -------------------------------------------------------------------------------- 1 | mypy==1.1.1 2 | mypy-typing-asserts 3 | types-requests 4 | types-jsonschema -------------------------------------------------------------------------------- /jsf/tests/data/allof.json: -------------------------------------------------------------------------------- 1 | { 2 | "allOf": [ 3 | { "type": "string" }, 4 | { "maxLength": 5 } 5 | ] 6 | } -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | select = C,E,F,W,B,B9 3 | ignore = E203, E501, W503 4 | exclude = .git,__pycache__,tests,__init__.py -------------------------------------------------------------------------------- /jsf/tests/data/string-enum.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "string", 3 | "enum": ["Street", "Avenue", "Boulevard"] 4 | } 5 | -------------------------------------------------------------------------------- /jsf/tests/data/string.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "string", 3 | "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$" 4 | } 5 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.defaultInterpreterPath": "./dist/export/python/virtualenvs/python-default/3.8.18" 3 | } -------------------------------------------------------------------------------- 
/jsf/tests/data/string-max-min-length.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "string", 3 | "maxLength": 2, 4 | "minLength": 2 5 | } 6 | -------------------------------------------------------------------------------- /examples/fastapi/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8 2 | 3 | ADD requirements.txt . 4 | RUN pip install -r requirements.txt 5 | 6 | WORKDIR /opt/working -------------------------------------------------------------------------------- /examples/flatfile/BUILD: -------------------------------------------------------------------------------- 1 | python_sources(interpreter_constraints=["CPython>=3.10,<4"]) 2 | 3 | python_requirements( 4 | name="reqs", 5 | ) 6 | -------------------------------------------------------------------------------- /jsf/tests/data/anyof.json: -------------------------------------------------------------------------------- 1 | { 2 | "anyOf": [ 3 | { "type": "string", "maxLength": 5 }, 4 | { "type": "number", "minimum": 0 } 5 | ] 6 | } -------------------------------------------------------------------------------- /jsf/tests/data/number-exclusive-float.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "number", 3 | "exclusiveMinimum": 600, 4 | "exclusiveMaximum": 700 5 | } 6 | -------------------------------------------------------------------------------- /jsf/tests/data/number.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "number", 3 | "minimum": 600, 4 | "maximum": 700, 5 | "exclusiveMinimum": true 6 | } 7 | -------------------------------------------------------------------------------- /jsf/tests/data/enum.json: -------------------------------------------------------------------------------- 1 | { 2 | "enum": [ 3 | "red", 4 | "amber", 5 | "green", 6 | null, 7 | 42 8 | ] 9 | 
} -------------------------------------------------------------------------------- /jsf/tests/data/array-fixed-int.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "array", 3 | "$fixed": 5, 4 | "items": { 5 | "enum": ["red", "amber", "green"] 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /jsf/3rdparty/python/requirements.txt: -------------------------------------------------------------------------------- 1 | faker>=15.3.4 2 | jsonschema>=4.17.3 3 | pydantic >= 2.0.0 4 | rstr>=3.2.0 5 | smart-open[http]>=6.3.0 6 | typing-extensions>=4.9.0 7 | -------------------------------------------------------------------------------- /jsf/tests/data/integer.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "integer", 3 | "minimum": 600, 4 | "maximum": 700, 5 | "multipleOf": 7, 6 | "exclusiveMinimum": true 7 | } 8 | -------------------------------------------------------------------------------- /jsf/tests/data/number-exclusive.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "number", 3 | "minimum": 600, 4 | "maximum": 700, 5 | "exclusiveMinimum": false, 6 | "exclusiveMaximum": true 7 | } 8 | -------------------------------------------------------------------------------- /jsf/tests/data/unique-items-array.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "array", 3 | "items": { "type": "boolean" }, 4 | "uniqueItems": true, 5 | "minItems": 2, 6 | "maxItems": 2 7 | } 8 | -------------------------------------------------------------------------------- /jsf/tests/data/oneof.json: -------------------------------------------------------------------------------- 1 | { 2 | "oneOf": [ 3 | { 4 | "type": "string", 5 | "maxLength": 5 6 | }, 7 | { 8 | "type": "boolean" 9 | } 10 | ] 11 | } 12 | 
-------------------------------------------------------------------------------- /jsf/tests/conftest.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest # pants: no-infer-dep 4 | 5 | 6 | @pytest.fixture() 7 | def TestData(): 8 | yield Path(__file__).parent.resolve() / "data" 9 | -------------------------------------------------------------------------------- /jsf/tests/data/array.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "array", 3 | "minItems": 1, 4 | "maxItems": 5, 5 | "uniqueItems": true, 6 | "items": { 7 | "enum": ["red", "amber", "green"] 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /jsf/tests/data/const.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "required": ["country"], 4 | "properties": { 5 | "country": { 6 | "const": "United States of America" 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /jsf/tests/data/object-with-optionals.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "required": ["name"], 4 | "properties": { 5 | "name": { "type": "string" }, 6 | "credit_card": { "type": "number" } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /examples/flatfile/output.csv: -------------------------------------------------------------------------------- 1 | name,email 2 | Jessica Bennett,eric90@example.org 3 | Christine Sanchez,epeterson@example.net 4 | Emily Ayala,wreed@example.org 5 | Julia Dickerson,dwaynehoward@example.com 6 | Justin Miller,emiller@example.com 7 | -------------------------------------------------------------------------------- /jsf/tests/data/oneof_allof.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "oneOf": [ 3 | { 4 | "allOf": [ 5 | { "type": "string" }, 6 | { "maxLength": 5 } 7 | ] 8 | }, 9 | { 10 | "type": "boolean" 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: build 2 | 3 | test: 4 | @pants test :: 5 | 6 | lint: 7 | @pants lint :: 8 | 9 | fmt: 10 | @pants fmt :: 11 | 12 | build: 13 | @pants package :: 14 | 15 | check: 16 | @pants check :: 17 | 18 | clean: 19 | @rm -rf dist/ .pids/ .pants.d/ -------------------------------------------------------------------------------- /jsf/tests/data/array-fixed-str.json: -------------------------------------------------------------------------------- 1 | { 2 | "$state": { 3 | "quantity": "lambda: 50" 4 | }, 5 | "type": "array", 6 | "$fixed": "lambda: state['#']['quantity']", 7 | "items": { 8 | "enum": ["red", "amber", "green"] 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8 AS builder 2 | 3 | WORKDIR /opt/working 4 | COPY . . 
5 | RUN make build 6 | 7 | FROM python:3.8.16-alpine3.17 8 | WORKDIR /root/ 9 | COPY --from=builder /opt/working/dist/jsf-*.tar.gz ./ 10 | RUN pip install /root/jsf-*.tar.gz 11 | CMD ["jsf"] -------------------------------------------------------------------------------- /jsf/tests/data/object-enum.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "enum": [ 4 | { 5 | "code": "1", 6 | "value": "CHILD" 7 | }, 8 | { 9 | "code": "2", 10 | "value": "ADULT" 11 | } 12 | ] 13 | } -------------------------------------------------------------------------------- /jsf/tests/BUILD: -------------------------------------------------------------------------------- 1 | python_test_utils( 2 | name="test_utils", 3 | ) 4 | 5 | files(name="tests", sources=["data/*.json", "*.py"]) 6 | 7 | python_tests( 8 | name="pytest", 9 | dependencies=[":tests"], 10 | interpreter_constraints=parametrize(py3=[">=3.8,<4"]), 11 | ) 12 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | mkdocstrings[python]==0.23.0 2 | mkdocs-material==9.4.7 3 | # For Material for MkDocs, Chinese search 4 | jieba==0.42.1 5 | # For image processing by Material for MkDocs 6 | pillow==10.2.0 7 | # For image processing by Material for MkDocs 8 | cairosvg==2.7.0 9 | termynal==0.11.1 -------------------------------------------------------------------------------- /examples/fastapi/api.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI 2 | 3 | from jsf import JSF 4 | 5 | app = FastAPI(docs_url="/") 6 | generator = JSF.from_json("custom.json") 7 | 8 | 9 | @app.get("/generate", response_model=generator.pydantic()) 10 | def read_root(): 11 | return generator.generate() 12 | -------------------------------------------------------------------------------- 
/examples/flatfile/schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "properties": { 4 | "name": {"type": "string", "$provider": "faker.name", "title": "Full Name"}, 5 | "email": {"type": "string", "$provider": "faker.email", "title": "Email"} 6 | }, 7 | "required": ["name", "email"] 8 | } -------------------------------------------------------------------------------- /jsf/3rdparty/python/BUILD: -------------------------------------------------------------------------------- 1 | python_requirements(name="reqs") 2 | 3 | python_requirements( 4 | name="pytest", 5 | source="pytest-requirements.txt", 6 | resolve="pytest", 7 | ) 8 | 9 | python_requirements( 10 | name="mypy", 11 | source="mypy-requirements.txt", 12 | resolve="mypy", 13 | ) -------------------------------------------------------------------------------- /jsf/tests/data/object-pattern-properties.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "required": ["name", "credit_card", "test"], 4 | "properties": { 5 | "name": { "type": "string" } 6 | }, 7 | "patternProperties": { 8 | "^S_": { "type": "string" }, 9 | "^I_": { "type": "integer" } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /examples/flatfile/output.jsonl: -------------------------------------------------------------------------------- 1 | {"name": "Judith Lara", "email": "rhowe@example.com"} 2 | {"name": "Joseph Warren", "email": "carrollandrew@example.net"} 3 | {"name": "Marilyn Thompson", "email": "tyler22@example.net"} 4 | {"name": "Teresa Brown", "email": "tharris@example.net"} 5 | {"name": "Derek Singleton", "email": "lindsay66@example.org"} 6 | -------------------------------------------------------------------------------- /examples/flatfile/output.json: -------------------------------------------------------------------------------- 1 | 
[{"name": "Melissa Jackson", "email": "reneebullock@example.net"}, {"name": "Miss Tina Morales", "email": "alexandergeorge@example.net"}, {"name": "Jake Vazquez", "email": "jeffreyreeves@example.com"}, {"name": "Terri Taylor", "email": "nperez@example.net"}, {"name": "Kayla Williams", "email": "davidprice@example.org"}] -------------------------------------------------------------------------------- /examples/flatfile/README.md: -------------------------------------------------------------------------------- 1 | # Flat file example 2 | 3 | ## Dependencies 4 | 5 | * Typer 6 | * pandas 7 | * openpyxl 8 | * PyArrow 9 | * jsonlines 10 | 11 | ## Usage 12 | 13 | Using `main.py` in current example folder 14 | 15 | ```bash 16 | python main.py --schema schema.json --records 5 --output-format parquet --output output.parquet 17 | ``` 18 | -------------------------------------------------------------------------------- /examples/fastapi/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | services: 3 | fastapi-jsf: 4 | build: . 
5 | image: challisa/fastapi-jsf 6 | container_name: fastapi-jsf 7 | ports: 8 | - "8080:8080" 9 | volumes: 10 | - .:/opt/working 11 | entrypoint: uvicorn api:app --reload --host 0.0.0.0 --port 8080 12 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | services: 3 | mkdocs-jsf: 4 | build: ./docs 5 | image: challisa/mkdocs-jsf 6 | container_name: mkdocs-jsf 7 | ports: 8 | - "8000:8000" 9 | volumes: 10 | - ./docs:/docs 11 | - ./mkdocs.yml:/mkdocs.yml 12 | entrypoint: mkdocs serve --dev-addr 0.0.0.0:8000 13 | -------------------------------------------------------------------------------- /examples/fastapi/README.md: -------------------------------------------------------------------------------- 1 | # FastAPI Example 2 | 3 | ## Dependancies 4 | 5 | * FastAPI 6 | * uvicorn 7 | * datamodel-code-generator 8 | 9 | ## Usage 10 | 11 | Using `api.py` and `custom.json` in current example folder 12 | 13 | ```bash 14 | datamodel-codegen --input jsf/tests/data/custom.json --output model.py 15 | uvicorn api:app --reload --host 0.0.0.0 --port 8080 16 | ``` 17 | -------------------------------------------------------------------------------- /jsf/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import pytest # pants: no-infer-dep 2 | from jsf.schema_types.string import random_fixed_length_sentence 3 | 4 | 5 | @pytest.mark.parametrize( 6 | "_min, _max", 7 | [(0, 1), (0, 0), (0, 10), (10, 20), (10, 2000)], 8 | ) 9 | def test_random_fixed_length_sentence(_min, _max): 10 | gen = random_fixed_length_sentence(_min, _max) 11 | assert len(gen) <= _max 12 | assert len(gen) >= _min 13 | -------------------------------------------------------------------------------- /jsf/schema_types/string_utils/content_type/image__jpeg.py: 
-------------------------------------------------------------------------------- 1 | import random 2 | 3 | import requests # pants: no-infer-dep 4 | 5 | from jsf.schema_types.string_utils.content_encoding import bytes_str_repr 6 | 7 | 8 | def random_jpg(*args, **kwargs) -> str: 9 | return bytes_str_repr( 10 | requests.get( 11 | f"https://picsum.photos/{random.randint(1,50)*10}/{random.randint(1,50)*10}.jpg" 12 | ).content 13 | ) 14 | -------------------------------------------------------------------------------- /jsf/schema_types/string_utils/content_type/image__webp.py: -------------------------------------------------------------------------------- 1 | import random 2 | 3 | import requests # pants: no-infer-dep 4 | 5 | from jsf.schema_types.string_utils.content_encoding import bytes_str_repr 6 | 7 | 8 | def random_webp(*args, **kwargs) -> str: 9 | return bytes_str_repr( 10 | requests.get( 11 | f"https://picsum.photos/{random.randint(1,50)*10}/{random.randint(1,50)*10}.webp" 12 | ).content 13 | ) 14 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "daily" 8 | commit-message: 9 | prefix: ⬆ 10 | # Python 11 | - package-ecosystem: "pip" 12 | directory: "/jsf/" 13 | schedule: 14 | interval: "daily" 15 | commit-message: 16 | prefix: ⬆ 17 | ignore: 18 | - dependency-name: "mkdocs" 19 | -------------------------------------------------------------------------------- /jsf/tests/data/oneof_recursive.json: -------------------------------------------------------------------------------- 1 | { 2 | "$ref": "#/definitions/tree", 3 | "definitions": { 4 | "tree": { 5 | "type": "array", 6 | "items": { 7 | "oneOf": [ 8 | { 9 | "$ref": "#/definitions/tree" 10 | }, 11 | { 12 | "type": "integer" 13 | } 14 | ] 
15 | } 16 | } 17 | } 18 | } -------------------------------------------------------------------------------- /jsf/tests/data/tuple.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "array", 3 | "items": [ 4 | { 5 | "type": "number" 6 | }, 7 | { 8 | "type": "string" 9 | }, 10 | { 11 | "type": "string", 12 | "enum": ["Street", "Avenue", "Boulevard"] 13 | }, 14 | { 15 | "type": "string", 16 | "enum": ["NW", "NE", "SW", "SE"] 17 | } 18 | ], 19 | "additionalItems": { "type": "string" } 20 | } 21 | -------------------------------------------------------------------------------- /jsf/tests/data/ordered-refs.json: -------------------------------------------------------------------------------- 1 | { 2 | "$defs": { 3 | "SomeEnum": {"enum": ["A", "B"], "title": "SomeEnum", "type": "string"}, 4 | "Foo": { 5 | "properties": {"bar": {"$ref": "#/$defs/SomeEnum"}}, 6 | "required": ["bar"], 7 | "title": "Foo", 8 | "type": "object" 9 | } 10 | }, 11 | "properties": {"foobar": {"$ref": "#/$defs/Foo"}}, 12 | "required": ["foobar"], 13 | "title": "FooBarObject", 14 | "type": "object" 15 | } -------------------------------------------------------------------------------- /jsf/tests/data/unordered-refs.json: -------------------------------------------------------------------------------- 1 | { 2 | "$defs": { 3 | "Foo": { 4 | "properties": {"bar": {"$ref": "#/$defs/SomeEnum"}}, 5 | "required": ["bar"], 6 | "title": "Foo", 7 | "type": "object" 8 | }, 9 | "SomeEnum": {"enum": ["A", "B"], "title": "SomeEnum", "type": "string"} 10 | }, 11 | "properties": {"foobar": {"$ref": "#/$defs/Foo"}}, 12 | "required": ["foobar"], 13 | "title": "FooBarObject", 14 | "type": "object" 15 | } -------------------------------------------------------------------------------- /jsf/tests/data/object-with-examples.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Pet", 3 | "type": "object", 4 | 
"properties": { 5 | "name": { 6 | "type": "string", 7 | "examples": ["Chop", "Luna", "Thanos"] 8 | }, 9 | "species": { 10 | "type": "string", 11 | "examples": ["Dog", "Cat", "Rabbit"] 12 | }, 13 | "breed": { 14 | "type": "string", 15 | "default": "Mixed Breed", 16 | "examples": ["Labrador Retriever", "Siamese", "Golden Retriever"] 17 | } 18 | }, 19 | "required": ["name", "species"] 20 | } 21 | -------------------------------------------------------------------------------- /jsf/schema_types/null.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Tuple, Type 2 | 3 | from jsf.schema_types.base import BaseSchema, ProviderNotSetException 4 | 5 | 6 | class Null(BaseSchema): 7 | def generate(self, context: Dict[str, Any]) -> None: 8 | try: 9 | return super().generate(context) 10 | except ProviderNotSetException: 11 | return None 12 | 13 | def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]: 14 | return self.to_pydantic(context, type(None)) 15 | 16 | @classmethod 17 | def from_dict(cls, d: Dict[str, Any]) -> "Null": 18 | return Null(**d) 19 | -------------------------------------------------------------------------------- /jsf/tests/data/recursion.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | 4 | "definitions": { 5 | "person": { 6 | "type": "object", 7 | "properties": { 8 | "name": { "type": "string" }, 9 | "children": { 10 | "type": "array", 11 | "items": { "$ref": "#/definitions/person" }, 12 | "default": [] 13 | } 14 | } 15 | } 16 | }, 17 | 18 | "type": "object", 19 | 20 | "properties": { 21 | "person": { "$ref": "#/definitions/person" } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /.github/workflows/mkdoc-gh-pages.yaml: -------------------------------------------------------------------------------- 1 | name: Publish docs via 
from typing import Any, Dict, Optional

from jsf.schema_types.base import BaseSchema, ProviderNotSetException


class AllOf(BaseSchema):
    """Schema node for JSON Schema ``allOf`` composition.

    Generation delegates to ``combined_schema`` — a single schema node
    representing the merge of every subschema in the ``allOf`` list —
    when no custom provider is configured.
    """

    # Presumably populated by the parser after construction — TODO confirm.
    # Typed Optional because the declared default is None; the previous
    # bare `BaseSchema = None` annotation was inconsistent with that default.
    combined_schema: Optional[BaseSchema] = None

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "AllOf":
        """Construct an ``AllOf`` node from a parsed JSON-schema dict."""
        return AllOf(**d)

    def generate(self, context: Dict[str, Any]) -> Optional[Any]:
        """Return a provider value, or generate from the merged subschema."""
        try:
            return super().generate(context)
        except ProviderNotSetException:
            return self.combined_schema.generate(context)

    def model(self, context: Dict[str, Any]) -> None:
        # Combinator nodes do not map to a standalone pydantic field.
        pass
import random
from typing import Any, Dict, Optional, Tuple, Type

from jsf.schema_types.base import BaseSchema, ProviderNotSetException


class Boolean(BaseSchema):
    """Schema node for the JSON Schema ``boolean`` type."""

    def generate(self, context: Dict[str, Any]) -> Optional[bool]:
        """Return a provider-supplied value, or a uniformly random bool."""
        try:
            return super().generate(context)
        except ProviderNotSetException:
            return random.choice([True, False])

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Build the pydantic field for this node (``bool``)."""
        return self.to_pydantic(context, bool)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "Boolean":
        """Construct a ``Boolean`` node from a parsed JSON-schema dict."""
        return cls(**d)
from pathlib import Path

import typer  # pants: no-infer-dep

from jsf.parser import JSF

app = typer.Typer()


@app.command()
def main(
    schema: Path = typer.Option(
        ...,
        exists=True,
        file_okay=True,
        dir_okay=False,
        writable=False,
        readable=True,
        resolve_path=True,
    ),
    instance: Path = typer.Option(
        ...,
        exists=False,
        file_okay=True,
        dir_okay=False,
        writable=True,
        readable=False,
        resolve_path=True,
    ),
):
    # Load the JSON schema, generate one fake instance, and write it out.
    # (Deliberately a comment, not a docstring, so the CLI help text is unchanged.)
    faker = JSF.from_json(schema)
    faker.to_json(instance)
import json
from pathlib import Path

from jsf.cli import app
from jsonschema import validate
from typer.testing import CliRunner  # pants: no-infer-dep

runner = CliRunner()


def test_app(TestData):
    """End-to-end CLI check: generate an instance file via the ``jsf`` CLI
    and validate it against the schema it was produced from.

    ``TestData`` is a fixture resolving to the test-data directory.
    """
    file = Path("tmp.json")
    try:
        result = runner.invoke(
            app, ["--schema", TestData / "custom.json", "--instance", "tmp.json"]
        )
        assert result.exit_code == 0
        assert file.exists()
        with open(file) as f:
            instance = json.load(f)
        with open(TestData / "custom.json") as f:
            schema = json.load(f)
        validate(instance, schema)
    finally:
        # missing_ok: if the CLI failed before writing the file, cleanup must
        # not raise FileNotFoundError and mask the real assertion failure.
        file.unlink(missing_ok=True)
"type": [ 6 | "null", 7 | "boolean" 8 | ] 9 | }, 10 | "randTypeValue": { 11 | "type": [ 12 | "boolean", 13 | "number", 14 | "integer", 15 | "string" 16 | ] 17 | }, 18 | "int": { 19 | "type": [ 20 | "integer" 21 | ] 22 | }, 23 | "null": { 24 | "type": [ 25 | "null" 26 | ] 27 | } 28 | }, 29 | "additionalProperties": false, 30 | "required": [ 31 | "randTypeValueNullable", 32 | "randTypeValue", 33 | "int", 34 | "null" 35 | ] 36 | } -------------------------------------------------------------------------------- /jsf/tests/data/object_recursive.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "properties": { 4 | "id": { 5 | "type": "string" 6 | }, 7 | "tree": { 8 | "$ref": "#/definitions/tree" 9 | } 10 | }, 11 | "required": [ 12 | "id" 13 | ], 14 | "definitions": { 15 | "tree": { 16 | "type": "object", 17 | "properties": { 18 | "value": { 19 | "type": "string" 20 | }, 21 | "branches": { 22 | "type": "array", 23 | "items": { 24 | "$ref": "#/definitions/tree" 25 | }, 26 | "minItems": 1 27 | } 28 | }, 29 | "required": [ 30 | "value" 31 | ] 32 | } 33 | } 34 | } -------------------------------------------------------------------------------- /jsf/tests/data/string-content-encoding.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "properties": { 4 | "7-bit": { 5 | "type": "string", 6 | "contentEncoding": "7-bit" 7 | }, 8 | "8-bit": { 9 | "type": "string", 10 | "contentEncoding": "8-bit" 11 | }, 12 | "binary": { 13 | "type": "string", 14 | "contentEncoding": "binary" 15 | }, 16 | "quoted-printable": { 17 | "type": "string", 18 | "contentEncoding": "quoted-printable" 19 | }, 20 | "base-16": { 21 | "type": "string", 22 | "contentEncoding": "base-16" 23 | }, 24 | "base-32": { 25 | "type": "string", 26 | "contentEncoding": "base-32" 27 | }, 28 | "base-64": { 29 | "type": "string", 30 | "contentEncoding": "base-64" 31 | } 32 | }, 33 | 
"required": ["7-bit", "8-bit", "binary", "quoted-printable", "base-16", "base-32", "base-64"] 34 | } 35 | -------------------------------------------------------------------------------- /jsf/schema_types/anyof.py: -------------------------------------------------------------------------------- 1 | import random 2 | from typing import Any, Dict, List, Optional 3 | 4 | from jsf.schema_types.base import BaseSchema, ProviderNotSetException 5 | 6 | 7 | class AnyOf(BaseSchema): 8 | schemas: List[BaseSchema] = None 9 | 10 | @classmethod 11 | def from_dict(cls, d: Dict[str, Any]) -> "AnyOf": 12 | return AnyOf(**d) 13 | 14 | def generate(self, context: Dict[str, Any]) -> Optional[Any]: 15 | try: 16 | return super().generate(context) 17 | except ProviderNotSetException: 18 | filtered_schemas = [] 19 | if context["state"]["__depth__"] > self.max_recursive_depth: 20 | filtered_schemas = [schema for schema in self.schemas if not schema.is_recursive] 21 | return random.choice(filtered_schemas or self.schemas).generate(context) 22 | 23 | def model(self, context: Dict[str, Any]) -> None: 24 | pass 25 | -------------------------------------------------------------------------------- /jsf/schema_types/oneof.py: -------------------------------------------------------------------------------- 1 | import random 2 | from typing import Any, Dict, List, Optional 3 | 4 | from jsf.schema_types.base import BaseSchema, ProviderNotSetException 5 | 6 | 7 | class OneOf(BaseSchema): 8 | schemas: List[BaseSchema] = None 9 | 10 | @classmethod 11 | def from_dict(cls, d: Dict[str, Any]) -> "OneOf": 12 | return OneOf(**d) 13 | 14 | def generate(self, context: Dict[str, Any]) -> Optional[List[Any]]: 15 | try: 16 | return super().generate(context) 17 | except ProviderNotSetException: 18 | filtered_schemas = [] 19 | if context["state"]["__depth__"] > self.max_recursive_depth: 20 | filtered_schemas = [schema for schema in self.schemas if not schema.is_recursive] 21 | return random.choice(filtered_schemas 
or self.schemas).generate(context) 22 | 23 | def model(self, context: Dict[str, Any]) -> None: 24 | pass 25 | -------------------------------------------------------------------------------- /jsf/tests/data/external-ref.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-04/schema#", 3 | "type": "object", 4 | "additionalProperties": false, 5 | "required": ["ReferenceToLocalSchema", "ReferenceToExternalSchema"], 6 | "properties": { 7 | "ReferenceToLocalSchema": { 8 | "$ref": "#/definitions/LocalType" 9 | }, 10 | "ReferenceToExternalSchema": { 11 | "$ref": "https://raw.githubusercontent.com/ghandic/jsf/main/jsf/tests/data/external-ref-common.json#/definitions/ExternalType" 12 | } 13 | }, 14 | "definitions": { 15 | "LocalType": { 16 | "type": "object", 17 | "additionalProperties": false, 18 | "properties": { 19 | "no-write": { 20 | "type": "boolean", 21 | "default": false 22 | } 23 | }, 24 | "required": ["no-write"] 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /jsf/tests/data/type-list-null.json: -------------------------------------------------------------------------------- 1 | { 2 | "str": { 3 | "type": ["string", "null"] 4 | }, 5 | "int": { 6 | "type": ["integer", "null"] 7 | }, 8 | "num": { 9 | "type": ["number", "null"] 10 | }, 11 | "bool": { 12 | "type": ["boolean", "null"] 13 | }, 14 | "enum": { 15 | "enum": ["r", "g", "b", null] 16 | }, 17 | "arr": { 18 | "type": ["array", "null"], 19 | "items": {"type": "integer"} 20 | }, 21 | "arr_nested": { 22 | "type": "array", 23 | "items": {"type": ["integer", "null"]} 24 | }, 25 | "obj": { 26 | "type": ["object", "null"], 27 | "required": ["req"], 28 | "properties": { 29 | "req": { 30 | "type": "boolean" 31 | }, 32 | "non_req": { 33 | "type": "integer" 34 | } 35 | } 36 | }, 37 | "obj_nested": { 38 | "type": "object", 39 | "required": ["req"], 40 | "properties": { 41 | 
"req": { 42 | "type": ["boolean", "null"] 43 | } 44 | } 45 | } 46 | } -------------------------------------------------------------------------------- /jsf/tests/data/string-content-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "properties": { 4 | "application/jwt": { 5 | "type": "string", 6 | "contentMediaType": "application/jwt" 7 | }, 8 | "application/zip": { 9 | "type": "string", 10 | "contentMediaType": "application/zip" 11 | }, 12 | "application/gzip": { 13 | "type": "string", 14 | "contentMediaType": "application/gzip" 15 | }, 16 | "text/plain": { 17 | "type": "string", 18 | "contentMediaType": "text/plain", 19 | "minLength": 5, 20 | "maxLength": 10 21 | }, 22 | "image/jpeg": { 23 | "type": "string", 24 | "contentMediaType": "image/jpeg" 25 | }, 26 | "image/webp": { 27 | "type": "string", 28 | "contentMediaType": "image/webp" 29 | } 30 | }, 31 | "required": [ 32 | "application/jwt", 33 | "application/zip", 34 | "application/gzip", 35 | "text/plain", 36 | "image/jpeg", 37 | "image/webp" 38 | ] 39 | } 40 | -------------------------------------------------------------------------------- /jsf/schema_types/string_utils/content_type/application__zip.py: -------------------------------------------------------------------------------- 1 | import io 2 | import random 3 | import zipfile 4 | from typing import Tuple 5 | 6 | import rstr 7 | 8 | from jsf.schema_types.string_utils.content_encoding import bytes_str_repr 9 | from jsf.schema_types.string_utils.content_type.text__plain import random_fixed_length_sentence 10 | 11 | 12 | def create_random_file_name() -> str: 13 | return rstr.xeger(r"[a-zA-Z0-9]+\.txt") 14 | 15 | 16 | def create_random_file() -> Tuple[str, io.BytesIO]: 17 | return (create_random_file_name(), io.BytesIO(random_fixed_length_sentence().encode("utf-8"))) 18 | 19 | 20 | def create_random_zip(*args, **kwargs) -> str: 21 | zip_buffer = io.BytesIO() 22 | 23 | with 
zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False) as zip_file: 24 | for file_name, data in [create_random_file() for _ in range(random.randint(1, 10))]: 25 | zip_file.writestr(file_name, data.getvalue()) 26 | 27 | return bytes_str_repr(zip_buffer.getvalue()) 28 | -------------------------------------------------------------------------------- /jsf/tests/data/anyof_object.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "required": [ 4 | "ob" 5 | ], 6 | "properties": { 7 | "ob": { 8 | "type": "object", 9 | "anyOf": [ 10 | { 11 | "type": "object", 12 | "required": [ 13 | "name", 14 | "human_readable_value" 15 | ], 16 | "properties": { 17 | "name": { 18 | "type": "string" 19 | }, 20 | "human_readable_value": { 21 | "type": "string" 22 | } 23 | } 24 | }, 25 | { 26 | "type": "object", 27 | "required": [ 28 | "id", 29 | "machine_readable_value" 30 | ], 31 | "properties": { 32 | "id": { 33 | "type": "string" 34 | }, 35 | "machine_readable_value": { 36 | "type": "string" 37 | } 38 | } 39 | } 40 | ] 41 | } 42 | } 43 | } -------------------------------------------------------------------------------- /jsf/tests/data/oneof_object.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "required": [ 4 | "ob" 5 | ], 6 | "properties": { 7 | "ob": { 8 | "type": "object", 9 | "oneOf": [ 10 | { 11 | "type": "object", 12 | "required": [ 13 | "name", 14 | "human_readable_value" 15 | ], 16 | "properties": { 17 | "name": { 18 | "type": "string" 19 | }, 20 | "human_readable_value": { 21 | "type": "string" 22 | } 23 | } 24 | }, 25 | { 26 | "type": "object", 27 | "required": [ 28 | "id", 29 | "machine_readable_value" 30 | ], 31 | "properties": { 32 | "id": { 33 | "type": "string" 34 | }, 35 | "machine_readable_value": { 36 | "type": "string" 37 | } 38 | } 39 | } 40 | ] 41 | } 42 | } 43 | } 
from typing import Union

from jsf.schema_types._tuple import JSFTuple
from jsf.schema_types.allof import AllOf
from jsf.schema_types.anyof import AnyOf
from jsf.schema_types.array import Array
from jsf.schema_types.boolean import Boolean
from jsf.schema_types.enum import JSFEnum
from jsf.schema_types.null import Null
from jsf.schema_types.number import Integer, Number
from jsf.schema_types.object import Object
from jsf.schema_types.oneof import OneOf
from jsf.schema_types.string import String

# Lookup table: JSON Schema primitive "type" keyword -> generator class.
Primitives = {
    "number": Number,
    "string": String,
    "integer": Integer,
    "object": Object,
    "boolean": Boolean,
    "null": Null,
}

# Union of every schema-node class the package exposes.
AllTypes = Union[
    JSFEnum,
    Object,
    Array,
    JSFTuple,
    String,
    Boolean,
    Null,
    Number,
    Integer,
    AnyOf,
    AllOf,
    OneOf,
]
# Union of the scalar (non-container, non-combinator) node classes.
PrimitiveTypes = Union[String, Boolean, Null, Number, Integer]
import random

# Word pool used to assemble pseudo-random sentences.
LOREM = """Lorem ipsum dolor sit amet consectetur adipisicing elit.
Hic molestias, esse veniam placeat officiis nobis architecto modi
possimus reiciendis accusantium exercitationem quas illum libero odit magnam,
reprehenderit ipsum, repellendus culpa!
Nullam vehicula ipsum a arcu cursus vitae congue.
Enim nec dui nunc mattis enim ut tellus.""".split()


def random_fixed_length_sentence(_min: int = 0, _max: int = 50) -> str:
    """Assemble a random sentence whose length lies within ``[_min, _max]``.

    Words are drawn from :data:`LOREM` while they still fit in the remaining
    budget; once the minimum is reached each additional word has a ~10%
    chance of being the last. A trailing ``.`` pads a one-character
    shortfall left by stripping the final space.
    """
    if _min > _max:
        raise ValueError("'_max' should be greater than '_min'")  # pragma: no cover
    sentence = ""
    while True:
        space_left = _max - len(sentence)
        candidates = [word for word in LOREM if len(word) <= space_left]
        if not candidates:
            break
        if len(sentence) >= _min and random.uniform(0, 1) > 0.9:
            break
        sentence += random.choice(candidates) + " "
    sentence = sentence.strip()
    if len(sentence) < _min:
        sentence = sentence + "."
    return sentence
code](https://github.com/ghandic/jsf/tree/main/examples/flatfile) 📦 25 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | Security is very important for jsf and its community. 🔒 4 | 5 | Learn more about it below. 👇 6 | 7 | ## Versions 8 | 9 | The latest version of jsf is supported. 10 | 11 | You are encouraged to write tests for your application and update your jsf version frequently after ensuring that your tests are passing. This way you will benefit from the latest features, bug fixes, and **security fixes**. 12 | 13 | ## Reporting a Vulnerability 14 | 15 | If you think you found a vulnerability, and even if you are not sure about it, please report it right away by sending an email to: andrewchallis@hotmail.co.uk. Please try to be as explicit as possible, describing all the steps and example code to reproduce the security issue. 16 | 17 | I (the author, [@ghandic](https://linkedin.com/in/achallis)) will review it thoroughly and get back to you. 18 | 19 | ## Public Discussions 20 | 21 | Please restrain from publicly discussing a potential security vulnerability. 🙊 22 | 23 | It's better to discuss privately and try to find a solution first, to limit the potential impact as much as possible. 24 | 25 | --- 26 | 27 | Thanks for your help! 28 | 29 | The jsf community and I thank you for that. 
🙇 30 | -------------------------------------------------------------------------------- /jsf/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | ================================== 3 | 4 | Copyright (c) 2020 Andy Challis 5 | ------------------------------- 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy 8 | of this software and associated documentation files (the "Software"), to deal 9 | in the Software without restriction, including without limitation the rights 10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the Software is 12 | furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all 15 | copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 22 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 23 | SOFTWARE. -------------------------------------------------------------------------------- /docs/assets/css/styles.css: -------------------------------------------------------------------------------- 1 | /* Indentation. */ 2 | div.doc-contents:not(.first) { 3 | padding-left: 25px; 4 | border-left: 4px solid rgba(230, 230, 230); 5 | margin-bottom: 80px; 6 | } 7 | 8 | /* Don't capitalize names. */ 9 | h5.doc-heading { 10 | text-transform: none !important; 11 | } 12 | 13 | /* Don't use vertical space on hidden ToC entries. 
*/ 14 | h6.hidden-toc { 15 | margin: 0 !important; 16 | position: relative; 17 | top: -70px; 18 | } 19 | h6.hidden-toc::before { 20 | margin-top: 0 !important; 21 | padding-top: 0 !important; 22 | } 23 | 24 | /* Don't show permalink of hidden ToC entries. */ 25 | h6.hidden-toc a.headerlink { 26 | display: none; 27 | } 28 | 29 | /* Avoid breaking parameters name, etc. in table cells. */ 30 | td code { 31 | word-break: normal !important; 32 | } 33 | 34 | /* For pieces of Markdown rendered in table cells. */ 35 | td p { 36 | margin-top: 0 !important; 37 | margin-bottom: 0 !important; 38 | } 39 | 40 | .md-source__repository { 41 | max-width: 100%; 42 | } 43 | 44 | .md-icon svg { 45 | fill: var(--md-primary-bg-color); 46 | } 47 | 48 | .md-header-nav__button.md-logo img, 49 | .md-header-nav__button.md-logo svg { 50 | width: unset; 51 | } 52 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | hide: 3 | - navigation 4 | --- 5 | 6 | # Getting Started 7 | 8 | ## Installation 9 | 10 | 11 | ```bash 12 | $ pip install jsf 13 | ---> 100% 14 | Installed 15 | ``` 16 | 17 | ## Usage 18 | 19 | ### From your Python program 20 | 21 | ```python 22 | from jsf import JSF 23 | 24 | faker = JSF.from_json("demo-schema.json") 25 | fake_json = faker.generate() 26 | ``` 27 | 28 | ### From the command line 29 | 30 | #### System installation 31 | 32 | First, you'll need to install `jsf[cli]` as it has the additional dependencies for the command line tools. 33 | 34 | 35 | ```bash 36 | $ pip install jsf[cli] 37 | ---> 100% 38 | Installed 39 | ``` 40 | 41 | Now the cli is installed, all you will need to do is supply the JSON schema and the file path you wish to save the output to. 
import logging
import random
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple, Type, Union

from pydantic import ConfigDict

from jsf.schema_types.base import BaseSchema, ProviderNotSetException

logger = logging.getLogger()
# Maps the JSON-schema "type" keyword onto the Enum member base type.
_types = {"string": str, "integer": int, "number": float}


class JSFEnum(BaseSchema):
    """Schema node for schemas constrained by an ``enum`` value list."""

    enum: Optional[List[Union[str, int, float, dict, None]]] = []
    model_config = ConfigDict()

    def generate(self, context: Dict[str, Any]) -> Optional[Union[str, int, float]]:
        """Return a provider value, or a random member of ``enum``."""
        try:
            return super().generate(context)
        except ProviderNotSetException:
            return random.choice(self.enum)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "JSFEnum":
        """Construct a ``JSFEnum`` node from a parsed JSON-schema dict."""
        return JSFEnum(**d)

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Build a dynamic :class:`~enum.Enum` over the allowed values and
        wrap it as a pydantic field, registering it in the context."""
        member_base = _types.get(self.type, str)
        enum_cls = Enum(
            value=self._get_unique_name(context),
            type=member_base,
            names={str(v): v for v in self.enum},
        )
        context["__internal__"][enum_cls.__name__] = enum_cls
        return self.to_pydantic(context, enum_cls)
def base64url_encode(data: bytes) -> str:
    """Base64url-encode *data* and strip the ``=`` padding, per RFC 7515.

    Renamed the parameter from ``input`` (which shadowed the builtin).
    ``.replace`` is safe here: base64 output only ever contains ``=`` as
    trailing padding.
    """
    return base64.urlsafe_b64encode(data).decode("utf-8").replace("=", "")


def jwt(api_key: str, expiry: int, api_sec: str) -> str:
    """Build a signed HS256 JWT in compact serialization.

    Args:
        api_key: Value of the issuer claim (``iss``).
        expiry: Value of the expiry claim (``exp``), a unix timestamp.
        api_sec: Shared secret used for the HMAC-SHA256 signature.

    Returns:
        The compact ``header.payload.signature`` token string.
    """
    segments = []

    header = {"typ": "JWT", "alg": "HS256"}
    payload = {"iss": api_key, "exp": expiry}

    # Compact JSON (no whitespace) so the encoded segments are canonical.
    json_header = json.dumps(header, separators=(",", ":")).encode()
    json_payload = json.dumps(payload, separators=(",", ":")).encode()

    segments.append(base64url_encode(json_header))
    segments.append(base64url_encode(json_payload))

    # Sign the "<header>.<payload>" signing input with HMAC-SHA256.
    signing_input = ".".join(segments).encode()
    key = api_sec.encode()
    signature = hmac.new(key, signing_input, hashlib.sha256).digest()

    segments.append(base64url_encode(signature))

    return ".".join(segments)


def create_random_jwt(*args, **kwargs) -> str:
    """Return a JWT signed with a random key/secret and a fake expiry.

    Extra positional/keyword arguments are accepted (and ignored) so this
    matches the common ``(min_length, max_length)`` generator call signature.
    """
    api_key = secrets.token_urlsafe(16)
    api_sec = secrets.token_urlsafe(16)

    expiry = int(faker.date_time(timezone.utc).timestamp())

    return jwt(api_key, expiry, api_sec)
{ 10 | "type": "string" 11 | } 12 | ] 13 | }, 14 | "node": { 15 | "type": "object", 16 | "allOf": [ 17 | { 18 | "type": "object", 19 | "oneOf": [ 20 | { 21 | "type": "object", 22 | "properties": { 23 | "value": { 24 | "$ref": "#/definitions/tree" 25 | } 26 | }, 27 | "required": [ 28 | "value" 29 | ] 30 | }, 31 | { 32 | "type": "object", 33 | "properties": { 34 | "value": { 35 | "type": "string" 36 | } 37 | }, 38 | "required": ["value"] 39 | } 40 | ] 41 | } 42 | ] 43 | } 44 | } 45 | } -------------------------------------------------------------------------------- /jsf/tests/test_model_gen.py: -------------------------------------------------------------------------------- 1 | import json 2 | import platform 3 | from enum import Enum 4 | from typing import List, _GenericAlias 5 | 6 | import pytest # pants: no-infer-dep 7 | from jsf.parser import JSF 8 | from pydantic.main import create_model 9 | 10 | Object = create_model("Object") 11 | 12 | expected = [ 13 | ("boolean", bool), 14 | ("enum", Enum), 15 | ("inner-ref", Object), 16 | ("integer", int), 17 | ("null", type(None)), 18 | ("number", float), 19 | ("object", Object), 20 | ("custom", Object), 21 | ("string-enum", Enum), 22 | ("string", str), 23 | ] 24 | if int(platform.python_version_tuple()[1]) < 9: 25 | expected.append(("array", List)) 26 | 27 | else: 28 | 29 | def test_gen_model_list(TestData): 30 | with open(TestData / "array.json") as file: 31 | schema = json.load(file) 32 | p = JSF(schema) 33 | Model = p.pydantic() 34 | assert _GenericAlias == type(Model) 35 | 36 | 37 | def test_gen_model_tuple(TestData): 38 | with open(TestData / "tuple.json") as file: 39 | schema = json.load(file) 40 | p = JSF(schema) 41 | Model = p.pydantic() 42 | assert _GenericAlias == type(Model) 43 | 44 | 45 | @pytest.mark.parametrize( 46 | "filestem, expected_type_anno", 47 | expected, 48 | ) 49 | def test_gen_model(TestData, filestem, expected_type_anno): 50 | with open(TestData / f"{filestem}.json") as file: 51 | schema = 
class JSFTuple(BaseSchema):
    """Schema node for fixed-length JSON arrays (positional ``items``)."""

    items: Optional[List[BaseSchema]] = None
    # TODO: Random additional items to be appended
    additionalItems: Optional[Union[bool, BaseSchema]] = None
    minItems: Optional[int] = 0
    maxItems: Optional[int] = 5
    uniqueItems: Optional[bool] = False
    fixed: Optional[Union[int, str]] = Field(None, alias="$fixed")

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "JSFTuple":
        """Build a node directly from a parsed schema dictionary."""
        return JSFTuple(**d)

    def generate(self, context: Dict[str, Any]) -> Optional[List[Tuple]]:
        """Generate one value per positional item schema, returned as a tuple."""
        # TODO: Random drop out "It's ok to not provide all of the items"
        try:
            return super().generate(context)
        except ProviderNotSetException:
            # Snapshot the recursion depth so each positional item starts from
            # the same budget, then restore it for whatever runs after us.
            saved_depth = context["state"]["__depth__"]
            values = tuple(schema.generate(context) for schema in self.items)
            context["state"]["__depth__"] = saved_depth
            return values

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Create a ``typing.Tuple`` annotation from the per-position models."""
        member_names = ",".join(schema.model(context)[0].__name__ for schema in self.items)
        _type = eval(f"Tuple[{member_names}]", context["__internal__"])
        return self.to_pydantic(context, _type)
def binary_encoder(string: str) -> str:
    """Encode *string* as a bit string: one zero-padded 8-bit group per UTF-8 byte.

    Zero-padding (``"08b"``) keeps every byte exactly 8 bits, so the output is
    unambiguous and mechanically decodable. The previous ``format(x, "b")``
    dropped leading zeros, making byte boundaries unrecoverable (e.g. "a"
    became the 7-bit "1100001").
    """
    return "".join(format(byte, "08b") for byte in bytearray(string, "utf-8"))
5 | 6 | ## Environment setup 7 | 8 | Nothing easier! 9 | 10 | Fork and clone the repository, then install [`pants`](https://www.pantsbuild.org/2.18/docs/getting-started/installing-pants) 11 | 12 | ```bash 13 | # Install `pants` on macos 14 | brew install pantsbuild/tap/pants 15 | 16 | # Export the virtual environment for IDE 17 | pants export --py-resolve-format=symlinked_immutable_virtualenv --resolve=python-default 18 | ``` 19 | 20 | That's it! 21 | 22 | You now have the dependencies installed. 23 | 24 | ## Development 25 | 26 | As usual: 27 | 28 | 1. create a new branch: `git checkout -b feature-or-bugfix-name` 29 | 1. edit the code and/or the documentation 30 | 31 | **Before committing:** 32 | 33 | 1. Ensure to run `pants fmt ::` from the root directory to format all of the code in the repo before PR submission. 34 | 2. Follow our [commit message convention](#commit-message-convention) 35 | 36 | If you are unsure about how to fix or ignore a warning, 37 | just let the continuous integration fail, 38 | and we will help you during review. 39 | 40 | Don't bother updating the changelog, we will take care of this. 41 | 42 | ## Commit message convention 43 | 44 | Commits messages must follow the 45 | [Angular style](https://gist.github.com/stephenparish/9941e89d80e2bc58a153#format-of-the-commit-message): 46 | 47 | ```txt 48 | [(scope)]: Subject 49 | 50 | [Body] 51 | ``` 52 | 53 | Scope and body are optional. Type can be: 54 | 55 | - `build`: About packaging, building wheels, etc. 56 | - `chore`: About packaging or repo/files management. 57 | - `ci`: About Continuous Integration. 58 | - `docs`: About documentation. 59 | - `feat`: New feature. 60 | - `fix`: Bug fix. 61 | - `perf`: About performance. 62 | - `refactor`: Changes which are not features nor bug fixes. 63 | - `style`: A change in code style/format. 64 | - `tests`: About tests. 
65 | 66 | **Subject (and body) must be valid Markdown.** 67 | If you write a body, please add issues references at the end: 68 | 69 | ```txt 70 | Body. 71 | 72 | References: #10, #11. 73 | Fixes #15. 74 | ``` 75 | -------------------------------------------------------------------------------- /examples/fastapi/model.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pydantic import BaseModel, Field 4 | 5 | 6 | class NewbornItem(BaseModel): 7 | surname: str = Field(..., description="The newborn's surname, eg: Reid") 8 | givenNames: str = Field(..., description="The newborn's given names, eg: Mathew David") 9 | sex: str = Field(..., description="The newborn's sex, eg: M, F or U") 10 | dateOfBirth: str = Field(..., description="The newborn's date of birth, eg: 17/03/2021") 11 | birthOrder: str = Field(..., description="The newborn's birth order, eg: 1") 12 | indigenousStatus: str = Field(..., description="The newborn's indigenous status, eg: 14") 13 | uniqueId: str = Field(..., description="The newborn's unique birth event id, eg: 20474417") 14 | 15 | 16 | class Address(BaseModel): 17 | suburb: str = Field(..., description="The address suburb (Australia Only), eg: Watson") 18 | postcode: str = Field(..., description="The address postcode (Australia Only), eg: 2602") 19 | street1: str = Field( 20 | ..., 21 | description="The address street name line 1 (Australia Only), eg: 49 Aspinall St", 22 | ) 23 | street2: str = Field( 24 | ..., description="The address street name line 2 (Australia Only), eg: Suite 1" 25 | ) 26 | 27 | 28 | class Parent(BaseModel): 29 | surname: str | None = Field(..., description="The mother's surname, eg: Mcdermott") 30 | givenNames: str = Field(..., description="The mother's given names, eg: Sarah Lousie") 31 | mailAddress: Address 32 | residentialAddress: Address 33 | mobile: str = Field(..., description="The mother's mobile phone number, eg: 0400182545") 34 
class Number(BaseSchema):
    """Generator for JSON Schema ``number`` nodes.

    Supports both the draft-4 boolean form and the draft-6+ numeric form of
    ``exclusiveMinimum``/``exclusiveMaximum``.
    """

    multipleOf: Optional[Union[float, int]] = None
    minimum: Optional[Union[float, int]] = 0
    exclusiveMinimum: Optional[Union[bool, float, int]] = None
    maximum: Optional[Union[float, int]] = 9999
    exclusiveMaximum: Optional[Union[bool, float, int]] = None
    # enum: List[Union[str, int, float]] = None # NOTE: Not used - enums go to enum class

    def generate(self, context: Dict[str, Any]) -> Optional[float]:
        """Return the provider value if one is configured, otherwise a random
        multiple of ``multipleOf`` (default step 1) within the effective bounds."""
        try:
            return super().generate(context)
        except ProviderNotSetException:
            step = self.multipleOf if self.multipleOf is not None else 1

            # The bool check must come first: draft-4 uses a bool that modifies
            # `minimum`, draft-6+ makes exclusiveMinimum the numeric bound
            # itself (and bool is a subclass of int, so order matters).
            if isinstance(self.exclusiveMinimum, bool):
                _min = self.minimum + step
            elif isinstance(self.exclusiveMinimum, (int, float)):
                _min = self.exclusiveMinimum + step
            else:
                _min = self.minimum

            if isinstance(self.exclusiveMaximum, bool):
                _max = self.maximum - step
            elif isinstance(self.exclusiveMaximum, (int, float)):
                _max = self.exclusiveMaximum - step
            else:
                _max = self.maximum

            # NOTE(review): shifting an exclusive bound by a full `step` can
            # skip valid values when the bound is not itself a multiple of the
            # step (e.g. exclusiveMinimum=5, multipleOf=2 never yields 6) —
            # confirm whether that tightening is intended.
            return float(
                step * random.randint(math.ceil(float(_min) / step), math.floor(float(_max) / step))
            )

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Pydantic annotation for this node: plain ``float``."""
        return self.to_pydantic(context, float)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "Number":
        """Build a node directly from a parsed schema dictionary."""
        return Number(**d)


class Integer(Number):
    """Generator for JSON Schema ``integer`` — same bounds logic as Number,
    with the sampled float truncated to int."""

    def generate(self, context: Dict[str, Any]) -> Optional[int]:
        n = super().generate(context)
        return int(n) if n is not None else n

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Pydantic annotation for this node: plain ``int``."""
        return self.to_pydantic(context, int)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "Integer":
        """Build a node directly from a parsed schema dictionary."""
        return Integer(**d)
class Array(BaseSchema):
    """Generator for JSON Schema ``array`` nodes with a single ``items`` schema."""

    items: Optional[BaseSchema] = None
    contains: Optional[BaseSchema] = None  # NOTE: Validation only
    minItems: Optional[int] = 0
    maxItems: Optional[int] = 5
    uniqueItems: Optional[bool] = False
    # "$fixed" pins the generated length: either an int, or a string that is
    # eval'd against the generation context and called to produce an int.
    fixed: Optional[Union[int, str]] = Field(None, alias="$fixed")

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "Array":
        """Build a node directly from a parsed schema dictionary."""
        return Array(**d)

    def generate(self, context: Dict[str, Any]) -> Optional[List[Any]]:
        """Generate a random-length list of item values.

        Honors ``$fixed`` (exact length) and, when ``uniqueItems`` is set,
        deduplicates then tops the list back up to ``minItems``.
        """
        try:
            return super().generate(context)
        except ProviderNotSetException:
            if isinstance(self.fixed, str):
                # NOTE(review): eval of schema-supplied code — only safe for
                # trusted schemas; confirm this is the intended trust model.
                self.minItems = self.maxItems = eval(self.fixed, context)()
            elif isinstance(self.fixed, int):
                self.minItems = self.maxItems = self.fixed

            # Snapshot the recursion depth so each generated item starts from
            # the same budget; restore it after every pass that generates.
            depth = context["state"]["__depth__"]
            output = []
            for _ in range(random.randint(int(self.minItems), int(self.maxItems))):
                output.append(self.items.generate(context))
            context["state"]["__depth__"] = depth
            if self.uniqueItems and self.items.type == "object":
                # Dicts are unhashable, so dedupe via frozenset of their items.
                # assumes generated dicts hold hashable values — a nested dict
                # value would raise TypeError here; TODO confirm.
                output = [dict(s) for s in {frozenset(d.items()) for d in output}]
                while len(output) < self.minItems:
                    output.append(self.items.generate(context))
                    output = [dict(s) for s in {frozenset(d.items()) for d in output}]
                    context["state"]["__depth__"] = depth
            elif self.uniqueItems:
                # NOTE(review): both top-up loops can spin indefinitely when the
                # item value space has fewer than minItems distinct values —
                # confirm whether a retry cap is needed.
                output = set(output)
                while len(output) < self.minItems:
                    output.add(self.items.generate(context))
                    context["state"]["__depth__"] = depth
                output = list(output)
            return output

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Create the ``List[Union[<item model>]]`` annotation for pydantic."""
        _type = eval(
            f"List[Union[{','.join([self.items.model(context)[0].__name__])}]]",
            context["__internal__"],
        )
        return self.to_pydantic(context, _type)
18 | jsonl = "jsonl" 19 | 20 | 21 | def main( 22 | schema: Annotated[ 23 | Path, 24 | typer.Option( 25 | exists=True, 26 | file_okay=True, 27 | dir_okay=False, 28 | writable=False, 29 | readable=True, 30 | resolve_path=True, 31 | help="Path to the JSON schema used to produce the fake data.", 32 | ), 33 | ], 34 | records: Annotated[int, typer.Option(min=0, help="Number of records you wish to produce.")], 35 | output_format: Annotated[OutputFormat, typer.Option(help="Fake data output format.")], 36 | output: Annotated[Path, typer.Option(help="Output file path")], 37 | ): 38 | faker = JSF.from_json(schema) 39 | fake_data = faker.generate(records) 40 | match output_format: 41 | case OutputFormat.csv: 42 | pd.DataFrame.from_records(fake_data).to_csv(output, index=False) 43 | case OutputFormat.excel: 44 | more_fake_data = faker.generate(records) 45 | custom_header = [ 46 | v.get("title") or k for k, v in faker.root_schema["properties"].items() 47 | ] 48 | with pd.ExcelWriter(output) as excel_writer: 49 | pd.DataFrame.from_records(fake_data).to_excel( 50 | excel_writer, sheet_name="Fake Data", index=False, header=custom_header 51 | ) 52 | pd.DataFrame.from_records(more_fake_data).to_excel( 53 | excel_writer, sheet_name="More Fake Data", index=False, header=custom_header 54 | ) 55 | case OutputFormat.json: 56 | with open(output, "w") as f: 57 | json.dump(fake_data, f) 58 | case OutputFormat.jsonl: 59 | with jsonlines.open(output, mode="w") as writer: 60 | writer.write_all(fake_data) 61 | case OutputFormat.parquet: 62 | pd.DataFrame.from_records(fake_data).to_parquet(output, index=False) 63 | case _: 64 | raise NotImplementedError("Unable to produce in this file format yet") 65 | 66 | 67 | if __name__ == "__main__": 68 | typer.run(main) 69 | -------------------------------------------------------------------------------- /jsf/schema_types/object.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import random 3 | from 
class Object(BaseSchema):
    """Generator for JSON Schema ``object`` nodes.

    NOTE(review): ``properties`` is annotated as a mapping, yet ``generate``
    and ``model`` iterate it directly and read ``o.name`` on each element —
    presumably the parser replaces the dict with a list of named child schemas
    after construction; TODO confirm against jsf/parser.py.
    """

    properties: Dict[str, BaseSchema] = {}
    additionalProperties: Optional[Union[bool, BaseSchema]] = None
    required: Optional[List[str]] = None
    propertyNames: Optional[PropertyNames] = None
    minProperties: Optional[int] = None
    maxProperties: Optional[int] = None
    dependencies: Optional[Union[PropertyDependency, SchemaDependency]] = None
    patternProperties: Optional[Dict[str, BaseSchema]] = None

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "Object":
        """Build a node directly from a parsed schema dictionary."""
        return Object(**d)

    def should_keep(self, property_name: str, context: Dict[str, Any]) -> bool:
        """Always keep required properties; keep optional ones at random and
        never once the recursion depth budget has been exceeded."""
        if isinstance(self.required, list) and property_name in self.required:
            return True
        return (
            random.uniform(0, 1) > self.allow_none_optionals
            and context["state"]["__depth__"] <= self.max_recursive_depth
        )

    def generate(self, context: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Generate a dict of property values plus random patternProperties."""
        try:
            return super().generate(context)
        except ProviderNotSetException:
            explicit_properties = {
                o.name: o.generate(context)
                for o in self.properties
                if self.should_keep(o.name, context)
            }
            pattern_props = {}
            if self.patternProperties:
                for o in self.patternProperties:
                    # Up to 10 keys per pattern; each key is drawn from the
                    # regex itself via rstr.xeger.
                    for _ in range(random.randint(0, 10)):
                        if self.should_keep(o.name, context):
                            pattern_props[rstr.xeger(o.name)] = o.generate(context)
            # Explicit properties win on (unlikely) key collisions.
            return {**pattern_props, **explicit_properties}

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Create a pydantic model type for this object node."""
        # NOTE(review): generate() appears to be called purely for its side
        # effects on `context` before the model is built — confirm.
        self.generate(context)
        name = self._get_unique_name(context)
        _type = create_model(name, **{o.name: o.model(context) for o in self.properties})
        context["__internal__"][_type.__name__] = _type
        return self.to_pydantic(context, _type)


# Resolve the forward reference in SchemaDependency now that Object exists.
Object.model_rebuild()
34 | cache-lmdb-store: 'true' # defaults to 'false' 35 | # Note that named_caches and lmdb_store falls back to partial restore keys which 36 | # may give a useful partial result that will save time over completely clean state, 37 | # but will cause the cache entry to grow without bound over time. 38 | # See https://pants.readme.io/docs/using-pants-in-ci for tips on how to periodically clean it up. 39 | # Alternatively you change gha-cache-key to ignore old caches. 40 | - name: Install dependencies 41 | run: | 42 | python -m pip install --upgrade pip 43 | pip install setuptools wheel twine 44 | - name: Replace assignment expressions (walrus operators) 45 | run: | 46 | pip install -e "git+https://github.com/pybpc/walrus.git#egg=bpc-walrus" 47 | walrus jsf 48 | - name: Bootstrap Pants 49 | run: | 50 | pants --version 51 | - name: Build and publish 52 | env: 53 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 54 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 55 | run: | 56 | make build 57 | twine upload dist/* -------------------------------------------------------------------------------- /jsf/tests/data/string-format.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "object", 3 | "required": [ 4 | "date-time", 5 | "time", 6 | "date", 7 | "duration", 8 | "email", 9 | "idn-email", 10 | "hostname", 11 | "idn-hostname", 12 | "ipv4", 13 | "ipv6", 14 | "uri", 15 | "uri-reference", 16 | "iri", 17 | "iri-reference", 18 | "uri-template", 19 | "json-pointer", 20 | "relative-json-pointer", 21 | "uuid", 22 | "regex" 23 | ], 24 | "properties": { 25 | "date-time": { 26 | "type": "string", 27 | "format": "date-time" 28 | }, 29 | "time": { 30 | "type": "string", 31 | "format": "time" 32 | }, 33 | "date": { 34 | "type": "string", 35 | "format": "date" 36 | }, 37 | "duration": { 38 | "type": "string", 39 | "format": "duration" 40 | }, 41 | "email": { 42 | "type": "string", 43 | "format": "email" 44 | }, 45 | "idn-email": { 46 | 
"type": "string", 47 | "format": "idn-email" 48 | }, 49 | "hostname": { 50 | "type": "string", 51 | "format": "hostname" 52 | }, 53 | "idn-hostname": { 54 | "type": "string", 55 | "format": "idn-hostname" 56 | }, 57 | "ipv4": { 58 | "type": "string", 59 | "format": "ipv4" 60 | }, 61 | "ipv6": { 62 | "type": "string", 63 | "format": "ipv6" 64 | }, 65 | "uri": { 66 | "type": "string", 67 | "format": "uri" 68 | }, 69 | "uri-reference": { 70 | "type": "string", 71 | "format": "uri-reference" 72 | }, 73 | "iri": { 74 | "type": "string", 75 | "format": "iri" 76 | }, 77 | "iri-reference": { 78 | "type": "string", 79 | "format": "iri-reference" 80 | }, 81 | "uri-template": { 82 | "type": "string", 83 | "format": "uri-template" 84 | }, 85 | "json-pointer": { 86 | "type": "string", 87 | "format": "json-pointer" 88 | }, 89 | "relative-json-pointer": { 90 | "type": "string", 91 | "format": "relative-json-pointer" 92 | }, 93 | "uuid": { 94 | "type": "string", 95 | "format": "uuid" 96 | }, 97 | "regex": { 98 | "type": "string", 99 | "format": "regex", 100 | "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$" 101 | } 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /jsf/tests/test_nullable_types_gen.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from jsf.parser import JSF 4 | 5 | 6 | def test_string_null_gen(TestData): 7 | with open(TestData / "type-list-null.json") as file: 8 | schema = json.load(file) 9 | p = JSF(schema["str"]) 10 | 11 | actual = [p.generate() for _ in range(100)] 12 | assert all(each not in ["None"] for each in actual) 13 | assert all(type(each) in [type(None), str] for each in actual) 14 | 15 | 16 | def test_int_null_gen(TestData): 17 | with open(TestData / "type-list-null.json") as file: 18 | schema = json.load(file) 19 | p = JSF(schema["int"]) 20 | 21 | actual = [p.generate() for _ in range(100)] 22 | assert all(type(each) in [type(None), int] 
for each in actual) 23 | 24 | 25 | def test_number_null_gen(TestData): 26 | with open(TestData / "type-list-null.json") as file: 27 | schema = json.load(file) 28 | p = JSF(schema["num"]) 29 | 30 | actual = [p.generate() for _ in range(100)] 31 | assert all(type(each) in [type(None), float] for each in actual) 32 | 33 | 34 | def test_boolean_null_gen(TestData): 35 | with open(TestData / "type-list-null.json") as file: 36 | schema = json.load(file) 37 | p = JSF(schema["bool"]) 38 | 39 | actual = [p.generate() for _ in range(100)] 40 | assert all(type(each) in [type(None), bool] for each in actual) 41 | 42 | 43 | def test_enum_null_gen(TestData): 44 | with open(TestData / "type-list-null.json") as file: 45 | schema = json.load(file) 46 | p = JSF(schema["enum"]) 47 | 48 | actual = [p.generate() for _ in range(100)] 49 | assert all(each in ["r", "g", "b", None] for each in actual) 50 | 51 | 52 | def test_array_null_gen(TestData): 53 | with open(TestData / "type-list-null.json") as file: 54 | schema = json.load(file) 55 | p = JSF(schema["arr"]) 56 | 57 | actual = [p.generate() for _ in range(100)] 58 | assert all(type(each) in [list, type(None)] for each in actual) 59 | 60 | 61 | def test_array_nested_null_gen(TestData): 62 | with open(TestData / "type-list-null.json") as file: 63 | schema = json.load(file) 64 | p = JSF(schema["arr_nested"]) 65 | 66 | actual = [p.generate() for _ in range(100)] 67 | items = [item for each in actual for item in each] 68 | 69 | assert all(type(each) in [int, type(None)] for each in items) 70 | 71 | 72 | def test_object_null_gen(TestData): 73 | with open(TestData / "type-list-null.json") as file: 74 | schema = json.load(file) 75 | p = JSF(schema["obj"]) 76 | 77 | actual = [p.generate() for _ in range(100)] 78 | assert all(type(each) in [dict, type(None)] for each in actual) 79 | 80 | 81 | def test_object_nested_null_gen(TestData): 82 | with open(TestData / "type-list-null.json") as file: 83 | schema = json.load(file) 84 | p = 
JSF(schema["obj_nested"]) 85 | 86 | actual = [p.generate() for _ in range(100)] 87 | assert all(type(each["req"]) in [bool, type(None)] for each in actual) 88 | -------------------------------------------------------------------------------- /jsf/tests/data/object_no_properties.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "AlertSync", 3 | "description": "\u5ba1\u8ba1\u544a\u8b66model", 4 | "type": "object", 5 | "properties": { 6 | "audit_label": { 7 | "title": "Audit Label", 8 | "type": "string", 9 | "format": "ipv4" 10 | }, 11 | "category": { 12 | "title": "Category", 13 | "minimum": 1, 14 | "maximum": 15, 15 | "type": "integer" 16 | }, 17 | "level": { 18 | "title": "Level", 19 | "minimum": 0, 20 | "maximum": 3, 21 | "type": "integer" 22 | }, 23 | "src_mac": { 24 | "title": "Src Mac", 25 | "default": "00:00:00:00:00:00", 26 | "pattern": "^([0-9A-F]{2})(\\:[0-9A-F]{2}){5}$", 27 | "type": "string" 28 | }, 29 | "src_ip": { 30 | "title": "Src Ip", 31 | "type": "string", 32 | "format": "ipv4" 33 | }, 34 | "src_port": { 35 | "title": "Src Port", 36 | "minimum": 1, 37 | "maximum": 65535, 38 | "type": "integer" 39 | }, 40 | "dst_mac": { 41 | "title": "Dst Mac", 42 | "default": "FF:FF:FF:FF:FF:FF", 43 | "pattern": "^([0-9A-F]{2})(\\:[0-9A-F]{2}){5}$", 44 | "type": "string" 45 | }, 46 | "dst_ip": { 47 | "title": "Dst Ip", 48 | "type": "string", 49 | "format": "ipv4" 50 | }, 51 | "dst_port": { 52 | "title": "Dst Port", 53 | "minimum": 1, 54 | "maximum": 65535, 55 | "type": "integer" 56 | }, 57 | "l4_protocol": { 58 | "$ref": "#/definitions/L4ProtocolEnum" 59 | }, 60 | "protocol": { 61 | "$ref": "#/definitions/ProtocolEnum" 62 | }, 63 | "illegal_ip": { 64 | "title": "Illegal Ip", 65 | "default": [], 66 | "type": "array", 67 | "items": { 68 | "type": "string", 69 | "format": "ipv4" 70 | } 71 | }, 72 | "last_at": { 73 | "title": "Last At", 74 | "default": "2022-12-30T14:08:30.753677", 75 | "type": "string", 76 | 
"format": "date-time" 77 | }, 78 | "count": { 79 | "title": "Count", 80 | "default": 1, 81 | "minimum": 1, 82 | "maximum": 100000, 83 | "type": "integer" 84 | }, 85 | "other_info": { 86 | "title": "Other Info", 87 | "type": "object" 88 | }, 89 | "payload": { 90 | "title": "Payload", 91 | "pattern": "^([0-9A-F]{2})+$", 92 | "type": "string" 93 | } 94 | }, 95 | "required": ["audit_label", "category", "level", "l4_protocol", "protocol"], 96 | "definitions": { 97 | "L4ProtocolEnum": { 98 | "title": "L4ProtocolEnum", 99 | "description": "An enumeration.", 100 | "enum": ["TCP", "UDP"], 101 | "type": "string" 102 | }, 103 | "ProtocolEnum": { 104 | "title": "ProtocolEnum", 105 | "description": "An enumeration.", 106 | "enum": ["S7COMM", "MODBUS"], 107 | "type": "string" 108 | } 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /docs/about/code-of-conduct.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, gender identity and expression, level of experience, 9 | nationality, personal appearance, race, religion, or sexual identity and 10 | orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. 
Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at pawamoy@pm.me. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at [http://contributor-covenant.org/version/1/4][version] 72 | 73 | [homepage]: http://contributor-covenant.org 74 | [version]: http://contributor-covenant.org/version/1/4/ 75 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Application 2 | *.json 3 | !examples/**/*.json 4 | !.vscode/settings.json 5 | !jsf/tests/data/*.json 6 | TODO.md 7 | 8 | # MacOS 9 | # General 10 | .DS_Store 11 | .AppleDouble 12 | .LSOverride 13 | 14 | # Icon must end with two \r 15 | Icon 16 | 17 | # Thumbnails 18 | ._* 19 | 20 | # Files that might appear in the root of a volume 21 | .DocumentRevisions-V100 22 | .fseventsd 23 | .Spotlight-V100 24 | .TemporaryItems 25 | .Trashes 26 | .VolumeIcon.icns 27 | .com.apple.timemachine.donotpresent 28 | 29 | # Directories potentially created on remote AFP share 30 | .AppleDB 31 | .AppleDesktop 32 | Network Trash Folder 33 | Temporary Items 34 | .apdisk 35 | 36 | # Python 37 | 38 | # 
Byte-compiled / optimized / DLL files 39 | __pycache__/ 40 | *.py[cod] 41 | *$py.class 42 | 43 | # C extensions 44 | *.so 45 | 46 | # Distribution / packaging 47 | .Python 48 | build/ 49 | develop-eggs/ 50 | dist/ 51 | downloads/ 52 | eggs/ 53 | .eggs/ 54 | lib/ 55 | lib64/ 56 | parts/ 57 | sdist/ 58 | var/ 59 | wheels/ 60 | share/python-wheels/ 61 | *.egg-info/ 62 | .installed.cfg 63 | *.egg 64 | MANIFEST 65 | 66 | # PyInstaller 67 | # Usually these files are written by a python script from a template 68 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 69 | *.manifest 70 | *.spec 71 | 72 | # Installer logs 73 | pip-log.txt 74 | pip-delete-this-directory.txt 75 | 76 | # Unit test / coverage reports 77 | htmlcov/ 78 | .tox/ 79 | .nox/ 80 | .coverage 81 | .coverage.* 82 | .cache 83 | nosetests.xml 84 | coverage.xml 85 | *.cover 86 | *.py,cover 87 | .hypothesis/ 88 | .pytest_cache/ 89 | cover/ 90 | 91 | # Translations 92 | *.mo 93 | *.pot 94 | 95 | # Django stuff: 96 | *.log 97 | local_settings.py 98 | db.sqlite3 99 | db.sqlite3-journal 100 | 101 | # Flask stuff: 102 | instance/ 103 | .webassets-cache 104 | 105 | # Scrapy stuff: 106 | .scrapy 107 | 108 | # Sphinx documentation 109 | docs/_build/ 110 | 111 | # PyBuilder 112 | .pybuilder/ 113 | target/ 114 | 115 | # Jupyter Notebook 116 | .ipynb_checkpoints 117 | 118 | # IPython 119 | profile_default/ 120 | ipython_config.py 121 | 122 | # pyenv 123 | # For a library or package, you might want to ignore these files since the code is 124 | # intended to run in multiple environments; otherwise, check them in: 125 | # .python-version 126 | 127 | # pipenv 128 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 129 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 130 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 131 | # install all needed dependencies. 
132 | #Pipfile.lock 133 | 134 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 135 | __pypackages__/ 136 | 137 | # Celery stuff 138 | celerybeat-schedule 139 | celerybeat.pid 140 | 141 | # SageMath parsed files 142 | *.sage.py 143 | 144 | # Environments 145 | .env 146 | .venv 147 | env/ 148 | venv/ 149 | ENV/ 150 | env.bak/ 151 | venv.bak/ 152 | 153 | # Spyder project settings 154 | .spyderproject 155 | .spyproject 156 | 157 | # Rope project settings 158 | .ropeproject 159 | 160 | # mkdocs documentation 161 | /site 162 | 163 | # mypy 164 | .mypy_cache/ 165 | .dmypy.json 166 | dmypy.json 167 | 168 | # Pyre type checker 169 | .pyre/ 170 | 171 | # pytype static type analyzer 172 | .pytype/ 173 | 174 | # Cython debug symbols 175 | cython_debug/ 176 | 177 | # pants 178 | .pids/ 179 | .pants.d/ 180 | 181 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: "jsf" 2 | site_description: "Creates fake JSON files from a JSON schema" 3 | site_url: "https://ghandic.github.io/jsf" 4 | repo_url: "https://github.com/ghandic/jsf" 5 | repo_name: "ghandic/jsf" 6 | 7 | nav: 8 | - JSF: index.md 9 | - Features: features.md 10 | - Learn: 11 | - JSON Schema Intro: intro-to-json-schema.md 12 | - Tutorial - User Guide: user-guide/first-steps.md 13 | - Advanced User Guide: user-guide/advanced.md 14 | - About: 15 | - About: about/about.md 16 | - Alternatives, Inspiration and Comparisons: about/alternatives.md 17 | - Code of Conduct: about/code-of-conduct.md 18 | - Help: 19 | - Help: help/index.md 20 | - Get Help: help/get-help.md 21 | - Help jsf - Contributing: help/contributing.md 22 | - Release Notes: https://github.com/ghandic/jsf/releases 23 | 24 | theme: 25 | name: material 26 | logo: assets/imgs/index.png 27 | favicon: assets/imgs/index.png 28 | features: 29 | - search.suggest 30 | - search.highlight 31 | - content.tabs.link 32 | - 
navigation.indexes 33 | - content.tooltips 34 | - navigation.path 35 | - content.code.annotate 36 | - content.code.copy 37 | - content.code.select 38 | - navigation.tabs 39 | palette: 40 | 41 | # Palette toggle for automatic mode 42 | - media: "(prefers-color-scheme)" 43 | scheme: slate 44 | primary: blue 45 | accent: yellow 46 | toggle: 47 | icon: material/lightbulb 48 | name: Switch to light mode 49 | 50 | # Palette toggle for light mode 51 | - media: '(prefers-color-scheme: light)' 52 | scheme: default 53 | primary: blue 54 | accent: yellow 55 | toggle: 56 | icon: material/lightbulb 57 | name: Switch to dark mode 58 | 59 | # Palette toggle for dark mode 60 | - media: '(prefers-color-scheme: dark)' 61 | scheme: slate 62 | primary: blue 63 | accent: yellow 64 | toggle: 65 | icon: material/lightbulb-outline 66 | name: Switch to system preference 67 | 68 | extra_css: 69 | - assets/css/styles.css 70 | 71 | markdown_extensions: 72 | - admonition 73 | - codehilite: 74 | guess_lang: false 75 | - pymdownx.highlight: 76 | anchor_linenums: true 77 | line_spans: __span 78 | pygments_lang_class: true 79 | - pymdownx.inlinehilite 80 | - pymdownx.snippets 81 | - pymdownx.superfences 82 | - pymdownx.emoji: 83 | emoji_index: !!python/name:material.extensions.emoji.twemoji 84 | emoji_generator: !!python/name:material.extensions.emoji.to_svg 85 | - pymdownx.tabbed 86 | - toc: 87 | permalink: "¤" 88 | - pymdownx.tasklist: 89 | custom_checkbox: true 90 | 91 | 92 | plugins: 93 | - search 94 | - mkdocstrings: 95 | default_handler: python 96 | handlers: 97 | python: 98 | rendering: 99 | show_root_heading: no 100 | show_if_no_docstring: no 101 | watch: 102 | - jsf 103 | - termynal: 104 | title: "shell" 105 | buttons: "macos" 106 | prompt_literal_start: 107 | - "$" 108 | - ">" 109 | - ">>>" 110 | 111 | extra: 112 | social: 113 | - icon: fontawesome/brands/github 114 | link: https://github.com/ghandic 115 | - icon: fontawesome/brands/linkedin 116 | link: 
https://www.linkedin.com/in/achallis 117 | - icon: fontawesome/solid/globe 118 | link: https://www.andrewchallis.co.uk 119 | - icon: fontawesome/brands/docker 120 | link: https://hub.docker.com/orgs/challisa -------------------------------------------------------------------------------- /.github/workflows/python-package.yaml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: [main] 9 | pull_request: 10 | branches: [main] 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | python-version: ["3.8", "3.9", "3.10", "3.11"] 18 | 19 | steps: 20 | - uses: actions/checkout@v4 21 | - uses: actions/cache@v4 22 | id: cache 23 | with: 24 | path: | 25 | ~/.cache/pants/setup 26 | ~/.cache/pants/lmdb_store 27 | ~/.cache/pants/named_caches 28 | key: ${{ runner.os }}- 29 | - name: Set up Python ${{ matrix.python-version }} 30 | uses: actions/setup-python@v5 31 | with: 32 | python-version: ${{ matrix.python-version }} 33 | - uses: pantsbuild/actions/init-pants@v5-scie-pants 34 | # This action bootstraps pants and manages 2-3 GHA caches. 35 | # See: github.com/pantsbuild/actions/tree/main/init-pants/ 36 | with: 37 | # v0 makes it easy to bust the cache if needed 38 | # just increase the integer to start with a fresh cache 39 | gha-cache-key: v0 40 | # The Python backend uses named_caches for Pip/PEX state, 41 | # so it is appropriate to invalidate on lockfile changes. 42 | named-caches-hash: ${{ hashFiles('python-default.lock') }} 43 | # If you're not using a fine-grained remote caching service (see https://www.pantsbuild.org/docs/remote-caching), 44 | # then you may also want to preserve the local Pants cache (lmdb_store). 
However this must invalidate for 45 | # changes to any file that can affect the build, so may not be practical in larger repos. 46 | # A remote cache service integrates with Pants's fine-grained invalidation and avoids these problems. 47 | cache-lmdb-store: 'true' # defaults to 'false' 48 | # Note that named_caches and lmdb_store falls back to partial restore keys which 49 | # may give a useful partial result that will save time over completely clean state, 50 | # but will cause the cache entry to grow without bound over time. 51 | # See https://pants.readme.io/docs/using-pants-in-ci for tips on how to periodically clean it up. 52 | # Alternatively you change gha-cache-key to ignore old caches. 53 | - name: Check BUILD files 54 | run: | 55 | pants tailor --check update-build-files --check :: 56 | - name: Lint 57 | run: | 58 | make lint 59 | - name: Test 60 | run: | 61 | make test 62 | - name: Package 63 | run: | 64 | make build 65 | - name: Validate package 66 | run: | 67 | ls dist 68 | PACKAGE=`ls dist/jsf-*.tar.gz` 69 | # Validate the CLI works 70 | pip3 install $PACKAGE\[cli\] 71 | jsf --help 72 | # Validate the sdist tests work for conda 73 | tar -xvf $PACKAGE 74 | cd jsf-* 75 | pip install . 76 | PACKAGE_DIR=`pip show jsf | grep "Location" | sed 's/^.*: //'` 77 | cd $PACKAGE_DIR/jsf 78 | pip install pytest pyjwt 79 | pytest 80 | - name: Upload coverage 81 | uses: codecov/codecov-action@v4 82 | with: 83 | files: ./dist/coverage/python/coverage.xml 84 | - name: Upload pants log 85 | uses: actions/upload-artifact@v4 86 | with: 87 | name: pants-log-${{ matrix.os }}-${{ matrix.python-version }} 88 | path: .pants.d/pants.log 89 | if: always() # We want the log even on failures. 
import json

import pytest  # pants: no-infer-dep
from jsf.parser import JSF

from jsf.schema_types import (
    Array,
    Boolean,
    Integer,
    JSFEnum,
    JSFTuple,
    Null,
    Number,
    Object,
    String,
)


def _parse(test_data_dir, filestem):
    """Load the named fixture schema and return its parsed JSF instance."""
    with open(test_data_dir / f"{filestem}.json") as file:
        schema = json.load(file)
    return JSF(schema)


def _property_types(obj):
    """Map property name -> parsed schema type for an Object node."""
    return {prop.name: type(prop) for prop in obj.properties}


@pytest.mark.parametrize(
    "filestem, expected_type",
    [
        ("array", Array),
        ("boolean", Boolean),
        ("enum", JSFEnum),
        ("inner-ref", Object),
        ("integer", Integer),
        ("null", Null),
        ("number", Number),
        ("object", Object),
        ("string-enum", JSFEnum),
        ("string", String),
        ("tuple", JSFTuple),
    ],
)
def test_types(TestData, filestem, expected_type):
    """Each top-level schema kind must parse to its dedicated schema type."""
    p = _parse(TestData, filestem)

    assert isinstance(p.root, expected_type)


def test_nested_array(TestData):
    """Array item schemas must be parsed recursively."""
    p = _parse(TestData, "array")

    assert isinstance(p.root, Array)
    assert hasattr(p.root, "items")
    assert isinstance(p.root.items, JSFEnum)


def test_nested_tuple(TestData):
    """Tuple (positional) item schemas must each be parsed to their own type.

    BUGFIX: this previously did ``assert [isinstance(...) for ...]`` — a
    non-empty list is always truthy, so the per-item type checks could never
    fail. Now each item is checked, and the item count is pinned too.
    """
    p = _parse(TestData, "tuple")

    assert isinstance(p.root, JSFTuple)
    assert hasattr(p.root, "items")
    expected_types = [Number, String, String, String]
    assert len(p.root.items) == len(expected_types)
    assert all(
        isinstance(item, expected) for item, expected in zip(p.root.items, expected_types)
    )


def test_nested_object(TestData):
    """Object properties must be parsed to their declared types."""
    p = _parse(TestData, "object")

    assert isinstance(p.root, Object)
    assert hasattr(p.root, "properties")
    assert _property_types(p.root) == {
        "name": String,
        "credit_card": Number,
        "test": Integer,
        "non_required": Integer,
    }


def test_nested_object_ref(TestData):
    """$ref properties inside an object must resolve and parse recursively."""
    p = _parse(TestData, "inner-ref")

    assert isinstance(p.root, Object)
    assert hasattr(p.root, "properties")
    assert _property_types(p.root) == {"user": Object}
    assert _property_types(p.root.properties[0]) == {
        "birthday": String,
        "email": String,
        "name": String,
        "id": Integer,
        "uuid": String,
    }


def _assert_single_enum_ref(p):
    """Shared structure check for the ordered/unordered $ref fixtures below."""
    assert isinstance(p.root, Object)
    assert hasattr(p.root, "properties")
    assert _property_types(p.root) == {"foobar": Object}
    assert _property_types(p.root.properties[0]) == {"bar": JSFEnum}


def test_ordered_refs_object(TestData):
    """Refs declared before their first use must resolve."""
    _assert_single_enum_ref(_parse(TestData, "ordered-refs"))


def test_unordered_refs_object(TestData):
    """Refs declared after their first use must also resolve."""
    _assert_single_enum_ref(_parse(TestData, "unordered-refs"))
import logging
import random
import uuid
from typing import Any, Dict, List, Optional, Tuple, Type, Union

from pydantic import BaseModel, Field
from typing_extensions import Self

logger = logging.getLogger()


class ProviderNotSetException(Exception):
    """Raised by ``BaseSchema.generate`` when no $provider, default or example can supply a value.

    Concrete schema types catch this and fall back to random generation.
    """


class BaseSchema(BaseModel):
    """Keywords common to every parsed JSON-Schema node, plus JSF extensions."""

    # The "type" keyword: the data type (or list of candidate types) for this schema.
    type: Optional[Union[str, List[str]]] = None
    # "title" (short) and "description" (longer) — annotation only, not validated.
    title: Optional[str] = None
    description: Optional[str] = None
    # "default": value for a missing key/value pair. Not validated against the schema.
    default: Optional[Any] = None
    # "examples": sample values for documentation; also used here for generation.
    examples: Optional[List[Any]] = None
    # "$schema": declares which JSON Schema draft the document targets.
    schema_: Optional[str] = Field(None, alias="$schema")
    # NOTE(review): the JSON Schema keyword is "$comment" (singular); this alias only
    # captures "$comments" — confirm whether that is intentional before changing it,
    # since existing schemas may rely on the current spelling.
    comments: Optional[str] = Field(None, alias="$comments")

    # JSF custom fields
    path: Optional[str] = None          # JSON-path-ish location of this node in the schema tree
    name: Optional[str] = None          # property name this node was parsed under
    provider: Optional[str] = Field(None, alias="$provider")  # expression yielding a value factory
    set_state: Optional[Dict[str, str]] = Field(None, alias="$state")  # shared-state factories
    is_nullable: bool = False
    is_recursive: bool = False
    allow_none_optionals: float = Field(0.5, ge=0.0, le=1.0)  # probability of emitting None
    max_recursive_depth: int = 10

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> Self:
        """Build an instance from a raw schema dict — implemented by each subclass."""
        raise NotImplementedError  # pragma: no cover

    def generate(self, context: Dict[str, Any]) -> Any:
        """Produce a value for this node or raise ``ProviderNotSetException``.

        Resolution order: $state side effects, nullability roll, $provider,
        schema default (when ``use_defaults``), schema examples (when
        ``use_examples``). Subclasses catch the exception and generate a
        random value instead.
        """
        if self.is_recursive:
            context["state"]["__depth__"] += 1

        if self.set_state is not None:
            # SECURITY: $state expressions are eval'd — only generate from trusted schemas.
            context["state"][self.path] = {k: eval(v, context)() for k, v in self.set_state.items()}

        if self.is_nullable and (
            random.uniform(0, 1) < self.allow_none_optionals
            or context["state"]["__depth__"] > self.max_recursive_depth
        ):
            return None
        if self.provider is not None:
            # SECURITY: $provider expressions are eval'd — same caveat as $state above.
            return eval(self.provider, context)()

        # BUGFIX: compare against None rather than truthiness so falsy defaults
        # such as 0, "", False or [] are still honoured under use_defaults.
        if context.get("use_defaults", False) and self.default is not None:
            return self.default

        # Truthiness is deliberate here: an empty examples list has nothing to choose from.
        if context.get("use_examples", False) and self.examples:
            return random.choice(self.examples)

        raise ProviderNotSetException()

    def model(self, context: Dict[str, Any]) -> Optional[Tuple[Type, Field]]:
        """Return the (type, Field) pair used for pydantic model generation — subclass hook."""
        raise NotImplementedError  # pragma: no cover

    def _get_unique_name(self, context: Dict[str, Any]) -> str:
        """Return the capitalised name, suffixed with a uuid when it would collide."""
        if context["__internal__"].get(self.name.capitalize()) is None:
            return self.name.capitalize()
        return self.name.capitalize() + str(uuid.uuid4().hex)

    def to_pydantic(self, context: Dict[str, Any], _type: Type) -> Tuple[Type, Field]:
        """Build a pydantic (annotation, Field) pair with a generated example value."""
        example = self.generate(context)
        if self.is_nullable:
            return (
                Optional[_type],
                Field(..., description=self.description, example=example),
            )
        return _type, Field(..., description=self.description, example=example)

2 | jsf 3 |

4 | 5 |

6 | 7 | Coverage 8 | 9 | 10 | Docs 11 | 12 | 13 | PyPI Latest Release 14 | 15 |
16 | 17 | License 18 | 19 | 20 | Code style: black 21 | 22 |

 23 | 24 | Use **jsf** along with fake data generators to provide consistent and meaningful fake data for your system. 25 | 26 | ## Main Features 27 | 28 | - Provides out of the box data generation from any JSON schema 📦 29 | - Extendable custom data providers using any lambda functions 🔗 30 | - Multi level state for dependent data (e.g. multiple objects sharing value, such as children with same surname) 🤓 31 | - Inbuilt validation of fake JSON produced ✅ 32 | - In memory conversion from JSON Schema to Pydantic Models with generated examples 🤯 33 | - Seamless integration with [FastAPI](https://fastapi.tiangolo.com/) 🚀 34 | 35 | ## Installation 36 | 37 |
38 | 39 | ```console 40 | $ pip install jsf 41 | 42 | ---> 100% 43 | ``` 44 | 45 |
46 | 47 | ## Usage 48 | 49 | ### Basic 😊 50 | 51 | ```python 52 | from jsf import JSF 53 | 54 | faker = JSF( 55 | { 56 | "type": "object", 57 | "properties": { 58 | "name": {"type": "string", "$provider": "faker.name"}, 59 | "email": {"type": "string", "$provider": "faker.email"}, 60 | }, 61 | "required": ["name", "email"], 62 | } 63 | ) 64 | 65 | fake_json = faker.generate() 66 | ``` 67 | 68 | Results in ... 69 | 70 | ```python 71 | { 72 | 'name': 'Jesse Phillips', 73 | 'email': 'xroberson@hotmail.com' 74 | } 75 | ``` 76 | 77 | ### From JSON file 📁 78 | 79 | ```python 80 | from jsf import JSF 81 | 82 | faker = JSF.from_json("demo-schema.json") 83 | fake_json = faker.generate() 84 | ``` 85 | 86 |
87 | Or run straight from the commandline... 88 | 89 | #### Native install 90 | 91 | ```bash 92 | pip install jsf[cli] 93 | jsf --schema jsf/tests/data/custom.json --instance wow.json 94 | ``` 95 | 96 | #### Docker 97 | 98 | ```bash 99 | docker run -v $PWD:/data challisa/jsf jsf --schema /data/custom.json --instance /data/example.json 100 | ``` 101 | 102 |
103 | 104 | ### FastAPI Integration 🚀 105 | 106 | Create a file main.py with: 107 | 108 | ```python 109 | from jsf import JSF 110 | from fastapi import FastAPI 111 | 112 | app = FastAPI(docs_url="/") 113 | generator = JSF.from_json("custom.json") 114 | 115 | 116 | @app.get("/generate", response_model=generator.pydantic()) 117 | def read_root(): 118 | return generator.generate() 119 | 120 | ``` 121 | 122 | Run the server with: 123 | 124 |
125 | 126 | ```console 127 | $ uvicorn main:app --reload 128 | 129 | INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) 130 | INFO: Started reloader process [28720] 131 | INFO: Started server process [28722] 132 | INFO: Waiting for application startup. 133 | INFO: Application startup complete. 134 | ``` 135 | 136 | Navigate to [http://127.0.0.1:8000](http://127.0.0.1:8000) and check out your endpoint. Notice the following are all automatically created: 137 | 138 | - Schema with descriptions and examples 139 | - Example response 140 | - Data generation by clicking "try it out" 141 | 142 | ![Example Swagger UI - Page 1](docs/assets/imgs/ui-1.png) 143 | ![Example Swagger UI - Page 2](docs/assets/imgs/ui-2.png) 144 | ![Example Swagger UI - Page 3](docs/assets/imgs/ui-3.png) 145 | ![Example Swagger UI - Page 4](docs/assets/imgs/ui-4.png) 146 | 147 |
148 | 149 | ### Partially supported features 150 | 151 | - string `contentMediaType` - only a subset of these are supported, however they can be expanded within [this file](jsf/schema_types/string_utils/content_type/__init__.py) 152 | 153 | ## Credits 154 | 155 | - This repository is a Python port of [json-schema-faker](https://github.com/json-schema-faker/json-schema-faker) with some minor differences in implementation. 156 | 157 | ## License 158 | 159 | - [MIT License](/LICENSE) 160 | -------------------------------------------------------------------------------- /jsf/schema_types/string.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import random 3 | import re 4 | from datetime import timezone 5 | from typing import Any, Callable, Dict, Optional, Tuple, Type 6 | 7 | import rstr 8 | from faker import Faker 9 | 10 | from jsf.schema_types.base import BaseSchema, ProviderNotSetException 11 | from jsf.schema_types.string_utils import content_encoding, content_type 12 | from jsf.schema_types.string_utils.content_type.text__plain import random_fixed_length_sentence 13 | 14 | logger = logging.getLogger() 15 | faker = Faker() 16 | 17 | FRAGMENT = "[a-zA-Z][a-zA-Z0-9+-.]*" 18 | URI_PATTERN = f"https?://{{hostname}}(?:{FRAGMENT})+" 19 | PARAM_PATTERN = "(?:\\?([a-z]{1,7}(=\\w{1,5})?&){0,3})?" 
def temporal_duration(
    positive: bool = True,
    years: int = 0,
    months: int = 0,
    weeks: int = 0,
    days: int = 0,
    hours: int = 0,
    minutes: int = 0,
    seconds: int = 0,
    milliseconds: int = 0,
    microseconds: int = 0,
    nanoseconds: int = 0,
) -> str:
    """Build an ISO-8601-style duration string (e.g. ``P1Y2MT3.5S``).

    BUGFIX: the sub-second parts were previously concatenated without zero
    padding (and zero parts were dropped entirely), so e.g. ``milliseconds=5``
    rendered as ``.5S`` (i.e. 500 ms) instead of ``.005S``. Each part is now
    padded to three digits and trailing zeros are trimmed.

    NOTE(review): ISO 8601 does not strictly allow mixing ``W`` with Y/M/D
    designators; preserved here because callers pass all components together.
    """
    duration = "P"
    # dur-date components
    if years != 0:
        duration = f"{duration}{years}Y"
    if months != 0:
        duration = f"{duration}{months}M"
    if weeks != 0:
        duration = f"{duration}{weeks}W"
    if days != 0:
        duration = f"{duration}{days}D"

    # dur-time components (only emit the "T" separator when any are present)
    if hours + minutes + seconds + milliseconds + microseconds + nanoseconds != 0:
        duration = f"{duration}T"
        if hours != 0:
            duration = f"{duration}{hours}H"
        if minutes != 0:
            duration = f"{duration}{minutes}M"
        if seconds + milliseconds + microseconds + nanoseconds != 0:
            # Fixed-width ms/us/ns fields give a correct decimal fraction;
            # trailing zeros are cosmetic and stripped.
            fraction = f"{milliseconds:03d}{microseconds:03d}{nanoseconds:03d}".rstrip("0")
            if fraction:
                duration = f"{duration}{seconds}.{fraction}S"
            else:
                duration = f"{duration}{seconds}S"

    # No component at all still needs a valid representation.
    if duration == "P":
        duration = "PT0S"

    # direction
    if not positive:
        duration = f"-{duration}"

    return duration


def mostly_zero_randint(_min: int, _max: int) -> int:
    """Return 0 with ~80% probability, otherwise a uniform random int in [_min, _max]."""
    return 0 if random.random() > 0.8 else random.randint(int(_min), int(_max))


def fake_duration() -> str:
    """Generate a random ISO-8601 duration; most components are zero so output stays short."""
    generic_max = 1000
    return temporal_duration(
        positive=random.random() > 0.5,
        years=mostly_zero_randint(0, generic_max),
        months=mostly_zero_randint(0, generic_max),
        weeks=mostly_zero_randint(0, generic_max),
        days=mostly_zero_randint(0, generic_max),
        hours=mostly_zero_randint(0, generic_max),
        minutes=mostly_zero_randint(0, generic_max),
        seconds=mostly_zero_randint(0, generic_max),
        milliseconds=mostly_zero_randint(0, 999),
        microseconds=mostly_zero_randint(0, 999),
        nanoseconds=mostly_zero_randint(0, 999),
    )
format_map: Dict[str, Callable] = {
    "date-time": lambda: faker.date_time(timezone.utc).isoformat(),
    "time": lambda: faker.date_time(timezone.utc).isoformat().split("T")[1],
    "date": lambda: faker.date_time(timezone.utc).isoformat().split("T")[0],
    "duration": fake_duration,
    "email": faker.email,
    "idn-email": faker.email,
    "hostname": faker.hostname,
    "idn-hostname": faker.hostname,
    "ipv4": faker.ipv4,
    "ipv6": faker.ipv6,
    "uri": faker.uri,
    "uri-reference": lambda: faker.uri() + rstr.xeger(PARAM_PATTERN),
    "iri": faker.uri,
    "iri-reference": lambda: faker.uri() + rstr.xeger(PARAM_PATTERN),
    "uri-template": lambda: rstr.xeger(
        URI_PATTERN.format(hostname=re.escape(faker.hostname())).replace(
            "(?:", "(?:/\\{[a-z][:a-zA-Z0-9-]*\\}|"
        )
    ),
    "json-pointer": lambda: rstr.xeger(f"(/(?:${FRAGMENT.replace(']*', '/]*')}|~[01]))+"),
    "relative-json-pointer": lambda: rstr.xeger(
        f"(/(?:${FRAGMENT.replace(']*', '/]*')}|~[01]))+"
    ),  # NOTE: Would need access to whole root object to mock properly
    "uuid": faker.uuid4,
}


class String(BaseSchema):
    """Parsed JSON-Schema ``string`` node; generates values from format, pattern or media type."""

    minLength: Optional[int] = 0
    maxLength: Optional[int] = 50
    pattern: Optional[str] = None
    format: Optional[str] = None
    # enum is not handled here — enum schemas are parsed into the enum class instead.
    contentMediaType: Optional[str] = None
    contentEncoding: Optional[content_encoding.ContentEncoding] = None
    # contentSchema is ignored — it doesn't help with generation.

    def generate(self, context: Dict[str, Any]) -> Optional[str]:
        """Return a fake string (or None for nullable schemas).

        Tries the base resolution ($provider/default/example) first, then
        falls back to: format dispatch, regex pattern, content media type,
        and finally a random fixed-length sentence.
        """
        try:
            s = super().generate(context)
            # BUGFIX: compare against None (not truthiness) so an explicit
            # empty-string default/example is still passed through encoding.
            return str(content_encoding.encode(s, self.contentEncoding)) if s is not None else s
        except ProviderNotSetException:
            # BUGFIX: these per-instance entries were previously written into
            # the module-level format_map, mutating shared state across
            # instances (and threads); they are now per-call overrides.
            overrides: Dict[str, Callable] = {
                "regex": lambda: rstr.xeger(self.pattern),
                "relative-json-pointer": lambda: random.choice(
                    context["state"]["__all_json_paths__"]
                ),
            }
            fmt = overrides.get(self.format) or format_map.get(self.format)
            if fmt is not None:
                return content_encoding.encode(fmt(), self.contentEncoding)
            if self.pattern is not None:
                return content_encoding.encode(rstr.xeger(self.pattern), self.contentEncoding)
            if self.contentMediaType is not None:
                return content_encoding.encode(
                    content_type.generate(self.contentMediaType, self.minLength, self.maxLength),
                    self.contentEncoding,
                )
            return content_encoding.encode(
                random_fixed_length_sentence(self.minLength, self.maxLength), self.contentEncoding
            )

    def model(self, context: Dict[str, Any]) -> Tuple[Type, Any]:
        """Return the pydantic (type, Field) pair for this string schema."""
        return self.to_pydantic(context, str)

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "String":
        """Build a String schema node from a raw schema dict."""
        return String(**d)
26 | "street2": { 27 | "type": "string", 28 | "description": "The address street name line 2 (Australia Only), eg: Suite 1", 29 | "$provider": "lambda: ''" 30 | } 31 | }, 32 | "required": ["suburb", "postcode", "street1", "street2"] 33 | } 34 | }, 35 | "type": "object", 36 | "properties": { 37 | "parent": { 38 | "type": "object", 39 | "properties": { 40 | "surname": { 41 | "type": ["string", "null"], 42 | "description": "The mother's surname, eg: Mcdermott", 43 | "$provider": "lambda: state['#']['surname']" 44 | }, 45 | "givenNames": { 46 | "type": "string", 47 | "description": "The mother's given names, eg: Sarah Lousie", 48 | "$provider": "faker.first_name_female" 49 | }, 50 | "mailAddress": { 51 | "$ref": "#/definitions/address" 52 | }, 53 | "residentialAddress": { 54 | "$ref": "#/definitions/address" 55 | }, 56 | "mobile": { 57 | "type": "string", 58 | "description": "The mother's mobile phone number, eg: 0400182545", 59 | "$provider": "faker.phone_number" 60 | }, 61 | "homePhone": { 62 | "type": "string", 63 | "description": "The mother's home phone number, eg: 0245458450", 64 | "$provider": "faker.phone_number" 65 | }, 66 | "email": { 67 | "type": "string", 68 | "description": "The mother's email address, eg: jesse6565656565@gmail.com", 69 | "$provider": "faker.ascii_email" 70 | }, 71 | "hospital": { 72 | "type": "string", 73 | "description": "The hospital where the birth took place, eg: ACTCC", 74 | "$provider": "lambda: random.choice(['ACTCC'])" 75 | }, 76 | "dateReceived": { 77 | "type": "string", 78 | "description": "The date the birth event was received, eg: 17/03/2021", 79 | "$provider": "lambda: datetime.now().strftime('%d/%m/%Y')" 80 | }, 81 | "personId": { 82 | "type": "string", 83 | "description": "The mother's personId, eg: 123456789", 84 | "$provider": "lambda: random.choice(['', faker.ssn()])" 85 | } 86 | }, 87 | "required": [ 88 | "surname", 89 | "givenNames", 90 | "mailAddress", 91 | "residentialAddress", 92 | "mobile", 93 | "homePhone", 94 | 
"email", 95 | "hospital", 96 | "dateReceived", 97 | "personId" 98 | ] 99 | }, 100 | "newborn": { 101 | "type": "array", 102 | "items": { 103 | "type": "object", 104 | "properties": { 105 | "surname": { 106 | "type": "string", 107 | "description": "The newborn's surname, eg: Reid", 108 | "$provider": "lambda: state['#']['surname']" 109 | }, 110 | "givenNames": { 111 | "type": "string", 112 | "description": "The newborn's given names, eg: Mathew David", 113 | "$provider": "faker.first_name" 114 | }, 115 | "sex": { 116 | "type": "string", 117 | "description": "The newborn's sex, eg: M, F or U", 118 | "$provider": "lambda: random.choice(['M', 'F', 'U'])" 119 | }, 120 | "dateOfBirth": { 121 | "type": "string", 122 | "description": "The newborn's date of birth, eg: 17/03/2021", 123 | "$provider": "lambda: state['#']['dateOfBirth']" 124 | }, 125 | "birthOrder": { 126 | "type": "string", 127 | "description": "The newborn's birth order, eg: 1", 128 | "$provider": "lambda: next(state['__counter__'])" 129 | }, 130 | "indigenousStatus": { 131 | "type": "string", 132 | "description": "The newborn's indigenous status, eg: 14", 133 | "$provider": "lambda: str(random.randint(1, 15))" 134 | }, 135 | "uniqueId": { 136 | "type": "string", 137 | "description": "The newborn's unique birth event id, eg: 20474417", 138 | "$provider": "lambda: str(random.randint(100000, 999999))" 139 | } 140 | }, 141 | "required": [ 142 | "surname", 143 | "givenNames", 144 | "sex", 145 | "dateOfBirth", 146 | "birthOrder", 147 | "indigenousStatus", 148 | "uniqueId" 149 | ] 150 | } 151 | } 152 | }, 153 | "required": ["parent", "newborn"] 154 | } 155 | -------------------------------------------------------------------------------- /jsf/parser.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import random 4 | from collections import ChainMap 5 | from copy import deepcopy 6 | from datetime import datetime 7 | from itertools import 
count 8 | from pathlib import Path 9 | from types import MappingProxyType 10 | from typing import Any, Dict, List, Optional, Tuple, Union 11 | 12 | from faker import Faker 13 | from jsonschema import validate 14 | from pydantic import confloat 15 | from smart_open import open as s_open 16 | 17 | from jsf.schema_types import ( 18 | AllOf, 19 | AllTypes, 20 | AnyOf, 21 | Array, 22 | JSFEnum, 23 | JSFTuple, 24 | Object, 25 | OneOf, 26 | Primitives, 27 | PrimitiveTypes, 28 | ) 29 | 30 | logger = logging.getLogger() 31 | faker = Faker() 32 | 33 | 34 | class JSF: 35 | """The JSF class generates fake data based on a provided JSON Schema. 36 | 37 | Attributes: 38 | root_schema (Dict[str, Any]): The JSON schema based on which the fake data is generated. 39 | definitions (Dict): A dictionary to store definitions used in the JSON schema. 40 | base_state (Dict[str, Any]): A dictionary that represents the state of the parser. It includes a counter, a list of all JSON paths, and the provided initial state. 41 | """ 42 | 43 | def __init__( 44 | self, 45 | schema: Dict[str, Any], 46 | context: Dict[str, Any] = MappingProxyType( 47 | { 48 | "faker": faker, 49 | "random": random, 50 | "datetime": datetime, 51 | "__internal__": {"List": List, "Union": Union, "Tuple": Tuple}, 52 | } 53 | ), 54 | initial_state: Dict[str, Any] = MappingProxyType({}), 55 | allow_none_optionals: confloat(ge=0.0, le=1.0) = 0.5, 56 | max_recursive_depth: int = 10, 57 | ): 58 | """Initializes the JSF generator with the provided schema and 59 | configuration options. 60 | 61 | Args: 62 | schema (Dict[str, Any]): The JSON schema based on which the fake data is generated. 63 | context (Dict[str, Any], optional): A dictionary that provides additional utilities for handling the schema, such as a faker for generating fake data, a random number generator, and datetime utilities. It also includes an internal dictionary for handling List, Union, and Tuple types. 
Defaults to a dictionary with "faker", "random", "datetime", and "__internal__" keys. 64 | initial_state (Dict[str, Any], optional): A dictionary that represents the initial state of the parser. If you wish to extend the state so it can be accesses by your schema you can add any references in here. Defaults to an empty dictionary. 65 | allow_none_optionals (confloat, optional): A parameter that determines the probability of optional fields being set to None. Defaults to 0.5. 66 | max_recursive_depth (int, optional): A parameter that determines the maximum depth when generating a recursive schema. Defaults to 10. 67 | """ 68 | self.root_schema = schema 69 | self.definitions = {} 70 | self.base_state = { 71 | "__counter__": count(start=1), 72 | "__all_json_paths__": [], 73 | "__depth__": 0, 74 | **initial_state, 75 | } 76 | self.base_context = context 77 | self.allow_none_optionals = allow_none_optionals 78 | self.max_recursive_depth = max_recursive_depth 79 | 80 | self.root = None 81 | self._parse(schema) 82 | 83 | @staticmethod 84 | def from_json( 85 | path: Path, 86 | context: Dict[str, Any] = MappingProxyType( 87 | { 88 | "faker": faker, 89 | "random": random, 90 | "datetime": datetime, 91 | "__internal__": {"List": List, "Union": Union, "Tuple": Tuple}, 92 | } 93 | ), 94 | initial_state: Dict[str, Any] = MappingProxyType({}), 95 | allow_none_optionals: confloat(ge=0.0, le=1.0) = 0.5, 96 | max_recursive_depth: int = 10, 97 | ) -> "JSF": 98 | """Initializes the JSF generator with the provided schema at the given 99 | path and configuration options. 100 | 101 | Args: 102 | path (Path): The path to the JSON schema based on which the fake data is generated. 103 | context (Dict[str, Any], optional): A dictionary that provides additional utilities for handling the schema, such as a faker for generating fake data, a random number generator, and datetime utilities. It also includes an internal dictionary for handling List, Union, and Tuple types. 
Defaults to a dictionary with "faker", "random", "datetime", and "__internal__" keys. 104 | initial_state (Dict[str, Any], optional): A dictionary that represents the initial state of the parser. If you wish to extend the state so it can be accesses by your schema you can add any references in here. Defaults to an empty dictionary. 105 | allow_none_optionals (confloat, optional): A parameter that determines the probability of optional fields being set to None. Defaults to 0.5. 106 | max_recursive_depth (int, optional): A parameter that determines the maximum depth when generating a recursive schema. Defaults to 10. 107 | """ 108 | with open(path) as f: 109 | return JSF( 110 | json.load(f), context, initial_state, allow_none_optionals, max_recursive_depth 111 | ) 112 | 113 | def __parse_primitive(self, name: str, path: str, schema: Dict[str, Any]) -> PrimitiveTypes: 114 | item_type, is_nullable = self.__is_field_nullable(schema) 115 | cls = Primitives.get(item_type) 116 | return cls.from_dict( 117 | { 118 | "name": name, 119 | "path": path, 120 | "is_nullable": is_nullable, 121 | "allow_none_optionals": self.allow_none_optionals, 122 | "max_recursive_depth": self.max_recursive_depth, 123 | **schema, 124 | } 125 | ) 126 | 127 | def __parse_object( 128 | self, name: str, path: str, schema: Dict[str, Any], root: Optional[AllTypes] = None 129 | ) -> Object: 130 | _, is_nullable = self.__is_field_nullable(schema) 131 | model = Object.from_dict( 132 | { 133 | "name": name, 134 | "path": path, 135 | "is_nullable": is_nullable, 136 | "allow_none_optionals": self.allow_none_optionals, 137 | "max_recursive_depth": self.max_recursive_depth, 138 | **schema, 139 | } 140 | ) 141 | root = model if root is None else root 142 | props = [] 143 | for _name, definition in schema.get("properties", {}).items(): 144 | props.append( 145 | self.__parse_definition(_name, path=f"{path}/{_name}", schema=definition, root=root) 146 | ) 147 | model.properties = props 148 | pattern_props = [] 149 
| for _name, definition in schema.get("patternProperties", {}).items(): 150 | pattern_props.append( 151 | self.__parse_definition(_name, path=f"{path}/{_name}", schema=definition, root=root) 152 | ) 153 | model.patternProperties = pattern_props 154 | 155 | return model 156 | 157 | def __parse_array( 158 | self, name: str, path: str, schema: Dict[str, Any], root: Optional[AllTypes] = None 159 | ) -> Array: 160 | _, is_nullable = self.__is_field_nullable(schema) 161 | arr = Array.from_dict( 162 | { 163 | "name": name, 164 | "path": path, 165 | "is_nullable": is_nullable, 166 | "allow_none_optionals": self.allow_none_optionals, 167 | "max_recursive_depth": self.max_recursive_depth, 168 | **schema, 169 | } 170 | ) 171 | root = arr if root is None else root 172 | arr.items = self.__parse_definition(name, f"{path}/items", schema["items"], root=root) 173 | return arr 174 | 175 | def __parse_tuple( 176 | self, name: str, path: str, schema: Dict[str, Any], root: Optional[AllTypes] = None 177 | ) -> JSFTuple: 178 | _, is_nullable = self.__is_field_nullable(schema) 179 | arr = JSFTuple.from_dict( 180 | { 181 | "name": name, 182 | "path": path, 183 | "is_nullable": is_nullable, 184 | "allow_none_optionals": self.allow_none_optionals, 185 | "max_recursive_depth": self.max_recursive_depth, 186 | **schema, 187 | } 188 | ) 189 | root = arr if root is None else root 190 | arr.items = [] 191 | for i, item in enumerate(schema["items"]): 192 | arr.items.append( 193 | self.__parse_definition(name, path=f"{path}/{name}[{i}]", schema=item, root=root) 194 | ) 195 | return arr 196 | 197 | def __is_field_nullable(self, schema: Dict[str, Any]) -> Tuple[str, bool]: 198 | item_type = schema.get("type") 199 | if isinstance(item_type, list): 200 | if "null" in item_type and len(set(item_type)) >= 2: 201 | item_type_deep_copy = deepcopy(item_type) 202 | item_type_deep_copy.remove("null") 203 | return random.choice(item_type_deep_copy), True 204 | if len(set(item_type)) >= 1: 205 | 
item_type_deep_copy = deepcopy(item_type) 206 | return random.choice(item_type_deep_copy), False 207 | return item_type, False 208 | 209 | def __parse_anyOf( 210 | self, name: str, path: str, schema: Dict[str, Any], root: Optional[AllTypes] = None 211 | ) -> AnyOf: 212 | model = AnyOf(name=name, path=path, max_recursive_depth=self.max_recursive_depth, **schema) 213 | root = model if root is None else root 214 | schemas = [] 215 | for d in schema["anyOf"]: 216 | schemas.append(self.__parse_definition(name, path, d, root=root)) 217 | model.schemas = schemas 218 | return model 219 | 220 | def __parse_allOf( 221 | self, name: str, path: str, schema: Dict[str, Any], root: Optional[AllTypes] = None 222 | ) -> AllOf: 223 | combined_schema = dict(ChainMap(*schema["allOf"])) 224 | model = AllOf(name=name, path=path, max_recursive_depth=self.max_recursive_depth, **schema) 225 | root = model if root is None else root 226 | model.combined_schema = self.__parse_definition(name, path, combined_schema, root=root) 227 | return model 228 | 229 | def __parse_oneOf( 230 | self, name: str, path: str, schema: Dict[str, Any], root: Optional[AllTypes] = None 231 | ) -> OneOf: 232 | model = OneOf(name=name, path=path, max_recursive_depth=self.max_recursive_depth, **schema) 233 | root = model if root is None else root 234 | schemas = [] 235 | for d in schema["oneOf"]: 236 | schemas.append(self.__parse_definition(name, path, d, root=root)) 237 | model.schemas = schemas 238 | return model 239 | 240 | def __parse_named_definition(self, path: str, def_name: str, root) -> AllTypes: 241 | schema = self.root_schema 242 | parsed_definition = None 243 | for def_tag in ("definitions", "$defs"): 244 | if path.startswith(f"#/{def_tag}/{def_name}"): 245 | root.is_recursive = True 246 | return root 247 | definition = schema.get(def_tag, {}).get(def_name) 248 | if definition is not None: 249 | parsed_definition = self.__parse_definition( 250 | def_name, path=f"{path}/#/{def_tag}/{def_name}", 
schema=definition, root=root 251 | ) 252 | self.definitions[f"#/{def_tag}/{def_name}"] = parsed_definition 253 | return parsed_definition 254 | 255 | def __parse_definition( 256 | self, name: str, path: str, schema: Dict[str, Any], root: Optional[AllTypes] = None 257 | ) -> AllTypes: 258 | self.base_state["__all_json_paths__"].append(path) 259 | item_type, is_nullable = self.__is_field_nullable(schema) 260 | if "const" in schema: 261 | schema["enum"] = [schema["const"]] 262 | 263 | if "enum" in schema: 264 | enum_list = schema["enum"] 265 | assert len(enum_list) > 0, "Enum List is Empty" 266 | assert all( 267 | isinstance(item, (int, float, str, dict, type(None))) for item in enum_list 268 | ), "Enum Type is not null, int, float, string or dict" 269 | return JSFEnum.from_dict( 270 | { 271 | "name": name, 272 | "path": path, 273 | "is_nullable": is_nullable, 274 | "allow_none_optionals": self.allow_none_optionals, 275 | "max_recursive_depth": self.max_recursive_depth, 276 | **schema, 277 | } 278 | ) 279 | elif "type" in schema: 280 | if item_type == "object" and "properties" in schema: 281 | return self.__parse_object(name, path, schema, root) 282 | elif item_type == "object" and "anyOf" in schema: 283 | return self.__parse_anyOf(name, path, schema, root) 284 | elif item_type == "object" and "allOf" in schema: 285 | return self.__parse_allOf(name, path, schema, root) 286 | elif item_type == "object" and "oneOf" in schema: 287 | return self.__parse_oneOf(name, path, schema, root) 288 | elif item_type == "array": 289 | if (schema.get("contains") is not None) or isinstance(schema.get("items"), dict): 290 | return self.__parse_array(name, path, schema, root) 291 | if isinstance(schema.get("items"), list) and all( 292 | isinstance(x, dict) for x in schema.get("items", []) 293 | ): 294 | return self.__parse_tuple(name, path, schema, root) 295 | else: 296 | return self.__parse_primitive(name, path, schema) 297 | elif "$ref" in schema: 298 | ext, frag = 
schema["$ref"].split("#") 299 | if ext == "": 300 | if f"#{frag}" in self.definitions: 301 | cls = deepcopy(self.definitions.get(f"#{frag}")) 302 | else: 303 | # parse referenced definition 304 | ref_name = frag.split("/")[-1] 305 | cls = self.__parse_named_definition(path, ref_name, root) 306 | else: 307 | with s_open(ext, "r") as f: 308 | external_jsf = JSF(json.load(f)) 309 | cls = deepcopy(external_jsf.definitions.get(f"#{frag}")) 310 | if path != "#" and cls == root: 311 | cls.name = name 312 | elif path != "#": 313 | cls.name = name 314 | cls.path = path 315 | return cls 316 | elif "anyOf" in schema: 317 | return self.__parse_anyOf(name, path, schema, root) 318 | elif "allOf" in schema: 319 | return self.__parse_allOf(name, path, schema, root) 320 | elif "oneOf" in schema: 321 | return self.__parse_oneOf(name, path, schema, root) 322 | elif not any(key in schema for key in ["not", "if", "then", "else"]): 323 | return self.__parse_primitive(name, path, {**schema, "type": list(Primitives.keys())}) 324 | else: 325 | raise ValueError(f"Cannot parse schema {repr(schema)}") # pragma: no cover 326 | 327 | def _parse(self, schema: Dict[str, Any]) -> AllTypes: 328 | for def_tag in ("definitions", "$defs"): 329 | for name, definition in schema.get(def_tag, {}).items(): 330 | if f"#/{def_tag}/{name}" not in self.definitions: 331 | item = self.__parse_definition( 332 | name, path=f"#/{def_tag}/{name}", schema=definition 333 | ) 334 | self.definitions[f"#/{def_tag}/{name}"] = item 335 | 336 | self.root = self.__parse_definition(name="root", path="#", schema=schema) 337 | 338 | @property 339 | def context(self): 340 | return {**self.base_context, "state": deepcopy(self.base_state)} 341 | 342 | def generate( 343 | self, n: Optional[int] = None, *, use_defaults: bool = False, use_examples: bool = False 344 | ) -> Any: 345 | """Generates a fake object from the provided schema, and returns the 346 | output. 
347 | 348 | Args: 349 | n (int, optional): If n is provided, it returns a list of n objects. If n is 1 then it returns a single object. 350 | use_defaults (bool, optional): prefer the default value as defined in the schema over a randomly generated object. Defaults to False. 351 | use_examples (bool, optional): prefer an example as defined in the schema over a randomly generated object. This parameter is preceded by the `use_defaults` parameter if set. Defaults to False. 352 | """ 353 | context = {**self.context, "use_defaults": use_defaults, "use_examples": use_examples} 354 | if n is None or n == 1: 355 | return self.root.generate(context=context) 356 | return [self.root.generate(context=context) for _ in range(n)] 357 | 358 | def pydantic(self): 359 | """Generates a fake object from the provided schema and provides the 360 | output as a Pydantic model.""" 361 | return self.root.model(context=self.context)[0] 362 | 363 | def generate_and_validate(self) -> None: 364 | """Generates a fake object from the provided schema and performs 365 | validation on the result.""" 366 | fake = self.root.generate(context=self.context) 367 | validate(instance=fake, schema=self.root_schema) 368 | 369 | def to_json(self, path: Path, **kwargs) -> None: 370 | """Generates a fake object from the provided schema and saves the 371 | output to the given path.""" 372 | with open(path, "w") as f: 373 | json.dump(self.generate(), f, **kwargs) 374 | -------------------------------------------------------------------------------- /jsf/tests/test_default_fake.py: -------------------------------------------------------------------------------- 1 | import json 2 | import re 3 | from typing import Optional 4 | 5 | import jwt # pants: no-infer-dep 6 | from jsf.parser import JSF 7 | 8 | 9 | def test_fake_object_no_properties(TestData): 10 | with open(TestData / "object_no_properties.json") as file: 11 | schema = json.load(file) 12 | p = JSF(schema) 13 | 14 | [p.generate() for _ in range(10)] # Just 
validating no errors 15 | 16 | 17 | def test_fake_anyof(TestData): 18 | with open(TestData / "anyof.json") as file: 19 | schema = json.load(file) 20 | p = JSF(schema) 21 | 22 | fake_data = [p.generate() for _ in range(10)] 23 | for d in fake_data: 24 | assert isinstance(d, str) or isinstance(d, float) 25 | 26 | 27 | def test_fake_allof(TestData): 28 | with open(TestData / "allof.json") as file: 29 | schema = json.load(file) 30 | p = JSF(schema) 31 | 32 | fake_data = [p.generate() for _ in range(10)] 33 | for d in fake_data: 34 | assert isinstance(d, str) and len(d) <= 5 35 | 36 | 37 | def test_fake_allof_complex(TestData): 38 | with open(TestData / "allof-complex.json") as file: 39 | schema = json.load(file) 40 | p = JSF(schema) 41 | 42 | fake_data = [p.generate() for _ in range(10)] 43 | for d in fake_data: 44 | assert isinstance(d, dict) 45 | assert set(d.keys()) == {"prometheus"} 46 | assert set(d["prometheus"].keys()) == {"port", "path"} 47 | assert isinstance(d["prometheus"]["port"], int) 48 | assert isinstance(d["prometheus"]["path"], str) 49 | 50 | 51 | def test_fake_anyof_object(TestData): 52 | with open(TestData / "anyof_object.json") as file: 53 | schema = json.load(file) 54 | p = JSF(schema) 55 | 56 | fake_data = [p.generate() for _ in range(10)] 57 | for d in fake_data: 58 | assert isinstance(d, dict) 59 | assert ("name" in d["ob"]) or ("id" in d["ob"]) 60 | 61 | 62 | def test_fake_oneof(TestData): 63 | with open(TestData / "oneof.json") as file: 64 | schema = json.load(file) 65 | p = JSF(schema) 66 | 67 | fake_data = [p.generate() for _ in range(10)] 68 | for d in fake_data: 69 | assert isinstance(d, bool) or isinstance(d, str) 70 | 71 | 72 | def test_fake_oneof_allof(TestData): 73 | with open(TestData / "oneof_allof.json") as file: 74 | schema = json.load(file) 75 | p = JSF(schema) 76 | 77 | fake_data = [p.generate() for _ in range(10)] 78 | for d in fake_data: 79 | assert isinstance(d, bool) or (isinstance(d, str) and len(d) <= 5) 80 | 81 | 82 | def 
test_fake_oneof_object(TestData): 83 | with open(TestData / "oneof_object.json") as file: 84 | schema = json.load(file) 85 | p = JSF(schema) 86 | 87 | fake_data = [p.generate() for _ in range(10)] 88 | for d in fake_data: 89 | assert isinstance(d, dict) 90 | assert ("name" in d["ob"]) or ("id" in d["ob"]) 91 | 92 | 93 | def test_fake_boolean(TestData): 94 | with open(TestData / "boolean.json") as file: 95 | schema = json.load(file) 96 | p = JSF(schema) 97 | 98 | assert isinstance(p.generate(), bool) 99 | fake_data = [p.generate() for _ in range(100)] 100 | assert False in fake_data 101 | assert True in fake_data 102 | 103 | 104 | def test_fake_string(TestData): 105 | with open(TestData / "string.json") as file: 106 | schema = json.load(file) 107 | p = JSF(schema) 108 | assert isinstance(p.generate(), str) 109 | fake_data = [p.generate() for _ in range(100)] 110 | assert len(fake_data) - len(set(fake_data)) < 50 111 | 112 | 113 | def test_fake_string_max_min_length(TestData): 114 | with open(TestData / "string-max-min-length.json") as file: 115 | schema = json.load(file) 116 | p = JSF(schema) 117 | assert isinstance(p.generate(), str) 118 | fake_data = [p.generate() for _ in range(10)] 119 | assert all(len(fd) == 2 for fd in fake_data) 120 | 121 | 122 | def test_fake_string_content_encoding(TestData): 123 | with open(TestData / "string-content-encoding.json") as file: 124 | schema = json.load(file) 125 | p = JSF(schema) 126 | assert isinstance(p.generate(), dict) 127 | fake_data = [p.generate() for _ in range(100)] 128 | for d in fake_data: 129 | assert set(d["binary"]) - {"1", "0"} == set() 130 | # TODO: Test other encodings are working as expected 131 | 132 | 133 | def test_fake_string_content_type(TestData): 134 | with open(TestData / "string-content-type.json") as file: 135 | schema = json.load(file) 136 | p = JSF(schema) 137 | assert isinstance(p.generate(), dict) 138 | fake_data = [p.generate() for _ in range(10)] # Reducing for rate limiting of external 
requests 139 | for d in fake_data: 140 | assert len(d["text/plain"]) >= 5 and len(d["text/plain"]) <= 10 141 | 142 | decoded_jwt = jwt.decode(d["application/jwt"], options={"verify_signature": False}) 143 | assert set(decoded_jwt.keys()) == {"exp", "iss"} 144 | assert isinstance(decoded_jwt["exp"], int) 145 | assert isinstance(decoded_jwt["iss"], str) 146 | 147 | 148 | def test_fake_null(TestData): 149 | with open(TestData / "null.json") as file: 150 | schema = json.load(file) 151 | p = JSF(schema) 152 | 153 | assert isinstance(p.generate(), type(None)) 154 | fake_data = [p.generate() for _ in range(100)] 155 | assert len(set(fake_data)) == 1 156 | 157 | 158 | def test_fake_enum(TestData): 159 | with open(TestData / "enum.json") as file: 160 | schema = json.load(file) 161 | p = JSF(schema) 162 | 163 | assert isinstance(p.generate(), (str, type(None), int)) 164 | assert all(p.generate() in ["red", "amber", "green", None, 42] for _ in range(100)) 165 | 166 | 167 | def test_fake_string_enum(TestData): 168 | with open(TestData / "string-enum.json") as file: 169 | schema = json.load(file) 170 | p = JSF(schema) 171 | 172 | assert isinstance(p.generate(), str) 173 | assert all(p.generate() in ["Street", "Avenue", "Boulevard"] for _ in range(100)) 174 | 175 | 176 | def test_fake_object_enum(TestData): 177 | with open(TestData / "object-enum.json") as file: 178 | schema = json.load(file) 179 | p = JSF(schema) 180 | 181 | assert isinstance(p.generate(), dict) 182 | assert all( 183 | p.generate() in [{"code": "1", "value": "CHILD"}, {"code": "2", "value": "ADULT"}] 184 | for _ in range(100) 185 | ) 186 | 187 | 188 | def test_fake_int(TestData): 189 | with open(TestData / "integer.json") as file: 190 | schema = json.load(file) 191 | p = JSF(schema) 192 | 193 | assert isinstance(p.generate(), int) 194 | fake_data = [p.generate() for _ in range(1000)] 195 | assert all(d <= 700 for d in fake_data) 196 | assert all(d > 600 for d in fake_data), fake_data 197 | assert all(d != 600 
for d in fake_data) 198 | assert all(d % 7 == 0 for d in fake_data) 199 | 200 | 201 | def test_fake_number(TestData): 202 | with open(TestData / "number.json") as file: 203 | schema = json.load(file) 204 | p = JSF(schema) 205 | 206 | assert isinstance(p.generate(), float) 207 | fake_data = [p.generate() for _ in range(1000)] 208 | assert all(d <= 700 for d in fake_data) 209 | assert all(d > 600 for d in fake_data), fake_data 210 | assert all(d != 600 for d in fake_data) 211 | 212 | 213 | def test_fake_number_exclusive(TestData): 214 | with open(TestData / "number-exclusive.json") as file: 215 | schema = json.load(file) 216 | p = JSF(schema) 217 | 218 | assert isinstance(p.generate(), float) 219 | fake_data = [p.generate() for _ in range(1000)] 220 | assert all(d < 700 for d in fake_data) 221 | assert all(d >= 600 for d in fake_data), fake_data 222 | assert all(d != 700 for d in fake_data) 223 | 224 | 225 | def test_fake_number_exclusive_float(TestData): 226 | with open(TestData / "number-exclusive-float.json") as file: 227 | schema = json.load(file) 228 | p = JSF(schema) 229 | 230 | assert isinstance(p.generate(), float) 231 | fake_data = [p.generate() for _ in range(1000)] 232 | assert all(d < 700 for d in fake_data), fake_data 233 | assert all(d > 600 for d in fake_data), fake_data 234 | assert all(d != 700 for d in fake_data) 235 | assert all(d != 600 for d in fake_data) 236 | 237 | 238 | def test_fake_array(TestData): 239 | with open(TestData / "array.json") as file: 240 | schema = json.load(file) 241 | p = JSF(schema) 242 | 243 | assert isinstance(p.generate(), list) 244 | fake_data = [p.generate() for _ in range(1000)] 245 | assert all(set(d) - {"red", "amber", "green"} == set() for d in fake_data), fake_data 246 | assert all(len(set(d)) == len(d) for d in fake_data), fake_data 247 | assert all(len(d) <= 5 for d in fake_data), fake_data 248 | assert all(len(d) >= 1 for d in fake_data), fake_data 249 | 250 | 251 | def test_fake_array_dicts(TestData): 252 | 
with open(TestData / "array-dicts.json") as file: 253 | schema = json.load(file) 254 | p = JSF(schema) 255 | 256 | assert isinstance(p.generate(), dict) 257 | fake_data = [p.generate() for _ in range(1000)] 258 | assert all(len(d["Basket"]) == 2 for d in fake_data), fake_data 259 | assert all( 260 | d["Basket"][0]["Item Name"] in ["A", "B", "C", "D", "E"] for d in fake_data 261 | ), fake_data 262 | assert all( 263 | d["Basket"][1]["Item Name"] in ["A", "B", "C", "D", "E"] for d in fake_data 264 | ), fake_data 265 | assert all(0 <= d["Basket"][0]["Amount"] < 5 for d in fake_data), fake_data 266 | assert all(0 <= d["Basket"][1]["Amount"] < 5 for d in fake_data), fake_data 267 | 268 | 269 | def test_fake_array_fixed_int(TestData): 270 | with open(TestData / "array-fixed-int.json") as file: 271 | schema = json.load(file) 272 | p = JSF(schema) 273 | 274 | assert isinstance(p.generate(), list) 275 | fake_data = [p.generate() for _ in range(1000)] 276 | assert all(set(d) - {"red", "amber", "green"} == set() for d in fake_data), fake_data 277 | assert all(len(d) == 5 for d in fake_data), fake_data 278 | 279 | 280 | def test_fake_array_fixed_str(TestData): 281 | with open(TestData / "array-fixed-str.json") as file: 282 | schema = json.load(file) 283 | p = JSF(schema) 284 | 285 | assert isinstance(p.generate(), list) 286 | fake_data = [p.generate() for _ in range(1000)] 287 | assert all(set(d) - {"red", "amber", "green"} == set() for d in fake_data), fake_data 288 | assert all(len(d) == 50 for d in fake_data), fake_data 289 | 290 | 291 | def test_fake_tuple(TestData): 292 | with open(TestData / "tuple.json") as file: 293 | schema = json.load(file) 294 | p = JSF(schema) 295 | 296 | assert isinstance(p.generate(), tuple) 297 | fake_data = [p.generate() for _ in range(1000)] 298 | for d in fake_data: 299 | assert isinstance(d[0], float) 300 | assert isinstance(d[1], str) 301 | assert isinstance(d[2], str) and d[2] in ["Street", "Avenue", "Boulevard"] 302 | assert 
isinstance(d[3], str) and d[3] in ["NW", "NE", "SW", "SE"] 303 | 304 | 305 | def test_fake_object(TestData): 306 | with open(TestData / "object.json") as file: 307 | schema = json.load(file) 308 | p = JSF(schema) 309 | 310 | assert isinstance(p.generate(), dict) 311 | fake_data = [p.generate() for _ in range(1000)] 312 | assert all(isinstance(d["name"], str) for d in fake_data), fake_data 313 | assert all(isinstance(d["credit_card"], float) for d in fake_data), fake_data 314 | assert all(isinstance(d["test"], int) for d in fake_data), fake_data 315 | 316 | 317 | def test_fake_object_pattern_properties(TestData): 318 | with open(TestData / "object-pattern-properties.json") as file: 319 | schema = json.load(file) 320 | p = JSF(schema) 321 | 322 | assert isinstance(p.generate(), dict) 323 | fake_data = [p.generate() for _ in range(1000)] 324 | assert all(isinstance(d["name"], str) for d in fake_data), fake_data 325 | all_str_names = set() 326 | all_int_names = set() 327 | for d in fake_data: 328 | string_types = [k for k in d.keys() if k.startswith("S_")] 329 | int_types = [k for k in d.keys() if k.startswith("I_")] 330 | all_str_names = all_str_names.union(set(string_types)) 331 | all_int_names = all_int_names.union(set(int_types)) 332 | assert all(isinstance(d[key], str) for key in string_types) 333 | assert all(isinstance(d[key], int) for key in int_types) 334 | 335 | assert len(all_str_names) > 0 336 | assert len(all_int_names) > 0 337 | 338 | 339 | def assert_regex(pattern: str, string: str, info: Optional[str]) -> None: 340 | assert bool(re.match(pattern, string)), (string, info) 341 | 342 | 343 | def test_fake_string_format(TestData): 344 | with open(TestData / "string-format.json") as file: 345 | schema = json.load(file) 346 | p = JSF(schema) 347 | 348 | assert isinstance(p.generate(), dict) 349 | fake_data = [p.generate() for _ in range(10)] 350 | 351 | for d in fake_data: 352 | assert_regex(r".*@.*", d["email"], "email") 353 | assert_regex(r".*@.*", 
d["idn-email"], "idn-email") 354 | assert_regex( 355 | r"\d{4}-\d{2}-\d{2}T\d{2}\:\d{2}\:\d{2}\.*\d*[-\+]\d{2}\:\d{2}", 356 | d["date-time"], 357 | "date-time", 358 | ) 359 | assert_regex(r"\d{4}-\d{2}-\d{2}", d["date"], "date") 360 | assert_regex( 361 | r"^(-?)P(?=\d|T\d)(?:(\d+)Y)?(?:(\d+)M)?(?:(\d+)W)?(?:(\d+)D)?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?$", 362 | d["duration"], 363 | "duration", 364 | ) 365 | assert_regex(r"\d{2}\:\d{2}\:\d{2}\.*\d*[-\+]\d{2}\:\d{2}", d["time"], "time") 366 | assert_regex(r"[a-zA-Z0-9+-\.]{1,33}\.[a-z]{2,4}", d["hostname"], "hostname") 367 | assert_regex(r"[a-zA-Z0-9+-\.]{1,33}\.[a-z]{2,4}", d["idn-hostname"], "idn-hostname") 368 | assert_regex(r"[a-f0-9]{0,4}(:[a-f0-9]{0,4}){7}", d["ipv6"], "ipv6") 369 | 370 | # TODO: add more regex tests 371 | # "ipv4" 372 | # "uri" 373 | # "uri-reference" 374 | # "iri" 375 | # "iri-reference" 376 | # "uri-template" 377 | # "json-pointer" 378 | # "relative-json-pointer" 379 | # "uuid" 380 | # "regex" 381 | 382 | 383 | # NO LONGER REQUIRED - dont think you can have unique items in a tuple? 
# def test_unique_items_tuple(TestData):
#     with open(TestData / "unique-items-tuple.json", "r") as file:
#         schema = json.load(file)
#     p = JSF(schema)
#     fake_data = p.generate(50)
#     for f in fake_data:
#         assert isinstance(f, list)
#         assert all([isinstance(t, tuple) for t in f])
#         assert all(len(set(t)) == len(t) for t in f), f


def test_unique_items_array(TestData):
    """Arrays with uniqueItems=true never contain duplicate entries."""
    with open(TestData / "unique-items-array.json") as file:
        schema = json.load(file)
    p = JSF(schema)
    fake_data = p.generate(50)
    for f in fake_data:
        assert isinstance(f, list)
        # Generator expression instead of a throwaway list inside all().
        assert all(isinstance(t, bool) for t in f)
        assert len(set(f)) == len(f), f


def test_const(TestData):
    """A `const` property always generates exactly the constant value."""
    with open(TestData / "const.json") as file:
        schema = json.load(file)
    p = JSF(schema)
    fake_data = p.generate(50)
    for f in fake_data:
        assert isinstance(f, dict)
        assert isinstance(f["country"], str)
        assert f["country"] == "United States of America"


def test_external_ref(TestData):
    """Both local and external $ref targets are resolved and faked."""
    with open(TestData / "external-ref.json") as file:
        schema = json.load(file)
    p = JSF(schema)
    fake_data = p.generate(50)
    for f in fake_data:
        assert isinstance(f, dict)
        assert isinstance(f["ReferenceToLocalSchema"], dict)
        assert isinstance(f["ReferenceToLocalSchema"]["no-write"], bool)

        assert isinstance(f["ReferenceToExternalSchema"], dict)
        assert isinstance(f["ReferenceToExternalSchema"]["src"], list)
        assert all(isinstance(t, str) for t in f["ReferenceToExternalSchema"]["src"])


def test_gen_and_validate(TestData):
    """generate_and_validate() round-trips 50 times without raising."""
    with open(TestData / "custom.json") as file:
        schema = json.load(file)
    p = JSF(schema)
    # Plain loop: the comprehension's list was discarded (side effects only).
    for _ in range(50):
        p.generate_and_validate()


def test_list_of_types(TestData):
    """A `type` given as a list picks one of the listed primitive types.

    NOTE: ``type(...) in (...)`` is kept deliberately instead of isinstance()
    for the first two fields — bool is a subclass of int, and the test needs
    to distinguish the exact type generated.
    """
    with open(TestData / "type-list.json") as file:
        schema = json.load(file)
    fake_data = [JSF(schema).generate() for _ in range(100)]
    # (removed a leftover debug loop that printed every generated record)
    assert all(isinstance(f, dict) for f in fake_data), fake_data
    assert all(type(f["randTypeValueNullable"]) in (type(None), bool) for f in fake_data), fake_data
    assert all(type(f["randTypeValue"]) in (bool, int, float, str) for f in fake_data), fake_data
    assert all(isinstance(f["int"], int) for f in fake_data), fake_data
    assert all(f["null"] is None for f in fake_data), fake_data


def test_non_required_are_not_none(TestData):
    """With allow_none_optionals=0.0 every optional field is populated."""
    with open(TestData / "object-with-optionals.json") as file:
        schema = json.load(file)
    for _ in range(10):
        fake_data = JSF(schema, allow_none_optionals=0.0).generate()

        assert fake_data["name"] is not None
        assert fake_data["credit_card"] is not None


def test_fake_object_recursive(TestData):
    """Recursive object schemas stop branching at max_recursive_depth."""
    with open(TestData / "object_recursive.json") as file:
        schema = json.load(file)
    p = JSF(schema, allow_none_optionals=0.0, max_recursive_depth=2)

    fake_data = [p.generate() for _ in range(5)]
    for d in fake_data:
        assert isinstance(d, dict)
        assert "tree" in d and "id" in d
        assert "branches" in d["tree"] and "value" in d["tree"]
        for subtree in d["tree"]["branches"]:
            assert isinstance(subtree, dict)
            assert "branches" in subtree and "value" in subtree
            # At the depth limit, leaves must not recurse any further.
            for leave in subtree["branches"]:
                assert "branches" not in leave and "value" in leave


def test_fake_oneof_recursive(TestData):
    """Recursive oneOf schemas terminate, emitting only ints or nested lists."""
    with open(TestData / "oneof_recursive.json") as file:
        schema = json.load(file)
    p = JSF(schema, max_recursive_depth=2)

    fake_data = [p.generate() for _ in range(10)]
    for d in fake_data:
        assert isinstance(d, list)
        for item in d:
            assert isinstance(item, (int, list))
def test_fake_complex_recursive(TestData):
    """A recursive oneOf of string/object resolves to one of those shapes."""
    with open(TestData / "complex_recursive.json") as file:
        schema = json.load(file)
    faker = JSF(schema, max_recursive_depth=2)

    for _ in range(10):
        record = faker.generate()
        assert isinstance(record, (str, dict))
        if isinstance(record, dict):
            assert "value" in record


def test_fake_empty(TestData):
    """An empty schema generates without raising (no assertions on output)."""
    with open(TestData / "empty.json") as file:
        schema = json.load(file)
    for _ in range(10):
        # A fresh JSF per iteration, exactly as the original comprehension did.
        JSF(schema).generate()


def test_use_defaults(TestData):
    """use_defaults=True substitutes a field's `default` when one exists."""
    with open(TestData / "object-with-examples.json") as file:
        schema = json.load(file)
    faker = JSF(schema)

    for _ in range(10):
        record = faker.generate(use_defaults=True)
        assert isinstance(record, dict)
        breed = record.get("breed")
        assert breed is None or breed == "Mixed Breed"


def test_use_examples(TestData):
    """use_examples=True draws values from each field's `examples` list."""
    with open(TestData / "object-with-examples.json") as file:
        schema = json.load(file)
    faker = JSF(schema)

    for _ in range(10):
        record = faker.generate(use_examples=True)
        assert isinstance(record, dict)
        assert record["species"] in ["Dog", "Cat", "Rabbit"]
        assert record["name"] in ["Chop", "Luna", "Thanos"]
        breed = record.get("breed")
        assert breed is None or breed in ["Labrador Retriever", "Siamese", "Golden Retriever"]


def test_use_defaults_and_examples(TestData):
    """When both flags are set, `default` wins for fields that define both."""
    with open(TestData / "object-with-examples.json") as file:
        schema = json.load(file)
    faker = JSF(schema)

    for _ in range(10):
        record = faker.generate(use_defaults=True, use_examples=True)
        assert isinstance(record, dict)
        assert record["species"] in ["Dog", "Cat", "Rabbit"]
        assert record["name"] in ["Chop", "Luna", "Thanos"]
        breed = record.get("breed")
        assert breed is None or breed == "Mixed Breed"
None or breed == "Mixed Breed" 547 | -------------------------------------------------------------------------------- /jsf/3rdparty/python/pytest.lock: -------------------------------------------------------------------------------- 1 | // This lockfile was autogenerated by Pants. To regenerate, run: 2 | // 3 | // pants generate-lockfiles --resolve=pytest 4 | // 5 | // --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE --- 6 | // { 7 | // "version": 3, 8 | // "valid_for_interpreter_constraints": [ 9 | // "CPython<4,>=3.8" 10 | // ], 11 | // "generated_with_requirements": [ 12 | // "pyjwt", 13 | // "pytest-cov", 14 | // "typer>=0.7.0" 15 | // ], 16 | // "manylinux": "manylinux2014", 17 | // "requirement_constraints": [], 18 | // "only_binary": [], 19 | // "no_binary": [] 20 | // } 21 | // --- END PANTS LOCKFILE METADATA --- 22 | 23 | { 24 | "allow_builds": true, 25 | "allow_prereleases": false, 26 | "allow_wheels": true, 27 | "build_isolation": true, 28 | "constraints": [], 29 | "locked_resolves": [ 30 | { 31 | "locked_requirements": [ 32 | { 33 | "artifacts": [ 34 | { 35 | "algorithm": "sha256", 36 | "hash": "ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", 37 | "url": "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl" 38 | }, 39 | { 40 | "algorithm": "sha256", 41 | "hash": "ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", 42 | "url": "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz" 43 | } 44 | ], 45 | "project_name": "click", 46 | "requires_dists": [ 47 | "colorama; platform_system == \"Windows\"", 48 | "importlib-metadata; python_version < \"3.8\"" 49 | ], 50 | "requires_python": ">=3.7", 51 | "version": "8.1.7" 52 | }, 53 | { 54 | "artifacts": [ 55 | { 56 | "algorithm": "sha256", 57 | "hash": 
"32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166", 58 | "url": "https://files.pythonhosted.org/packages/65/b7/0c855c523d0e979ae43480cee806cae09ee0dbbd0b7c6fed9f9d50318b18/coverage-7.4.1-pp38.pp39.pp310-none-any.whl" 59 | }, 60 | { 61 | "algorithm": "sha256", 62 | "hash": "8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756", 63 | "url": "https://files.pythonhosted.org/packages/05/37/799839832bddad161a42eab64e3f42282c75ce0206b2e1c1fc4654e4a995/coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl" 64 | }, 65 | { 66 | "algorithm": "sha256", 67 | "hash": "3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1", 68 | "url": "https://files.pythonhosted.org/packages/07/c8/a0f6a2fe09bcada89661eca3c79ee71ad33acfa2ee141b4e300a7281563d/coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl" 69 | }, 70 | { 71 | "algorithm": "sha256", 72 | "hash": "b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295", 73 | "url": "https://files.pythonhosted.org/packages/0b/bd/008f9dad615d67e47221a983cd46cb5e87002e569dec60daa84d1b422859/coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl" 74 | }, 75 | { 76 | "algorithm": "sha256", 77 | "hash": "3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66", 78 | "url": "https://files.pythonhosted.org/packages/10/97/ca7dec2d9a1262bc0dbfb757989444fec8cde908083b15fb3339210aa7b8/coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" 79 | }, 80 | { 81 | "algorithm": "sha256", 82 | "hash": "6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74", 83 | "url": "https://files.pythonhosted.org/packages/10/b5/17ee4cc87f4415c28cd3b77df2dd58ce548d5033545faea2bc1a9b3e6b50/coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl" 84 | }, 85 | { 86 | "algorithm": "sha256", 87 | "hash": "3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c", 88 | "url": 
"https://files.pythonhosted.org/packages/12/8d/e078f0ccc4e91aa44f7754f0bac18bd6c62780a029b5d30f6242c6e06b23/coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl" 89 | }, 90 | { 91 | "algorithm": "sha256", 92 | "hash": "918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45", 93 | "url": "https://files.pythonhosted.org/packages/13/4e/66a3821f6fc8a28d07740d9115fdacffb7e7d61431b9ae112bacde846327/coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl" 94 | }, 95 | { 96 | "algorithm": "sha256", 97 | "hash": "1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35", 98 | "url": "https://files.pythonhosted.org/packages/16/ec/f8899be71d5c0964e4f34ccfe8ecef3e9cff25daa6728a8915c72004b1d5/coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl" 99 | }, 100 | { 101 | "algorithm": "sha256", 102 | "hash": "0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7", 103 | "url": "https://files.pythonhosted.org/packages/18/e3/eb7689641819f6c415aa7d88593e2d0d322e3adf364a0dd4f4d1eba00eeb/coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl" 104 | }, 105 | { 106 | "algorithm": "sha256", 107 | "hash": "077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7", 108 | "url": "https://files.pythonhosted.org/packages/26/1f/430384b8e428c87950583e775fee97bc83bcfd93a2ecc00b5e55a5a052a5/coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl" 109 | }, 110 | { 111 | "algorithm": "sha256", 112 | "hash": "379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d", 113 | "url": "https://files.pythonhosted.org/packages/2a/12/89d5f08eb9be53910e3b9b2d02dd932f9b50bac10281272cdbaf8dee58d9/coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" 114 | }, 115 | { 116 | "algorithm": "sha256", 117 | "hash": "18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd", 118 | "url": 
"https://files.pythonhosted.org/packages/2b/cc/bf2bfaf953a47d9771383cb87ce2985ec3d4aad445fb88143c24d9839079/coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" 119 | }, 120 | { 121 | "algorithm": "sha256", 122 | "hash": "f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581", 123 | "url": "https://files.pythonhosted.org/packages/37/34/2089e0b24759a207184b41a4e4b4af7004282a5b3a93bb408c2fa19b9b16/coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl" 124 | }, 125 | { 126 | "algorithm": "sha256", 127 | "hash": "8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218", 128 | "url": "https://files.pythonhosted.org/packages/3c/75/a4abb6a0d1d4814fbcf8d9e552fd08b579236d8f5c5bb4cfd8a566c43612/coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl" 129 | }, 130 | { 131 | "algorithm": "sha256", 132 | "hash": "b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06", 133 | "url": "https://files.pythonhosted.org/packages/46/4d/9d6a7081c31d1388bff379250ab3ab0c873330c8139c07e8f4b6df61fe65/coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" 134 | }, 135 | { 136 | "algorithm": "sha256", 137 | "hash": "6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19", 138 | "url": "https://files.pythonhosted.org/packages/49/d5/9d66fd984979b58927588efb0398953acbdb4c45eb7cfcd74fa9b8d51d12/coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" 139 | }, 140 | { 141 | "algorithm": "sha256", 142 | "hash": "10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75", 143 | "url": "https://files.pythonhosted.org/packages/54/4c/e2d59855d36921e3025380f75e110e672bb8500a5e5832af59b65a218ee4/coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl" 144 | }, 145 | { 146 | "algorithm": "sha256", 147 | "hash": "6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630", 148 | "url": 
"https://files.pythonhosted.org/packages/64/98/2f02659fdd92467a78e35fcf756b373f2a374bb68a42a16546ad3005bb18/coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl" 149 | }, 150 | { 151 | "algorithm": "sha256", 152 | "hash": "a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60", 153 | "url": "https://files.pythonhosted.org/packages/72/31/a8d0a018aceecf8b2728f924c0a2d1c07c36be611301db1843538315dca8/coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl" 154 | }, 155 | { 156 | "algorithm": "sha256", 157 | "hash": "9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628", 158 | "url": "https://files.pythonhosted.org/packages/86/25/6b70cb21b6e62158aab40a0e930361d4397f4ef4cbd2a04d3d01b6e4c5cf/coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" 159 | }, 160 | { 161 | "algorithm": "sha256", 162 | "hash": "5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676", 163 | "url": "https://files.pythonhosted.org/packages/87/71/0d90c4cda220c1f20f0eeaa997633eb1ec0bcaf5d8250c299d0f27a5885d/coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" 164 | }, 165 | { 166 | "algorithm": "sha256", 167 | "hash": "7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6", 168 | "url": "https://files.pythonhosted.org/packages/8e/d5/af7155aa1a6d8496c4436b45c9330dc7b26d962a154054359e2cf6055ed6/coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl" 169 | }, 170 | { 171 | "algorithm": "sha256", 172 | "hash": "d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee", 173 | "url": "https://files.pythonhosted.org/packages/8f/bf/9b1e104690d4976b17d515ee49b648c26d7244e148d1c845708d58b8f4fe/coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" 174 | }, 175 | { 176 | "algorithm": "sha256", 177 | "hash": "8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54", 178 | "url": 
"https://files.pythonhosted.org/packages/9f/ae/0d439dc9adc0111ffbed38149d73ddf34f7a8768e377020181e624cf2634/coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl" 179 | }, 180 | { 181 | "algorithm": "sha256", 182 | "hash": "a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25", 183 | "url": "https://files.pythonhosted.org/packages/a3/6a/02369bde2559d7c511d62eaca5668d6e15d2b741da87bc6a7e9c8999777d/coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" 184 | }, 185 | { 186 | "algorithm": "sha256", 187 | "hash": "02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1", 188 | "url": "https://files.pythonhosted.org/packages/a9/59/fd1a59a55e6b99b72e77e20933185bacaf3cd35a3729cba8465af1e1cdbe/coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl" 189 | }, 190 | { 191 | "algorithm": "sha256", 192 | "hash": "f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766", 193 | "url": "https://files.pythonhosted.org/packages/b3/b9/49b1028a69b1e9476db7508705fc67a1218ece54af07b87339eac1b5600a/coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" 194 | }, 195 | { 196 | "algorithm": "sha256", 197 | "hash": "6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad", 198 | "url": "https://files.pythonhosted.org/packages/b5/e3/87ee5c1250934d42038680c41c04bac813025913c460c761859b04dcbff7/coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl" 199 | }, 200 | { 201 | "algorithm": "sha256", 202 | "hash": "c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c", 203 | "url": "https://files.pythonhosted.org/packages/ba/77/c12e72e82324914d4b36d89535a18dc57de06829597b345080acbf4083d0/coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl" 204 | }, 205 | { 206 | "algorithm": "sha256", 207 | "hash": "fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3", 208 | "url": 
"https://files.pythonhosted.org/packages/c3/92/f2d89715c3397e76fe365b1ecbb861d1279ff8d47d23635040a358bc75dc/coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" 209 | }, 210 | { 211 | "algorithm": "sha256", 212 | "hash": "1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04", 213 | "url": "https://files.pythonhosted.org/packages/ca/41/e2ba20f090d0d16b73ad1f6fc542eb31b0db20662576583fb4f02554891f/coverage-7.4.1.tar.gz" 214 | }, 215 | { 216 | "algorithm": "sha256", 217 | "hash": "536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1", 218 | "url": "https://files.pythonhosted.org/packages/cb/0f/93a0ffe448b16e713f50b968733cfc590adb70878b14290718e6471ae6fa/coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl" 219 | }, 220 | { 221 | "algorithm": "sha256", 222 | "hash": "46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70", 223 | "url": "https://files.pythonhosted.org/packages/ce/e1/df16e7e353c2ba5a5b3e02a6bad7dbf1bc62d5b9cfe5c06ed0e31fc64122/coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl" 224 | }, 225 | { 226 | "algorithm": "sha256", 227 | "hash": "b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b", 228 | "url": "https://files.pythonhosted.org/packages/d3/e9/4dd7343cfb4458e3279da877cdd73e006624f6b24fd5c6675e4f973724c0/coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl" 229 | }, 230 | { 231 | "algorithm": "sha256", 232 | "hash": "dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011", 233 | "url": "https://files.pythonhosted.org/packages/d5/a7/36bd1c439fab5d450c69b7cdf4be4291d56885ae8be11ebed9ec240b919f/coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" 234 | }, 235 | { 236 | "algorithm": "sha256", 237 | "hash": "e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156", 238 | "url": 
"https://files.pythonhosted.org/packages/d7/95/5f7e7f7d46d671d1b81e36ef6439798645ed042c1ffd116ded89897b254d/coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" 239 | }, 240 | { 241 | "algorithm": "sha256", 242 | "hash": "23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6", 243 | "url": "https://files.pythonhosted.org/packages/de/37/4f3eb8e6f4be39eeca4318e3c2ef10e954e86871a68b0e71f004835d6a30/coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl" 244 | }, 245 | { 246 | "algorithm": "sha256", 247 | "hash": "0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61", 248 | "url": "https://files.pythonhosted.org/packages/ed/2d/db83db65d0c3d457f993830b97271a80f11bdc051d86dd44405c436db147/coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl" 249 | }, 250 | { 251 | "algorithm": "sha256", 252 | "hash": "ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc", 253 | "url": "https://files.pythonhosted.org/packages/f1/a3/77220a6212f874ccb0b32e36de5803b379a7a3ee95c2b3cded1e1c78c683/coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl" 254 | }, 255 | { 256 | "algorithm": "sha256", 257 | "hash": "aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950", 258 | "url": "https://files.pythonhosted.org/packages/fc/cc/c4da6426501cdbad3b37edbeca7b485137f74a6030d5a974060d8369f898/coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" 259 | }, 260 | { 261 | "algorithm": "sha256", 262 | "hash": "d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1", 263 | "url": "https://files.pythonhosted.org/packages/ff/e3/351477165426da841458f2c1b732360dd42da140920e3cd4b70676e5b77f/coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" 264 | } 265 | ], 266 | "project_name": "coverage", 267 | "requires_dists": [ 268 | "tomli; python_full_version <= \"3.11.0a6\" and extra == \"toml\"" 269 | ], 270 | 
"requires_python": ">=3.8", 271 | "version": "7.4.1" 272 | }, 273 | { 274 | "artifacts": [ 275 | { 276 | "algorithm": "sha256", 277 | "hash": "4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14", 278 | "url": "https://files.pythonhosted.org/packages/b8/9a/5028fd52db10e600f1c4674441b968cf2ea4959085bfb5b99fb1250e5f68/exceptiongroup-1.2.0-py3-none-any.whl" 279 | }, 280 | { 281 | "algorithm": "sha256", 282 | "hash": "91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68", 283 | "url": "https://files.pythonhosted.org/packages/8e/1c/beef724eaf5b01bb44b6338c8c3494eff7cab376fab4904cfbbc3585dc79/exceptiongroup-1.2.0.tar.gz" 284 | } 285 | ], 286 | "project_name": "exceptiongroup", 287 | "requires_dists": [ 288 | "pytest>=6; extra == \"test\"" 289 | ], 290 | "requires_python": ">=3.7", 291 | "version": "1.2.0" 292 | }, 293 | { 294 | "artifacts": [ 295 | { 296 | "algorithm": "sha256", 297 | "hash": "b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", 298 | "url": "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" 299 | }, 300 | { 301 | "algorithm": "sha256", 302 | "hash": "2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", 303 | "url": "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz" 304 | } 305 | ], 306 | "project_name": "iniconfig", 307 | "requires_dists": [], 308 | "requires_python": ">=3.7", 309 | "version": "2.0.0" 310 | }, 311 | { 312 | "artifacts": [ 313 | { 314 | "algorithm": "sha256", 315 | "hash": "8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", 316 | "url": "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl" 317 | }, 318 | { 319 | "algorithm": "sha256", 320 | "hash": "048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", 
321 | "url": "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz" 322 | } 323 | ], 324 | "project_name": "packaging", 325 | "requires_dists": [], 326 | "requires_python": ">=3.7", 327 | "version": "23.2" 328 | }, 329 | { 330 | "artifacts": [ 331 | { 332 | "algorithm": "sha256", 333 | "hash": "7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981", 334 | "url": "https://files.pythonhosted.org/packages/a5/5b/0cc789b59e8cc1bf288b38111d002d8c5917123194d45b29dcdac64723cc/pluggy-1.4.0-py3-none-any.whl" 335 | }, 336 | { 337 | "algorithm": "sha256", 338 | "hash": "8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be", 339 | "url": "https://files.pythonhosted.org/packages/54/c6/43f9d44d92aed815e781ca25ba8c174257e27253a94630d21be8725a2b59/pluggy-1.4.0.tar.gz" 340 | } 341 | ], 342 | "project_name": "pluggy", 343 | "requires_dists": [ 344 | "pre-commit; extra == \"dev\"", 345 | "pytest-benchmark; extra == \"testing\"", 346 | "pytest; extra == \"testing\"", 347 | "tox; extra == \"dev\"" 348 | ], 349 | "requires_python": ">=3.8", 350 | "version": "1.4.0" 351 | }, 352 | { 353 | "artifacts": [ 354 | { 355 | "algorithm": "sha256", 356 | "hash": "59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320", 357 | "url": "https://files.pythonhosted.org/packages/2b/4f/e04a8067c7c96c364cef7ef73906504e2f40d690811c021e1a1901473a19/PyJWT-2.8.0-py3-none-any.whl" 358 | }, 359 | { 360 | "algorithm": "sha256", 361 | "hash": "57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de", 362 | "url": "https://files.pythonhosted.org/packages/30/72/8259b2bccfe4673330cea843ab23f86858a419d8f1493f66d413a76c7e3b/PyJWT-2.8.0.tar.gz" 363 | } 364 | ], 365 | "project_name": "pyjwt", 366 | "requires_dists": [ 367 | "coverage[toml]==5.0.4; extra == \"dev\"", 368 | "coverage[toml]==5.0.4; extra == \"tests\"", 369 | "cryptography>=3.4.0; extra == \"crypto\"", 370 | "cryptography>=3.4.0; extra == 
\"dev\"", 371 | "pre-commit; extra == \"dev\"", 372 | "pytest<7.0.0,>=6.0.0; extra == \"dev\"", 373 | "pytest<7.0.0,>=6.0.0; extra == \"tests\"", 374 | "sphinx-rtd-theme; extra == \"dev\"", 375 | "sphinx-rtd-theme; extra == \"docs\"", 376 | "sphinx<5.0.0,>=4.5.0; extra == \"dev\"", 377 | "sphinx<5.0.0,>=4.5.0; extra == \"docs\"", 378 | "typing-extensions; python_version <= \"3.7\"", 379 | "zope.interface; extra == \"dev\"", 380 | "zope.interface; extra == \"docs\"" 381 | ], 382 | "requires_python": ">=3.7", 383 | "version": "2.8.0" 384 | }, 385 | { 386 | "artifacts": [ 387 | { 388 | "algorithm": "sha256", 389 | "hash": "50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6", 390 | "url": "https://files.pythonhosted.org/packages/c7/10/727155d44c5e04bb08e880668e53079547282e4f950535234e5a80690564/pytest-8.0.0-py3-none-any.whl" 391 | }, 392 | { 393 | "algorithm": "sha256", 394 | "hash": "249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c", 395 | "url": "https://files.pythonhosted.org/packages/50/fd/af2d835eed57448960c4e7e9ab76ee42f24bcdd521e967191bc26fa2dece/pytest-8.0.0.tar.gz" 396 | } 397 | ], 398 | "project_name": "pytest", 399 | "requires_dists": [ 400 | "argcomplete; extra == \"testing\"", 401 | "attrs>=19.2.0; extra == \"testing\"", 402 | "colorama; sys_platform == \"win32\"", 403 | "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", 404 | "hypothesis>=3.56; extra == \"testing\"", 405 | "iniconfig", 406 | "mock; extra == \"testing\"", 407 | "nose; extra == \"testing\"", 408 | "packaging", 409 | "pluggy<2.0,>=1.3.0", 410 | "pygments>=2.7.2; extra == \"testing\"", 411 | "requests; extra == \"testing\"", 412 | "setuptools; extra == \"testing\"", 413 | "tomli>=1.0.0; python_version < \"3.11\"", 414 | "xmlschema; extra == \"testing\"" 415 | ], 416 | "requires_python": ">=3.8", 417 | "version": "8.0.0" 418 | }, 419 | { 420 | "artifacts": [ 421 | { 422 | "algorithm": "sha256", 423 | "hash": 
"6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a", 424 | "url": "https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl" 425 | }, 426 | { 427 | "algorithm": "sha256", 428 | "hash": "3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", 429 | "url": "https://files.pythonhosted.org/packages/7a/15/da3df99fd551507694a9b01f512a2f6cf1254f33601605843c3775f39460/pytest-cov-4.1.0.tar.gz" 430 | } 431 | ], 432 | "project_name": "pytest-cov", 433 | "requires_dists": [ 434 | "coverage[toml]>=5.2.1", 435 | "fields; extra == \"testing\"", 436 | "hunter; extra == \"testing\"", 437 | "process-tests; extra == \"testing\"", 438 | "pytest-xdist; extra == \"testing\"", 439 | "pytest>=4.6", 440 | "six; extra == \"testing\"", 441 | "virtualenv; extra == \"testing\"" 442 | ], 443 | "requires_python": ">=3.7", 444 | "version": "4.1.0" 445 | }, 446 | { 447 | "artifacts": [ 448 | { 449 | "algorithm": "sha256", 450 | "hash": "939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", 451 | "url": "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl" 452 | }, 453 | { 454 | "algorithm": "sha256", 455 | "hash": "de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", 456 | "url": "https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz" 457 | } 458 | ], 459 | "project_name": "tomli", 460 | "requires_dists": [], 461 | "requires_python": ">=3.7", 462 | "version": "2.0.1" 463 | }, 464 | { 465 | "artifacts": [ 466 | { 467 | "algorithm": "sha256", 468 | "hash": "5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee", 469 | "url": "https://files.pythonhosted.org/packages/bf/0e/c68adf10adda05f28a6ed7b9f4cd7b8e07f641b44af88ba72d9c89e4de7a/typer-0.9.0-py3-none-any.whl" 470 | }, 471 | { 472 | "algorithm": 
"sha256", 473 | "hash": "50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2", 474 | "url": "https://files.pythonhosted.org/packages/5b/49/39f10d0f75886439ab3dac889f14f8ad511982a754e382c9b6ca895b29e9/typer-0.9.0.tar.gz" 475 | } 476 | ], 477 | "project_name": "typer", 478 | "requires_dists": [ 479 | "autoflake<2.0.0,>=1.3.1; extra == \"dev\"", 480 | "black<23.0.0,>=22.3.0; extra == \"test\"", 481 | "cairosvg<3.0.0,>=2.5.2; extra == \"doc\"", 482 | "click<9.0.0,>=7.1.1", 483 | "colorama<0.5.0,>=0.4.3; extra == \"all\"", 484 | "coverage<7.0,>=6.2; extra == \"test\"", 485 | "flake8<4.0.0,>=3.8.3; extra == \"dev\"", 486 | "isort<6.0.0,>=5.0.6; extra == \"test\"", 487 | "mdx-include<2.0.0,>=1.4.1; extra == \"doc\"", 488 | "mkdocs-material<9.0.0,>=8.1.4; extra == \"doc\"", 489 | "mkdocs<2.0.0,>=1.1.2; extra == \"doc\"", 490 | "mypy==0.910; extra == \"test\"", 491 | "pillow<10.0.0,>=9.3.0; extra == \"doc\"", 492 | "pre-commit<3.0.0,>=2.17.0; extra == \"dev\"", 493 | "pytest-cov<5.0.0,>=2.10.0; extra == \"test\"", 494 | "pytest-sugar<0.10.0,>=0.9.4; extra == \"test\"", 495 | "pytest-xdist<4.0.0,>=1.32.0; extra == \"test\"", 496 | "pytest<8.0.0,>=4.4.0; extra == \"test\"", 497 | "rich<14.0.0,>=10.11.0; extra == \"all\"", 498 | "rich<14.0.0,>=10.11.0; extra == \"test\"", 499 | "shellingham<2.0.0,>=1.3.0; extra == \"all\"", 500 | "shellingham<2.0.0,>=1.3.0; extra == \"test\"", 501 | "typing-extensions>=3.7.4.3" 502 | ], 503 | "requires_python": ">=3.6", 504 | "version": "0.9.0" 505 | }, 506 | { 507 | "artifacts": [ 508 | { 509 | "algorithm": "sha256", 510 | "hash": "af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd", 511 | "url": "https://files.pythonhosted.org/packages/b7/f4/6a90020cd2d93349b442bfcb657d0dc91eee65491600b2cb1d388bc98e6b/typing_extensions-4.9.0-py3-none-any.whl" 512 | }, 513 | { 514 | "algorithm": "sha256", 515 | "hash": "23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783", 516 | "url": 
"https://files.pythonhosted.org/packages/0c/1d/eb26f5e75100d531d7399ae800814b069bc2ed2a7410834d57374d010d96/typing_extensions-4.9.0.tar.gz" 517 | } 518 | ], 519 | "project_name": "typing-extensions", 520 | "requires_dists": [], 521 | "requires_python": ">=3.8", 522 | "version": "4.9.0" 523 | } 524 | ], 525 | "platform_tag": null 526 | } 527 | ], 528 | "path_mappings": {}, 529 | "pex_version": "2.1.137", 530 | "pip_version": "23.1.2", 531 | "prefer_older_binary": false, 532 | "requirements": [ 533 | "pyjwt", 534 | "pytest-cov", 535 | "typer>=0.7.0" 536 | ], 537 | "requires_python": [ 538 | "<4,>=3.8" 539 | ], 540 | "resolver_version": "pip-2020-resolver", 541 | "style": "universal", 542 | "target_systems": [ 543 | "linux", 544 | "mac" 545 | ], 546 | "transitive": true, 547 | "use_pep517": null 548 | } 549 | --------------------------------------------------------------------------------