├── tests ├── __init__.py └── test_to_spark.py ├── src └── pydantic_spark │ ├── __init__.py │ ├── spark_to_pydantic.py │ ├── __main__.py │ └── base.py ├── .gitignore ├── .github ├── dependabot.yml └── workflows │ ├── codeql-analysis.yml │ └── python-package.yml ├── LICENSE ├── pyproject.toml ├── README.md └── poetry.lock /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/pydantic_spark/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | 3 | junit/ 4 | dist/ 5 | 6 | __pycache__ 7 | .pytest_cache 8 | 9 | *.iml -------------------------------------------------------------------------------- /src/pydantic_spark/spark_to_pydantic.py: -------------------------------------------------------------------------------- 1 | def convert_file(spark_schema_path: str, output_path: str): 2 | raise NotImplementedError 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /src/pydantic_spark/__main__.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | from typing import List 4 | 5 | from pydantic_spark.spark_to_pydantic import convert_file 6 | 7 | 8 | def main(input_args: List[str]): 9 | parser = argparse.ArgumentParser() 10 | subparsers = parser.add_subparsers(dest="sub_command", required=True) 11 | 12 | parser_cache = subparsers.add_parser("spark_to_pydantic") 13 | parser_cache.add_argument("--spark-schema", type=str, dest="spark_schema", required=True) 14 | parser_cache.add_argument("--output", type=str, dest="output") 15 | 16 | args = parser.parse_args(input_args) 17 | 18 | if args.sub_command == "spark_to_pydantic": 19 | convert_file(args.spark_schema, args.output) 20 | 21 | 22 | def root_main(): 23 | main(sys.argv[1:]) 24 | 25 | 26 | if __name__ == "__main__": 27 | root_main() 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 GoDataDriven 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pydantic-spark" 3 | version = "1.0.1" 4 | description = "Converting pydantic classes to spark schemas" 5 | authors = ["Peter van 't Hof' "] 6 | 7 | keywords = ["pydantic", "spark"] 8 | readme = "README.md" 9 | license = "MIT" 10 | homepage = "https://github.com/godatadriven/pydantic-spark" 11 | repository = "https://github.com/godatadriven/pydantic-spark" 12 | include = [ 13 | "LICENSE", 14 | ] 15 | 16 | 17 | packages = [{ include = "pydantic_spark", from = "src"}] 18 | 19 | [tool.poetry.dependencies] 20 | python = ">=3.8,<4.0" 21 | pydantic = "^2.5.2" 22 | 23 | #spark 24 | pyspark = {version=">=3.1.2,<3.3.0", optional=true} 25 | 26 | 27 | [tool.poetry.extras] 28 | spark = ["pyspark"] 29 | 30 | [tool.poetry.dev-dependencies] 31 | coverage= {version= "^6.1.1", extras=["toml"]} 32 | pytest= "7.0.1" 33 | pytest-mock="3.6.1" 34 | pyproject-flake8 ="^0.0.1a2" 35 | isort ="^5.10.0" 36 | pytest-cov= "^3.0.0" 37 | mypy = "^0.961" 38 | black = "^22.10.0" 39 | 40 | 41 | [tool.poetry.scripts] 42 | pydantic-spark = "pydantic_spark.__main__:root_main" 43 | 44 | [tool.black] 45 | line-length = 120 46 | target-version = ["py38"] 47 | 48 | [tool.pytest.ini_options] 49 | addopts = "--junitxml=junit/report.xml" 50 | testpaths = ["tests"] 51 | markers = "requires_database: requires postgresql" 52 | 53 | [tool.coverage.report] 54 | omit = ["tests/*", ".venv/*"] 55 | precision = 2 56 | show_missing = true 57 | 58 | [tool.coverage.run] 59 | command_line = "-m pytest -v tests/" 60 | omit = ["tests/*",".venv/*"] 61 | 62 | [tool.flake8] 63 | max-line-length = 180 64 | extend-exclude = ["deploy", ".venv"] 65 | extend-ignore = "E203" 66 | 67 | [tool.isort] 68 | profile = "black" 69 | line_length = 120 70 | 71 | [tool.mypy] 72 | files = "src/" 73 | python_version = "3.9" 74 | 75 | [build-system] 76 | requires = ["poetry-core>=1.0.0"] 77 | build-backend = "poetry.core.masonry.api" 78 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Python package](https://github.com/godatadriven/pydantic-spark/actions/workflows/python-package.yml/badge.svg)](https://github.com/godatadriven/pydantic-spark/actions/workflows/python-package.yml) 2 | [![codecov](https://codecov.io/gh/godatadriven/pydantic-spark/branch/main/graph/badge.svg?token=5L08GOERAW)](https://codecov.io/gh/godatadriven/pydantic-spark) 3 | [![PyPI version](https://badge.fury.io/py/pydantic-spark.svg)](https://badge.fury.io/py/pydantic-spark) 4 | [![CodeQL](https://github.com/godatadriven/pydantic-spark/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/godatadriven/pydantic-spark/actions/workflows/codeql-analysis.yml) 5 | 6 | # pydantic-spark 7 | 8 | This library can convert a pydantic class to a spark schema or generate python code from a spark schema. 9 | 10 | ### Install 11 | 12 | ```bash 13 | pip install pydantic-spark 14 | ``` 15 | 16 | ### Pydantic class to spark schema 17 | 18 | ```python 19 | import json 20 | from typing import Optional 21 | 22 | from pydantic_spark.base import SparkBase 23 | 24 | class TestModel(SparkBase): 25 | key1: str 26 | key2: int 27 | key2: Optional[str] 28 | 29 | schema_dict: dict = TestModel.spark_schema() 30 | print(json.dumps(schema_dict)) 31 | 32 | ``` 33 | #### Coerce type 34 | Pydantic-spark provides a `coerce_type` option that allows type coercion. 35 | When applied to a field, pydantic-spark converts the column's data type to the specified coercion type. 36 | 37 | ```python 38 | import json 39 | from pydantic import Field 40 | from pydantic_spark.base import SparkBase, CoerceType 41 | 42 | class TestModel(SparkBase): 43 | key1: str = Field(extra_json_schema={"coerce_type": CoerceType.integer}) 44 | 45 | schema_dict: dict = TestModel.spark_schema() 46 | print(json.dumps(schema_dict)) 47 | 48 | ``` 49 | 50 | 51 | ### Install for developers 52 | 53 | ###### Install package 54 | 55 | - Requirement: Poetry 1.* 56 | 57 | ```shell 58 | poetry install 59 | ``` 60 | 61 | ###### Run unit tests 62 | ```shell 63 | pytest 64 | coverage run -m pytest # with coverage 65 | # or (depends on your local env) 66 | poetry run pytest 67 | poetry run coverage run -m pytest # with coverage 68 | ``` 69 | 70 | ##### Run linting 71 | 72 | The linting is checked in the github workflow. To fix and review issues run this: 73 | ```shell 74 | black . # Auto fix all issues 75 | isort . # Auto fix all issues 76 | pflake . # Only display issues, fixing is manual 77 | ``` 78 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ main ] 20 | schedule: 21 | - cron: '42 15 * * 5' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 37 | # Learn more about CodeQL language support at https://git.io/codeql-language-support 38 | 39 | steps: 40 | - name: Checkout repository 41 | uses: actions/checkout@v2 42 | 43 | # Initializes the CodeQL tools for scanning. 44 | - name: Initialize CodeQL 45 | uses: github/codeql-action/init@v1 46 | with: 47 | languages: ${{ matrix.language }} 48 | # If you wish to specify custom queries, you can do so here or in a config file. 49 | # By default, queries listed here will override any specified in a config file. 50 | # Prefix the list here with "+" to use these queries and those in the config file. 51 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 52 | 53 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 54 | # If this step fails, then you should remove it and run the build manually (see below) 55 | - name: Autobuild 56 | uses: github/codeql-action/autobuild@v1 57 | 58 | # ℹ️ Command-line programs to run using the OS shell. 59 | # 📚 https://git.io/JvXDl 60 | 61 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 62 | # and modify them (or add more) to build your code if your project 63 | # uses a compiled language 64 | 65 | #- run: | 66 | # make bootstrap 67 | # make release 68 | 69 | - name: Perform CodeQL Analysis 70 | uses: github/codeql-action/analyze@v1 71 | -------------------------------------------------------------------------------- /.github/workflows/python-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python package 5 | 6 | on: 7 | push: 8 | branches: [ main ] 9 | pull_request: 10 | branches: [ main ] 11 | 12 | jobs: 13 | test: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | python-version: ["3.8", "3.9", "3.10", "3.11"] 19 | 20 | steps: 21 | - uses: actions/checkout@v2 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v2 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | - name: Install dependencies 27 | run: | 28 | curl -sSL https://install.python-poetry.org | python3 - 29 | poetry install --extras spark 30 | - name: Test with pytest 31 | run: | 32 | poetry run coverage run -m pytest 33 | poetry run coverage report 34 | poetry run coverage xml 35 | - name: Publish Test Report 36 | uses: mikepenz/action-junit-report@v2 37 | if: always() # always run even if the previous step fails 38 | with: 39 | report_paths: 'junit/report.xml' 40 | - name: Upload Coverage to Codecov 41 | uses: codecov/codecov-action@v1 42 | with: 43 | files: ./coverage.xml 44 | flake8: 45 | runs-on: ubuntu-latest 46 | steps: 47 | - uses: actions/checkout@v2 48 | - name: Set up Python 3.8 49 | uses: actions/setup-python@v2 50 | with: 51 | python-version: 3.8 52 | - name: Install dependencies 53 | run: | 54 | curl -sSL https://install.python-poetry.org | python3 - 55 | poetry install 56 | - name: Lint with flake8 57 | run: | 58 | # stop the build if there are Python syntax errors or undefined names 59 | poetry run pflake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 60 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 61 | poetry run pflake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 62 | 63 | black: 64 | runs-on: ubuntu-latest 65 | steps: 66 | - uses: actions/checkout@v2 67 | - name: Set up Python 3.8 68 | uses: actions/setup-python@v2 69 | with: 70 | python-version: 3.8 71 | - name: Install dependencies 72 | run: | 73 | curl -sSL https://install.python-poetry.org | python3 - 74 | poetry install 75 | - name: Lint with black 76 | run: | 77 | poetry run black --check . 78 | 79 | isort: 80 | runs-on: ubuntu-latest 81 | steps: 82 | - uses: actions/checkout@v2 83 | - name: Set up Python 3.8 84 | uses: actions/setup-python@v2 85 | with: 86 | python-version: 3.8 87 | - name: Install dependencies 88 | run: | 89 | curl -sSL https://install.python-poetry.org | python3 - 90 | poetry install 91 | - name: Lint with isort 92 | run: | 93 | poetry run isort --check . 94 | -------------------------------------------------------------------------------- /src/pydantic_spark/base.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from typing import List, Tuple 3 | 4 | from pydantic import BaseModel 5 | 6 | 7 | class CoerceType(str, Enum): 8 | integer = "integer" 9 | long = "long" 10 | double = "double" 11 | string = "string" 12 | boolean = "boolean" 13 | date = "date" 14 | timestamp = "timestamp" 15 | 16 | 17 | class SparkBase(BaseModel): 18 | """This is base pydantic class that will add some methods""" 19 | 20 | @classmethod 21 | def spark_schema(cls) -> dict: 22 | """Return the avro schema for the pydantic class""" 23 | schema = cls.schema() 24 | return cls._spark_schema(schema) 25 | 26 | @staticmethod 27 | def _spark_schema(schema: dict) -> dict: 28 | """Return the spark schema for the given pydantic schema""" 29 | classes_seen = {} 30 | 31 | def get_definition(ref: str, schema: dict): 32 | id = ref.replace("#/$defs/", "") 33 | d = schema.get("$defs", {}).get(id) 34 | if d is None: 35 | raise RuntimeError(f"Definition {id} does not exist") 36 | return d 37 | 38 | def get_type_of_definition(ref: str, schema: dict): 39 | """Reading definition of base schema for nested structs""" 40 | d = get_definition(ref, schema) 41 | 42 | if "enum" in d: 43 | enum_type = d.get("type") 44 | if enum_type == "string": 45 | return "string" 46 | elif enum_type == "numeric": 47 | return "double" 48 | elif enum_type == "integer": 49 | return "long" 50 | else: 51 | raise RuntimeError(f"Unknown enum type: {enum_type}") 52 | else: 53 | return { 54 | "type": "struct", 55 | "fields": get_fields(d), 56 | } 57 | 58 | def get_type(value: dict) -> Tuple[str, dict]: 59 | """Returns a type of single field""" 60 | t = value.get("type") 61 | ao = value.get("anyOf") 62 | f = value.get("format") 63 | r = value.get("$ref") 64 | a = value.get("additionalProperties") 65 | ft = value.get("coerce_type") 66 | metadata = {} 67 | 68 | if ft is not None: 69 | return ft, metadata 70 | 71 | if ao is not None: 72 | if len(ao) == 2 and (ao[0].get("type") == "null" or ao[1].get("type") == "null"): 73 | # this is an optional column. We will remove the null type 74 | t = ao[0].get("type") if ao[0].get("type") != "null" else ao[1].get("type") 75 | f = ao[0].get("format") if ao[0].get("type") != "null" else ao[1].get("format") 76 | else: 77 | NotImplementedError(f"Union type {ao} is not supported yet. Use coerce_type option to specify type") 78 | 79 | if "default" in value: 80 | metadata["default"] = value.get("default") 81 | if r is not None: 82 | class_name = r.replace("#/definitions/", "") 83 | if class_name in classes_seen: 84 | spark_type = classes_seen[class_name] 85 | else: 86 | spark_type = get_type_of_definition(r, schema) 87 | classes_seen[class_name] = spark_type 88 | elif t == "array": 89 | items = value.get("items") 90 | tn, metadata = get_type(items) 91 | spark_type = { 92 | "type": "array", 93 | "elementType": tn, 94 | "containsNull": True, 95 | } 96 | elif t == "string" and f == "date-time": 97 | spark_type = "timestamp" 98 | elif t == "string" and f == "date": 99 | spark_type = "date" 100 | # elif t == "string" and f == "time": # FIXME: time type in spark does not exist 101 | # spark_type = { 102 | # "type": "long", 103 | # "logicalType": "time-micros", 104 | # } 105 | elif t == "string" and f == "uuid": 106 | spark_type = "string" 107 | metadata["logicalType"] = "uuid" 108 | elif t == "string": 109 | spark_type = "string" 110 | elif t == "null": 111 | spark_type = "string" 112 | elif t == "number": 113 | spark_type = "double" 114 | elif t == "integer": 115 | # integer in python can be a long 116 | spark_type = "long" 117 | elif t == "boolean": 118 | spark_type = "boolean" 119 | elif t == "object": 120 | if a is None: 121 | value_type = "string" 122 | else: 123 | value_type, m = get_type(a) 124 | # if isinstance(value_type, dict) and len(value_type) == 1: 125 | # value_type = value_type.get("type") 126 | spark_type = {"keyType": "string", "type": "map", "valueContainsNull": True, "valueType": value_type} 127 | else: 128 | raise NotImplementedError( 129 | f"Type '{t}' not support yet, " 130 | f"please report this at https://github.com/godatadriven/pydantic-avro/issues" 131 | ) 132 | return spark_type, metadata 133 | 134 | def get_fields(s: dict) -> List[dict]: 135 | """Return a list of fields of a struct""" 136 | fields = [] 137 | 138 | for key, value in s.get("properties", {}).items(): 139 | spark_type, metadata = get_type(value) 140 | metadata["parentClass"] = s.get("title") 141 | struct_field = { 142 | "name": key, 143 | "nullable": "default" not in metadata and value.get("anyOf") is not None, 144 | "metadata": metadata, 145 | "type": spark_type, 146 | } 147 | 148 | fields.append(struct_field) 149 | return fields 150 | 151 | fields = get_fields(schema) 152 | 153 | return {"fields": fields, "type": "struct"} 154 | -------------------------------------------------------------------------------- /tests/test_to_spark.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | from datetime import date, datetime 5 | from enum import Enum 6 | from typing import Dict, List, Optional, Union 7 | from uuid import UUID 8 | 9 | from pydantic import Field 10 | from pyspark.sql.types import ( 11 | ArrayType, 12 | BooleanType, 13 | DateType, 14 | DoubleType, 15 | IntegerType, 16 | LongType, 17 | MapType, 18 | StringType, 19 | StructField, 20 | StructType, 21 | TimestampType, 22 | ) 23 | 24 | from pydantic_spark.base import CoerceType, SparkBase 25 | 26 | 27 | class Nested2Model(SparkBase): 28 | c111: str 29 | 30 | 31 | class NestedModel(SparkBase): 32 | c11: Nested2Model 33 | 34 | 35 | class TestModel(SparkBase): 36 | c1: str 37 | c2: int 38 | c3: float 39 | c4: datetime 40 | c5: date 41 | c6: Optional[str] 42 | c7: bool 43 | c8: UUID 44 | c9: Optional[UUID] 45 | c10: Dict[str, str] 46 | c11: dict 47 | 48 | 49 | class ComplexTestModel(SparkBase): 50 | c1: List[str] 51 | c2: NestedModel 52 | c3: List[NestedModel] 53 | c4: List[datetime] 54 | c5: Dict[str, NestedModel] 55 | 56 | 57 | class ReusedObject(SparkBase): 58 | c1: Nested2Model 59 | c2: Nested2Model 60 | 61 | 62 | class ReusedObjectArray(SparkBase): 63 | c1: List[Nested2Model] 64 | c2: Nested2Model 65 | 66 | 67 | class DefaultValues(SparkBase): 68 | c1: str = "test" 69 | 70 | 71 | def test_spark(): 72 | expected_schema = StructType( 73 | [ 74 | StructField("c1", StringType(), nullable=False, metadata={"parentClass": "TestModel"}), 75 | StructField("c2", LongType(), nullable=False, metadata={"parentClass": "TestModel"}), 76 | StructField("c3", DoubleType(), nullable=False, metadata={"parentClass": "TestModel"}), 77 | StructField("c4", TimestampType(), nullable=False, metadata={"parentClass": "TestModel"}), 78 | StructField("c5", DateType(), nullable=False, metadata={"parentClass": "TestModel"}), 79 | StructField("c6", StringType(), nullable=True, metadata={"parentClass": "TestModel"}), 80 | StructField("c7", BooleanType(), nullable=False, metadata={"parentClass": "TestModel"}), 81 | StructField( 82 | "c8", StringType(), nullable=False, metadata={"logicalType": "uuid", "parentClass": "TestModel"} 83 | ), 84 | StructField( 85 | "c9", StringType(), nullable=True, metadata={"logicalType": "uuid", "parentClass": "TestModel"} 86 | ), 87 | StructField( 88 | "c10", MapType(StringType(), StringType()), nullable=False, metadata={"parentClass": "TestModel"} 89 | ), 90 | StructField( 91 | "c11", MapType(StringType(), StringType()), nullable=False, metadata={"parentClass": "TestModel"} 92 | ), 93 | ] 94 | ) 95 | result = TestModel.spark_schema() 96 | assert result == json.loads(expected_schema.json()) 97 | # Reading schema with spark library to be sure format is correct 98 | schema = StructType.fromJson(result) 99 | assert len(schema.fields) == 11 100 | 101 | 102 | # def test_spark_write(): 103 | # record1 = TestModel( 104 | # c1="1", 105 | # c2=2, 106 | # c3=3, 107 | # c4=4, 108 | # c5=5, 109 | # c6=6, 110 | # c7=7, 111 | # c8=True, 112 | # c9=uuid.uuid4(), 113 | # c10=uuid.uuid4(), 114 | # c11={"key": "value"}, 115 | # c12={}, 116 | # ) 117 | # 118 | # parsed_schema = parse_schema(TestModel.spark_schema()) 119 | # 120 | # # 'records' can be an iterable (including generator) 121 | # records = [ 122 | # record1.dict(), 123 | # ] 124 | # 125 | # with tempfile.TemporaryDirectory() as dir: 126 | # # Writing 127 | # with open(os.path.join(dir, "test.spark"), "wb") as out: 128 | # writer(out, parsed_schema, records) 129 | # 130 | # result_records = [] 131 | # # Reading 132 | # with open(os.path.join(dir, "test.spark"), "rb") as fo: 133 | # for record in reader(fo): 134 | # result_records.append(TestModel.parse_obj(record)) 135 | # assert records == result_records 136 | 137 | 138 | def test_reused_object(): 139 | 140 | expected_schema = StructType( 141 | [ 142 | StructField( 143 | "c1", 144 | StructType.fromJson(Nested2Model.spark_schema()), 145 | nullable=False, 146 | metadata={"parentClass": "ReusedObject"}, 147 | ), 148 | StructField( 149 | "c2", 150 | StructType.fromJson(Nested2Model.spark_schema()), 151 | nullable=False, 152 | metadata={"parentClass": "ReusedObject"}, 153 | ), 154 | ] 155 | ) 156 | result = ReusedObject.spark_schema() 157 | assert result == json.loads(expected_schema.json()) 158 | # Reading schema with spark library to be sure format is correct 159 | schema = StructType.fromJson(result) 160 | assert len(schema.fields) == 2 161 | 162 | 163 | def test_reused_object_array(): 164 | expected_schema = StructType( 165 | [ 166 | StructField( 167 | "c1", 168 | ArrayType(StructType.fromJson(Nested2Model.spark_schema())), 169 | nullable=False, 170 | metadata={"parentClass": "ReusedObjectArray"}, 171 | ), 172 | StructField( 173 | "c2", 174 | StructType.fromJson(Nested2Model.spark_schema()), 175 | nullable=False, 176 | metadata={"parentClass": "ReusedObjectArray"}, 177 | ), 178 | ] 179 | ) 180 | result = ReusedObjectArray.spark_schema() 181 | assert result == json.loads(expected_schema.json()) 182 | # Reading schema with spark library to be sure format is correct 183 | schema = StructType.fromJson(result) 184 | assert len(schema.fields) == 2 185 | 186 | 187 | def test_complex_spark(): 188 | expected_schema = StructType( 189 | [ 190 | StructField("c1", ArrayType(StringType()), nullable=False, metadata={"parentClass": "ComplexTestModel"}), 191 | StructField( 192 | "c2", 193 | StructType.fromJson(NestedModel.spark_schema()), 194 | nullable=False, 195 | metadata={"parentClass": "ComplexTestModel"}, 196 | ), 197 | StructField( 198 | "c3", 199 | ArrayType(StructType.fromJson(NestedModel.spark_schema())), 200 | nullable=False, 201 | metadata={"parentClass": "ComplexTestModel"}, 202 | ), 203 | StructField("c4", ArrayType(TimestampType()), nullable=False, metadata={"parentClass": "ComplexTestModel"}), 204 | StructField( 205 | "c5", 206 | MapType(StringType(), StructType.fromJson(NestedModel.spark_schema())), 207 | nullable=False, 208 | metadata={"parentClass": "ComplexTestModel"}, 209 | ), 210 | ] 211 | ) 212 | result = ComplexTestModel.spark_schema() 213 | assert result == json.loads(expected_schema.json()) 214 | # Reading schema with spark library to be sure format is correct 215 | schema = StructType.fromJson(result) 216 | assert len(schema.fields) == 5 217 | 218 | 219 | # def test_spark_write_complex(): 220 | # record1 = ComplexTestModel( 221 | # c1=["1", "2"], 222 | # c2=NestedModel(c11=Nested2Model(c111="test")), 223 | # c3=[NestedModel(c11=Nested2Model(c111="test"))], 224 | # c4=[1, 2, 3, 4], 225 | # c5={"key": NestedModel(c11=Nested2Model(c111="test"))}, 226 | # ) 227 | # 228 | # parsed_schema = parse_schema(ComplexTestModel.spark_schema()) 229 | # 230 | # # 'records' can be an iterable (including generator) 231 | # records = [ 232 | # record1.dict(), 233 | # ] 234 | # 235 | # with tempfile.TemporaryDirectory() as dir: 236 | # # Writing 237 | # with open(os.path.join(dir, "test.spark"), "wb") as out: 238 | # writer(out, parsed_schema, records) 239 | # 240 | # result_records = [] 241 | # # Reading 242 | # with open(os.path.join(dir, "test.spark"), "rb") as fo: 243 | # for record in reader(fo): 244 | # result_records.append(ComplexTestModel.parse_obj(record)) 245 | # assert records == result_records 246 | 247 | 248 | def test_defaults(): 249 | expected_schema = StructType( 250 | [StructField("c1", StringType(), nullable=False, metadata={"parentClass": "DefaultValues", "default": "test"})] 251 | ) 252 | result = DefaultValues.spark_schema() 253 | assert result == json.loads(expected_schema.json()) 254 | # Reading schema with spark library to be sure format is correct 255 | schema = StructType.fromJson(result) 256 | assert len(schema.fields) == 1 257 | 258 | 259 | class StringEnumValue(str, Enum): 260 | v1 = "v1" 261 | v2 = "v2" 262 | 263 | 264 | class IntEnumValue(int, Enum): 265 | v1 = 1 266 | v2 = 2 267 | 268 | 269 | class FloatEnumValue(float, Enum): 270 | v1 = 1.1 271 | v2 = 2.2 272 | 273 | 274 | class TestEnum(SparkBase): 275 | c1: StringEnumValue 276 | c2: IntEnumValue 277 | c3: FloatEnumValue 278 | 279 | 280 | def test_enum(): 281 | expected_schema = StructType( 282 | [ 283 | StructField("c1", StringType(), nullable=False, metadata={"parentClass": "TestEnum"}), 284 | StructField("c2", LongType(), nullable=False, metadata={"parentClass": "TestEnum"}), 285 | StructField("c3", DoubleType(), nullable=False, metadata={"parentClass": "TestEnum"}), 286 | ] 287 | ) 288 | result = TestEnum.spark_schema() 289 | assert result == json.loads(expected_schema.json()) 290 | 291 | 292 | def test_coerce_type(): 293 | class TestCoerceType(SparkBase): 294 | c1: int = Field(json_schema_extra={"coerce_type": CoerceType.integer}) 295 | c2: Union[str, int] = Field(json_schema_extra={"coerce_type": CoerceType.string}) 296 | 297 | result = TestCoerceType.spark_schema() 298 | assert result["fields"][0]["type"] == "integer" 299 | assert result["fields"][1]["type"] == "string" 300 | 301 | 302 | class Nested2ModelCoerceType(SparkBase): 303 | c111: str = Field(json_schema_extra={"coerce_type": CoerceType.integer}) 304 | 305 | 306 | class NestedModelCoerceType(SparkBase): 307 | c11: Nested2ModelCoerceType 308 | 309 | 310 | class ComplexTestModelCoerceType(SparkBase): 311 | c1: List[NestedModelCoerceType] 312 | 313 | 314 | def test_coerce_type_complex_spark(): 315 | expected_schema = StructType( 316 | [ 317 | StructField( 318 | "c1", 319 | ArrayType(StructType.fromJson(NestedModelCoerceType.spark_schema())), 320 | nullable=False, 321 | metadata={"parentClass": "ComplexTestModelCoerceType"}, 322 | ) 323 | ] 324 | ) 325 | result = ComplexTestModelCoerceType.spark_schema() 326 | assert result == json.loads(expected_schema.json()) 327 | # Reading schema with spark library to be sure format is correct 328 | schema = StructType.fromJson(result) 329 | assert len(schema.fields) == 1 330 | assert isinstance(schema.fields[0].dataType.elementType.fields[0].dataType.fields[0].dataType, IntegerType) 331 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "annotated-types" 5 | version = "0.6.0" 6 | description = "Reusable constraint types to use with typing.Annotated" 7 | optional = false 8 | python-versions = ">=3.8" 9 | files = [ 10 | {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, 11 | {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, 12 | ] 13 | 14 | [package.dependencies] 15 | typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} 16 | 17 | [[package]] 18 | name = "atomicwrites" 19 | version = "1.4.1" 20 | description = "Atomic file writes." 21 | optional = false 22 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 23 | files = [ 24 | {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, 25 | ] 26 | 27 | [[package]] 28 | name = "attrs" 29 | version = "23.1.0" 30 | description = "Classes Without Boilerplate" 31 | optional = false 32 | python-versions = ">=3.7" 33 | files = [ 34 | {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, 35 | {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, 36 | ] 37 | 38 | [package.extras] 39 | cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] 40 | dev = ["attrs[docs,tests]", "pre-commit"] 41 | docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] 42 | tests = ["attrs[tests-no-zope]", "zope-interface"] 43 | tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] 44 | 45 | [[package]] 46 | name = "black" 47 | version = "22.12.0" 48 | description = "The uncompromising code formatter." 49 | optional = false 50 | python-versions = ">=3.7" 51 | files = [ 52 | {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, 53 | {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, 54 | {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, 55 | {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, 56 | {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, 57 | {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, 58 | {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, 59 | {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, 60 | {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, 61 | {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, 62 | {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, 63 | {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, 64 | ] 65 | 66 | [package.dependencies] 67 | click = ">=8.0.0" 68 | mypy-extensions = ">=0.4.3" 69 | pathspec = ">=0.9.0" 70 | platformdirs = ">=2" 71 | tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} 72 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} 73 | 74 | [package.extras] 75 | colorama = ["colorama (>=0.4.3)"] 76 | d = ["aiohttp (>=3.7.4)"] 77 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 78 | uvloop = ["uvloop (>=0.15.2)"] 79 | 80 | [[package]] 81 | name = "click" 82 | version = "8.1.7" 83 | description = "Composable command line interface toolkit" 84 | optional = false 85 | python-versions = ">=3.7" 86 | files = [ 87 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, 88 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, 89 | ] 90 | 91 | [package.dependencies] 92 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 93 | 94 | [[package]] 95 | name = "colorama" 96 | version = "0.4.6" 97 | description = "Cross-platform colored terminal text." 98 | optional = false 99 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 100 | files = [ 101 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 102 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 103 | ] 104 | 105 | [[package]] 106 | name = "coverage" 107 | version = "6.5.0" 108 | description = "Code coverage measurement for Python" 109 | optional = false 110 | python-versions = ">=3.7" 111 | files = [ 112 | {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, 113 | {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, 114 | {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, 115 | {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, 116 | {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, 117 | {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, 118 | {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, 119 | {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, 120 | {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, 121 | {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, 122 | {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, 123 | {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, 124 | {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, 125 | {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, 126 | {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, 127 | {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, 128 | {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, 129 | {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, 130 | {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, 131 | {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, 132 | {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, 133 | {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, 134 | {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, 135 | {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, 136 | {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, 137 | {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, 138 | {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, 139 | {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, 140 | {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, 141 | {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, 142 | {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, 143 | {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, 144 | {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, 145 | {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, 146 | {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, 147 | {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, 148 | {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, 149 | {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, 150 | {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, 151 | {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, 152 | {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, 153 | {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, 154 | {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, 155 | {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, 156 | {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, 157 | {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, 158 | {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, 159 | {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, 160 | {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, 161 | {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, 162 | ] 163 | 164 | [package.dependencies] 165 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} 166 | 167 | [package.extras] 168 | toml = ["tomli"] 169 | 170 | [[package]] 171 | name = "flake8" 172 | version = "4.0.1" 173 | description = "the modular source code checker: pep8 pyflakes and co" 174 | optional = false 175 | python-versions = ">=3.6" 176 | files = [ 177 | {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, 178 | {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, 179 | ] 180 | 181 | [package.dependencies] 182 | mccabe = ">=0.6.0,<0.7.0" 183 | pycodestyle = ">=2.8.0,<2.9.0" 184 | pyflakes = ">=2.4.0,<2.5.0" 185 | 186 | [[package]] 187 | name = "iniconfig" 188 | version = "2.0.0" 189 | description = "brain-dead simple config-ini parsing" 190 | optional = false 191 | python-versions = ">=3.7" 192 | files = [ 193 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 194 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 195 | ] 196 | 197 | [[package]] 198 | name = "isort" 199 | version = "5.12.0" 200 | description = "A Python utility / library to sort Python imports." 201 | optional = false 202 | python-versions = ">=3.8.0" 203 | files = [ 204 | {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, 205 | {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, 206 | ] 207 | 208 | [package.extras] 209 | colors = ["colorama (>=0.4.3)"] 210 | pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] 211 | plugins = ["setuptools"] 212 | requirements-deprecated-finder = ["pip-api", "pipreqs"] 213 | 214 | [[package]] 215 | name = "mccabe" 216 | version = "0.6.1" 217 | description = "McCabe checker, plugin for flake8" 218 | optional = false 219 | python-versions = "*" 220 | files = [ 221 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 222 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 223 | ] 224 | 225 | [[package]] 226 | name = "mypy" 227 | version = "0.961" 228 | description = "Optional static typing for Python" 229 | optional = false 230 | python-versions = ">=3.6" 231 | files = [ 232 | {file = "mypy-0.961-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0"}, 233 | {file = "mypy-0.961-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15"}, 234 | {file = "mypy-0.961-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3"}, 235 | {file = "mypy-0.961-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e"}, 236 | {file = "mypy-0.961-cp310-cp310-win_amd64.whl", hash = "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24"}, 237 | {file = "mypy-0.961-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723"}, 238 | {file = "mypy-0.961-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b"}, 239 | {file = "mypy-0.961-cp36-cp36m-win_amd64.whl", hash = "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d"}, 240 | {file = "mypy-0.961-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813"}, 241 | {file = "mypy-0.961-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e"}, 242 | {file = "mypy-0.961-cp37-cp37m-win_amd64.whl", hash = "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a"}, 243 | {file = "mypy-0.961-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6"}, 244 | {file = "mypy-0.961-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6"}, 245 | {file = "mypy-0.961-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d"}, 246 | {file = "mypy-0.961-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b"}, 247 | {file = "mypy-0.961-cp38-cp38-win_amd64.whl", hash = "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569"}, 248 | {file = "mypy-0.961-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932"}, 249 | {file = "mypy-0.961-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5"}, 250 | {file = "mypy-0.961-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648"}, 251 | {file = "mypy-0.961-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950"}, 252 | {file = "mypy-0.961-cp39-cp39-win_amd64.whl", hash = "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56"}, 253 | {file = "mypy-0.961-py3-none-any.whl", hash = "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66"}, 254 | {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"}, 255 | ] 256 | 257 | [package.dependencies] 258 | mypy-extensions = ">=0.4.3" 259 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 260 | typing-extensions = ">=3.10" 261 | 262 | [package.extras] 263 | dmypy = ["psutil (>=4.0)"] 264 | python2 = ["typed-ast (>=1.4.0,<2)"] 265 | reports = ["lxml"] 266 | 267 | [[package]] 268 | name = "mypy-extensions" 269 | version = "1.0.0" 270 | description = "Type system extensions for programs checked with the mypy type checker." 271 | optional = false 272 | python-versions = ">=3.5" 273 | files = [ 274 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, 275 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, 276 | ] 277 | 278 | [[package]] 279 | name = "packaging" 280 | version = "23.2" 281 | description = "Core utilities for Python packages" 282 | optional = false 283 | python-versions = ">=3.7" 284 | files = [ 285 | {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, 286 | {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, 287 | ] 288 | 289 | [[package]] 290 | name = "pathspec" 291 | version = "0.11.2" 292 | description = "Utility library for gitignore style pattern matching of file paths." 293 | optional = false 294 | python-versions = ">=3.7" 295 | files = [ 296 | {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, 297 | {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, 298 | ] 299 | 300 | [[package]] 301 | name = "platformdirs" 302 | version = "4.0.0" 303 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 304 | optional = false 305 | python-versions = ">=3.7" 306 | files = [ 307 | {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, 308 | {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, 309 | ] 310 | 311 | [package.extras] 312 | docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] 313 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] 314 | 315 | [[package]] 316 | name = "pluggy" 317 | version = "1.3.0" 318 | description = "plugin and hook calling mechanisms for python" 319 | optional = false 320 | python-versions = ">=3.8" 321 | files = [ 322 | {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, 323 | {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, 324 | ] 325 | 326 | [package.extras] 327 | dev = ["pre-commit", "tox"] 328 | testing = ["pytest", "pytest-benchmark"] 329 | 330 | [[package]] 331 | name = "py" 332 | version = "1.11.0" 333 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 334 | optional = false 335 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 336 | files = [ 337 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 338 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 339 | ] 340 | 341 | [[package]] 342 | name = "py4j" 343 | version = "0.10.9.5" 344 | description = "Enables Python programs to dynamically access arbitrary Java objects" 345 | optional = true 346 | python-versions = "*" 347 | files = [ 348 | {file = "py4j-0.10.9.5-py2.py3-none-any.whl", hash = "sha256:52d171a6a2b031d8a5d1de6efe451cf4f5baff1a2819aabc3741c8406539ba04"}, 349 | {file = "py4j-0.10.9.5.tar.gz", hash = "sha256:276a4a3c5a2154df1860ef3303a927460e02e97b047dc0a47c1c3fb8cce34db6"}, 350 | ] 351 | 352 | [[package]] 353 | name = "pycodestyle" 354 | version = "2.8.0" 355 | description = "Python style guide checker" 356 | optional = false 357 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 358 | files = [ 359 | {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, 360 | {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, 361 | ] 362 | 363 | [[package]] 364 | name = "pydantic" 365 | version = "2.5.2" 366 | description = "Data validation using Python type hints" 367 | optional = false 368 | python-versions = ">=3.7" 369 | files = [ 370 | {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, 371 | {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, 372 | ] 373 | 374 | [package.dependencies] 375 | annotated-types = ">=0.4.0" 376 | pydantic-core = "2.14.5" 377 | typing-extensions = ">=4.6.1" 378 | 379 | [package.extras] 380 | email = ["email-validator (>=2.0.0)"] 381 | 382 | [[package]] 383 | name = "pydantic-core" 384 | version = "2.14.5" 385 | description = "" 386 | optional = false 387 | python-versions = ">=3.7" 388 | files = [ 389 | {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, 390 | {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, 391 | {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, 392 | {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, 393 | {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, 394 | {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, 395 | {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, 396 | {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, 397 | {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, 398 | {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, 399 | {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, 400 | {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, 401 | {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, 402 | {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, 403 | {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, 404 | {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, 405 | {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, 406 | {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, 407 | {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, 408 | {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, 409 | {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, 410 | {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, 411 | {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, 412 | {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, 413 | {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, 414 | {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, 415 | {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, 416 | {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, 417 | {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, 418 | {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, 419 | {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, 420 | {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, 421 | {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, 422 | {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, 423 | {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, 424 | {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, 425 | {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, 426 | {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, 427 | {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, 428 | {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, 429 | {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, 430 | {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, 431 | {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, 432 | {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, 433 | {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, 434 | {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, 435 | {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, 436 | {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, 437 | {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, 438 | {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, 439 | {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, 440 | {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, 441 | {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, 442 | {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, 443 | {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, 444 | {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, 445 | {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, 446 | {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, 447 | {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, 448 | {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, 449 | {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, 450 | {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, 451 | {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, 452 | {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, 453 | {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, 454 | {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, 455 | {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, 456 | {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, 457 | {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, 458 | {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, 459 | {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, 460 | {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, 461 | {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, 462 | {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, 463 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, 464 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, 465 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, 466 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, 467 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, 468 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, 469 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, 470 | {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, 471 | {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, 472 | {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, 473 | {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, 474 | {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, 475 | {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, 476 | {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, 477 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, 478 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, 479 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, 480 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, 481 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, 482 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, 483 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, 484 | {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, 485 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, 486 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, 487 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, 488 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, 489 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, 490 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, 491 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, 492 | {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, 493 | {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, 494 | ] 495 | 496 | [package.dependencies] 497 | typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" 498 | 499 | [[package]] 500 | name = "pyflakes" 501 | version = "2.4.0" 502 | description = "passive checker of Python programs" 503 | optional = false 504 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 505 | files = [ 506 | {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, 507 | {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, 508 | ] 509 | 510 | [[package]] 511 | name = "pyproject-flake8" 512 | version = "0.0.1a5" 513 | description = "pyproject-flake8 (`pflake8`), a monkey patching wrapper to connect flake8 with pyproject.toml configuration" 514 | optional = false 515 | python-versions = "*" 516 | files = [ 517 | {file = "pyproject-flake8-0.0.1a5.tar.gz", hash = "sha256:22542080ba90d4bd80ee060852db15a24aeea61c9a29ed7c16f5b59b0e47a03a"}, 518 | {file = "pyproject_flake8-0.0.1a5-py2.py3-none-any.whl", hash = "sha256:c843d760c49d7b270e9abda58a57765c031918a9d10da25aa43572f5d77cac43"}, 519 | ] 520 | 521 | [package.dependencies] 522 | flake8 = "<5.0.0" 523 | tomli = {version = "*", markers = "python_version < \"3.11\""} 524 | 525 | [[package]] 526 | name = "pyspark" 527 | version = "3.2.4" 528 | description = "Apache Spark Python API" 529 | optional = true 530 | python-versions = ">=3.6" 531 | files = [ 532 | {file = "pyspark-3.2.4.tar.gz", hash = "sha256:aaa592f2f03fbef34c91bfcfee389cf53cbff6662ae47bd5f700573ed9ed61f2"}, 533 | ] 534 | 535 | [package.dependencies] 536 | py4j = "0.10.9.5" 537 | 538 | [package.extras] 539 | ml = ["numpy (>=1.7)"] 540 | mllib = ["numpy (>=1.7)"] 541 | pandas-on-spark = ["numpy (>=1.14)", "pandas (>=0.23.2)", "pyarrow (>=1.0.0)"] 542 | sql = ["pandas (>=0.23.2)", "pyarrow (>=1.0.0)"] 543 | 544 | [[package]] 545 | name = "pytest" 546 | version = "7.0.1" 547 | description = "pytest: simple powerful testing with Python" 548 | optional = false 549 | python-versions = ">=3.6" 550 | files = [ 551 | {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"}, 552 | {file = "pytest-7.0.1.tar.gz", hash = "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"}, 553 | ] 554 | 555 | [package.dependencies] 556 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 557 | attrs = ">=19.2.0" 558 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 559 | iniconfig = "*" 560 | packaging = "*" 561 | pluggy = ">=0.12,<2.0" 562 | py = ">=1.8.2" 563 | tomli = ">=1.0.0" 564 | 565 | [package.extras] 566 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] 567 | 568 | [[package]] 569 | name = "pytest-cov" 570 | version = "3.0.0" 571 | description = "Pytest plugin for measuring coverage." 572 | optional = false 573 | python-versions = ">=3.6" 574 | files = [ 575 | {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, 576 | {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, 577 | ] 578 | 579 | [package.dependencies] 580 | coverage = {version = ">=5.2.1", extras = ["toml"]} 581 | pytest = ">=4.6" 582 | 583 | [package.extras] 584 | testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] 585 | 586 | [[package]] 587 | name = "pytest-mock" 588 | version = "3.6.1" 589 | description = "Thin-wrapper around the mock package for easier use with pytest" 590 | optional = false 591 | python-versions = ">=3.6" 592 | files = [ 593 | {file = "pytest-mock-3.6.1.tar.gz", hash = "sha256:40217a058c52a63f1042f0784f62009e976ba824c418cced42e88d5f40ab0e62"}, 594 | {file = "pytest_mock-3.6.1-py3-none-any.whl", hash = "sha256:30c2f2cc9759e76eee674b81ea28c9f0b94f8f0445a1b87762cadf774f0df7e3"}, 595 | ] 596 | 597 | [package.dependencies] 598 | pytest = ">=5.0" 599 | 600 | [package.extras] 601 | dev = ["pre-commit", "pytest-asyncio", "tox"] 602 | 603 | [[package]] 604 | name = "tomli" 605 | version = "2.0.1" 606 | description = "A lil' TOML parser" 607 | optional = false 608 | python-versions = ">=3.7" 609 | files = [ 610 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 611 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 612 | ] 613 | 614 | [[package]] 615 | name = "typing-extensions" 616 | version = "4.8.0" 617 | description = "Backported and Experimental Type Hints for Python 3.8+" 618 | optional = false 619 | python-versions = ">=3.8" 620 | files = [ 621 | {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, 622 | {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, 623 | ] 624 | 625 | [extras] 626 | spark = ["pyspark"] 627 | 628 | [metadata] 629 | lock-version = "2.0" 630 | python-versions = ">=3.8,<4.0" 631 | content-hash = "195eba2e5fc00b94cec0c571a5c55d2b75cbe787f0f491850269b0304aee0bb2" 632 | --------------------------------------------------------------------------------