├── tests
│ ├── __init__.py
│ ├── assets
│ │ ├── bad_sas_files
│ │ │ ├── bad_xpt_file.xpt
│ │ │ └── bad_sas_file.sas7bdat
│ │ ├── expected_xpt
│ │ │ ├── file1.csv
│ │ │ ├── file2.csv
│ │ │ ├── file1.xlsx
│ │ │ ├── file2.xlsx
│ │ │ ├── file1.parquet
│ │ │ ├── file2.parquet
│ │ │ ├── file1.json
│ │ │ ├── file2.json
│ │ │ ├── file1.xml
│ │ │ └── file2.xml
│ │ ├── xpt_files
│ │ │ ├── file1.xpt
│ │ │ └── file2.xpt
│ │ ├── expected_files
│ │ │ ├── file1.xlsx
│ │ │ ├── file2.xlsx
│ │ │ ├── file3.xlsx
│ │ │ ├── file1.parquet
│ │ │ ├── file2.parquet
│ │ │ ├── file3.parquet
│ │ │ ├── file1.csv
│ │ │ ├── file1.json
│ │ │ ├── file3.csv
│ │ │ ├── file3.json
│ │ │ ├── file2.json
│ │ │ ├── file2.csv
│ │ │ ├── file1.xml
│ │ │ ├── file3.xml
│ │ │ └── file2.xml
│ │ └── sas7bdat_files
│ │   ├── file1.sas7bdat
│ │   ├── file2.sas7bdat
│ │   └── file3.sas7bdat
│ └── conftest.py
├── sas7bdat_converter
│ ├── py.typed
│ ├── __init__.py
│ └── converter.py
├── .github
│ ├── FUNDING.yml
│ ├── renovate.json5
│ ├── workflows
│ │ ├── release-drafter.yaml
│ │ ├── pypi_publish.yml
│ │ └── tests.yaml
│ └── release-draft-template.yaml
├── .pre-commit-config.yaml
├── .gitignore
├── LICENSE
├── pyproject.toml
├── CONTRIBUTING.md
├── README.md
└── poetry.lock
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/sas7bdat_converter/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | github: [sanders41]
2 |
--------------------------------------------------------------------------------
/tests/assets/bad_sas_files/bad_xpt_file.xpt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/assets/bad_sas_files/bad_sas_file.sas7bdat:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file1.csv:
--------------------------------------------------------------------------------
1 | "irow","trow","frow"
2 | 1.0,"Some text",1.5
3 | 2.0,"Some more test",17.23
4 |
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file2.csv:
--------------------------------------------------------------------------------
1 | "irow","trow","frow"
2 | 3.0,"text 1",21.5
3 | 4.0,"text 2",19.21
4 | 5.0,"text 3",42.42
5 |
--------------------------------------------------------------------------------
/tests/assets/xpt_files/file1.xpt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/xpt_files/file1.xpt
--------------------------------------------------------------------------------
/tests/assets/xpt_files/file2.xpt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/xpt_files/file2.xpt
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file1.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_xpt/file1.xlsx
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file2.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_xpt/file2.xlsx
--------------------------------------------------------------------------------
/tests/assets/expected_files/file1.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_files/file1.xlsx
--------------------------------------------------------------------------------
/tests/assets/expected_files/file2.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_files/file2.xlsx
--------------------------------------------------------------------------------
/tests/assets/expected_files/file3.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_files/file3.xlsx
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file1.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_xpt/file1.parquet
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file2.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_xpt/file2.parquet
--------------------------------------------------------------------------------
/tests/assets/expected_files/file1.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_files/file1.parquet
--------------------------------------------------------------------------------
/tests/assets/expected_files/file2.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_files/file2.parquet
--------------------------------------------------------------------------------
/tests/assets/expected_files/file3.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/expected_files/file3.parquet
--------------------------------------------------------------------------------
/tests/assets/sas7bdat_files/file1.sas7bdat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/sas7bdat_files/file1.sas7bdat
--------------------------------------------------------------------------------
/tests/assets/sas7bdat_files/file2.sas7bdat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/sas7bdat_files/file2.sas7bdat
--------------------------------------------------------------------------------
/tests/assets/sas7bdat_files/file3.sas7bdat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sanders41/sas7bdat-converter/HEAD/tests/assets/sas7bdat_files/file3.sas7bdat
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file1.json:
--------------------------------------------------------------------------------
1 | {
2 | "irow": {"0": 1.0, "1": 2.0},
3 | "trow": {"0": "Some text", "1": "Some more test"},
4 | "frow": {"0": 1.5, "1": 17.23}
5 | }
6 |
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file2.json:
--------------------------------------------------------------------------------
1 | {
2 | "irow": {"0": 3.0, "1": 4.0, "2": 5.0},
3 | "trow": {"0": "text 1", "1": "text 2", "2": "text 3"},
4 | "frow": {"0": 21.50, "1": 19.21, "2": 42.42}
5 | }
6 |
--------------------------------------------------------------------------------
/.github/renovate.json5:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": [
4 | "config:recommended",
5 | ":disableDependencyDashboard"
6 | ],
7 | lockFileMaintenance: {
8 | enabled: true,
9 | },
10 | "labels": ["dependencies", "skip-changelog"],
11 | }
12 |
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file1.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | -
4 | 1.0
5 | Some text
6 | 1.5
7 |
8 | -
9 | 2.0
10 | Some more test
11 | 17.23
12 |
13 |
14 |
--------------------------------------------------------------------------------
/.github/workflows/release-drafter.yaml:
--------------------------------------------------------------------------------
1 | name: Release Drafter
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | update_release_draft:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: release-drafter/release-drafter@v6
13 | with:
14 | config-name: release-draft-template.yaml
15 | env:
16 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
17 |
--------------------------------------------------------------------------------
/tests/assets/expected_xpt/file2.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | -
4 | 3.0
5 | text 1
6 | 21.5
7 |
8 | -
9 | 4.0
10 | text 2
11 | 19.21
12 |
13 | -
14 | 5.0
15 | text 3
16 | 42.42
17 |
18 |
19 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v6.0.0
4 | hooks:
5 | - id: check-added-large-files
6 | - id: check-toml
7 | - id: check-yaml
8 | - id: debug-statements
9 | - id: end-of-file-fixer
10 | - id: trailing-whitespace
11 | - repo: https://github.com/pre-commit/mirrors-mypy
12 | rev: v1.19.1
13 | hooks:
14 | - id: mypy
15 | - repo: https://github.com/astral-sh/ruff-pre-commit
16 | rev: v0.14.9
17 | hooks:
18 | - id: ruff-check
19 | args: [--fix, --exit-non-zero-on-fix]
20 | - id: ruff-format
21 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | *.swp
3 | *.DS_Store
4 |
5 | # Editor
6 | .vscode
7 |
8 | # Byte-compiled / optimized / DLL files
9 | __pycache__/
10 | *.py[cod]
11 | *$py.class
12 |
13 | # C extensions
14 | *.so
15 |
16 | # Distribution / packaging
17 | build/
18 | develop-eggs/
19 | dist/
20 | eggs/
21 | .eggs/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # Unit test / coverage reports
28 | htmlcov/
29 | .tox/
30 | .coverage
31 | .coverage.*
32 | .cache
33 | nosetests.xml
34 | coverage.xml
35 | *.cover
36 | .pytest_cache
37 |
38 | # virtualenv
39 | .venv
40 | venv/
41 | env/
42 | ENV/
43 |
44 | # pyenv
45 | .python-version
46 |
47 | # mypy
48 | .mypy_cache
49 |
--------------------------------------------------------------------------------
/.github/release-draft-template.yaml:
--------------------------------------------------------------------------------
1 | name-template: 'v$RESOLVED_VERSION'
2 | tag-template: 'v$RESOLVED_VERSION'
3 | exclude-labels:
4 | - 'dependencies'
5 | - 'skip-changelog'
6 | version-resolver:
7 | major:
8 | labels:
9 | - 'breaking-change'
10 | minor:
11 | labels:
12 | - 'enhancement'
13 | - 'feature'
14 | default: patch
15 | categories:
16 | - title: 'Features'
17 | labels:
18 | - 'feature'
19 | - 'enhancement'
20 | - title: 'Bug Fixes'
21 | labels:
22 | - 'bug-fix'
23 | - title: '⚠️ Breaking changes'
24 | label: 'breaking-change'
25 | change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
26 | template: |
27 | ## Changes
28 |
29 | $CHANGES
30 |
--------------------------------------------------------------------------------
/sas7bdat_converter/__init__.py:
--------------------------------------------------------------------------------
1 | from sas7bdat_converter.converter import ( # noqa: F401
2 | batch_to_csv,
3 | batch_to_excel,
4 | batch_to_json,
5 | batch_to_xml,
6 | dir_to_csv,
7 | dir_to_excel,
8 | dir_to_json,
9 | dir_to_xml,
10 | to_csv,
11 | to_dataframe,
12 | to_excel,
13 | to_json,
14 | to_xml,
15 | )
16 |
17 | __version__ = "3.0.0"
18 |
19 | __all__ = [
20 | "batch_to_csv",
21 | "batch_to_excel",
22 | "batch_to_json",
23 | "batch_to_xml",
24 | "dir_to_csv",
25 | "dir_to_excel",
26 | "dir_to_json",
27 | "dir_to_xml",
28 | "to_csv",
29 | "to_dataframe",
30 | "to_excel",
31 | "to_json",
32 | "to_xml",
33 | ]
34 |
35 | name = "sas7bdat_converter"
36 |
--------------------------------------------------------------------------------
/.github/workflows/pypi_publish.yml:
--------------------------------------------------------------------------------
1 | name: PyPI Publish
2 | on:
3 | release:
4 | types:
5 | - published
6 | env:
7 | PYTHON_VERSION: "3.10"
8 | jobs:
9 | deploy:
10 | runs-on: ubuntu-latest
11 | permissions:
12 | # For PyPI's trusted publishing.
13 | id-token: write
14 | steps:
15 | - uses: actions/checkout@v4
16 | - name: Install uv
17 | uses: astral-sh/setup-uv@v6
18 | with:
19 | enable-cache: true
20 | - name: Set up Python
21 | uses: actions/setup-python@v5
22 | with:
23 | python-version: ${{ env.PYTHON_VERSION }}
24 | - name: Install Dependencies
25 | run: uv sync --frozen --all-extras
26 | - name: Build package
27 | run: uv build
28 | - name: Publish package
29 | run: uv publish
30 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file1.csv:
--------------------------------------------------------------------------------
1 | "integer_row","text_row","float_row","date_row"
2 | 1.0,"Some text",2.5,"2018-01-02"
3 | 2.0,"Some more text",17.23,"2018-02-05"
4 | 3.0,"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.",3.21,"2017-11-21"
5 | 4.0,"Text",100.9,"2016-05-19"
6 | 5.0,"Test",98.6,"1999-10-25"
7 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file1.json:
--------------------------------------------------------------------------------
1 | {"integer_row":{"0":1.0,"1":2.0,"2":3.0,"3":4.0,"4":5.0},"text_row":{"0":"Some text","1":"Some more text","2":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.","3":"Text","4":"Test"},"float_row":{"0":2.5,"1":17.23,"2":3.21,"3":100.9,"4":98.6},"date_row":{"0":1514,"1":1517,"2":1511,"3":1463,"4":940}}
2 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file3.csv:
--------------------------------------------------------------------------------
1 | "float_row","text_row","date_row"
2 | 2.5,"Some text","2018-01-02"
3 | 17.23,"Some more text","2018-02-05"
4 | 3.21,"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.","2017-11-21"
5 | 100.9,"Text","2016-05-19"
6 | 98.6,"Test","1999-10-25"
7 | 159.357,"more","2016-06-15"
8 | 852.369,"again","1973-07-14"
9 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file3.json:
--------------------------------------------------------------------------------
1 | {"float_row":{"0":2.5,"1":17.23,"2":3.21,"3":100.9,"4":98.6,"5":159.357,"6":852.369},"text_row":{"0":"Some text","1":"Some more text","2":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.","3":"Text","4":"Test","5":"more","6":"again"},"date_row":{"0":1514,"1":1517,"2":1511,"3":1463,"4":940,"5":1465,"6":111}}
2 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file2.json:
--------------------------------------------------------------------------------
1 | {"date_row":{"0":1514,"1":1517,"2":1511,"3":1463,"4":940,"5":1465,"6":111,"7":986},"text_row":{"0":"Some text","1":"Some more text","2":"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.","3":"Text","4":"Test","5":"more","6":"again","7":"one more"},"integer_row":{"0":6.0,"1":7.0,"2":8.0,"3":9.0,"4":10.0,"5":11.0,"6":12.0,"7":13.0}}
2 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file2.csv:
--------------------------------------------------------------------------------
1 | "date_row","text_row","integer_row"
2 | "2018-01-02","Some text",6.0
3 | "2018-02-05","Some more text",7.0
4 | "2017-11-21","Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.",8.0
5 | "2016-05-19","Text",9.0
6 | "1999-10-25","Test",10.0
7 | "2016-06-15","more",11.0
8 | "1973-07-14","again",12.0
9 | "2001-04-03","one more",13.0
10 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018-2021 Paul Sanders
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import pytest
4 |
5 | ASSETS_DIR = Path().absolute().joinpath("tests/assets")
6 |
7 |
8 | @pytest.fixture(scope="module")
9 | def expected_dir():
10 | return ASSETS_DIR / "expected_files"
11 |
12 |
13 | @pytest.fixture(scope="module")
14 | def sas_file_1():
15 | return ASSETS_DIR.joinpath("sas7bdat_files/file1.sas7bdat")
16 |
17 |
18 | @pytest.fixture(scope="module")
19 | def sas_file_2():
20 | return ASSETS_DIR.joinpath("sas7bdat_files/file2.sas7bdat")
21 |
22 |
23 | @pytest.fixture(scope="module")
24 | def sas_file_3():
25 | return ASSETS_DIR.joinpath("sas7bdat_files/file3.sas7bdat")
26 |
27 |
28 | @pytest.fixture(scope="module")
29 | def sas7bdat_dir():
30 | return ASSETS_DIR / "sas7bdat_files"
31 |
32 |
33 | @pytest.fixture
34 | def bad_sas_file():
35 | return ASSETS_DIR.joinpath("bad_sas_files/bad_sas_file.sas7bdat")
36 |
37 |
38 | @pytest.fixture(scope="module")
39 | def xpt_file_1():
40 | return ASSETS_DIR.joinpath("xpt_files/file1.xpt")
41 |
42 |
43 | @pytest.fixture(scope="module")
44 | def xpt_file_2():
45 | return ASSETS_DIR.joinpath("xpt_files/file2.xpt")
46 |
47 |
48 | @pytest.fixture(scope="module")
49 | def xpt_dir():
50 | return ASSETS_DIR / "xpt_files"
51 |
52 |
53 | @pytest.fixture
54 | def bad_xpt_file():
55 | return ASSETS_DIR.joinpath("bad_sas_files/bad_xpt_file.xpt")
56 |
57 |
58 | @pytest.fixture(scope="module")
59 | def xpt_expected_dir():
60 | return ASSETS_DIR / "expected_xpt"
61 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yaml:
--------------------------------------------------------------------------------
1 | name: Tests
2 | on:
3 | push:
4 | branches:
5 | - main
6 | pull_request:
7 | branches:
8 | - main
9 | jobs:
10 | linting:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: Install uv
15 | uses: astral-sh/setup-uv@v6
16 | with:
17 | enable-cache: true
18 | - name: Set up Python
19 | uses: actions/setup-python@v5
20 | with:
21 | python-version: "3.10"
22 | - name: Install Dependencies
23 | run: uv sync --frozen --all-extras
24 | - name: mypy check
25 | run: uv run mypy sas7bdat_converter
26 |
27 | Testing:
28 | strategy:
29 | fail-fast: false
30 | matrix:
31 | python-version: ["3.10", "3.11", "3.12", "3.13"]
32 | os: [ubuntu-latest, windows-latest, macos-latest]
33 | runs-on: ${{ matrix.os }}
34 | steps:
35 | - uses: actions/checkout@v4
36 | - name: Install uv
37 | uses: astral-sh/setup-uv@v6
38 | with:
39 | enable-cache: true
40 | - name: Setup Python
41 | uses: actions/setup-python@v5
42 | with:
43 | python-version: ${{ matrix.python-version }}
44 | - name: Install Dependencies
45 | run: uv sync --frozen --all-extras
46 | - name: Test with pytest
47 | run: uv run pytest --cov=sas7bdat_converter --cov-report=xml
48 | - name: Upload coverage
49 | uses: codecov/codecov-action@v5
50 | with:
51 | token: ${{ secrets.CODECOV_TOKEN }}
52 | fail_ci_if_error: true
53 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file1.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | -
4 | 1.0
5 | Some text
6 | 2.5
7 | 2018-01-02 00:00:00
8 |
9 | -
10 | 2.0
11 | Some more text
12 | 17.23
13 | 2018-02-05 00:00:00
14 |
15 | -
16 | 3.0
17 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.
18 | 3.21
19 | 2017-11-21 00:00:00
20 |
21 | -
22 | 4.0
23 | Text
24 | 100.9
25 | 2016-05-19 00:00:00
26 |
27 | -
28 | 5.0
29 | Test
30 | 98.6
31 | 1999-10-25 00:00:00
32 |
33 |
34 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file3.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | -
4 | 2.5
5 | Some text
6 | 2018-01-02 00:00:00
7 |
8 | -
9 | 17.23
10 | Some more text
11 | 2018-02-05 00:00:00
12 |
13 | -
14 | 3.21
15 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.
16 | 2017-11-21 00:00:00
17 |
18 | -
19 | 100.9
20 | Text
21 | 2016-05-19 00:00:00
22 |
23 | -
24 | 98.6
25 | Test
26 | 1999-10-25 00:00:00
27 |
28 | -
29 | 159.357
30 | more
31 | 2016-06-15 00:00:00
32 |
33 | -
34 | 852.369
35 | again
36 | 1973-07-14 00:00:00
37 |
38 |
39 |
--------------------------------------------------------------------------------
/tests/assets/expected_files/file2.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | -
4 | 2018-01-02 00:00:00
5 | Some text
6 | 6.0
7 |
8 | -
9 | 2018-02-05 00:00:00
10 | Some more text
11 | 7.0
12 |
13 | -
14 | 2017-11-21 00:00:00
15 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nunc lobortis, risus nec euismod condimentum, lectus ligula porttitor massa, vel ornare mauris arcu vel augue. Maecenas rhoncus consectetur nisl, ac convallis enim pellentesque efficitur. Praesent tristique . End of textlectus a dolor sodales, in porttitor felis auctor. Etiam dui mauris, commodo at venenatis eu, lacinia nec tellus. Curabitur dictum tincidunt convallis. Duis vestibulum mauris quis felis euismod bibendum. Nulla eget nunc arcu. Nam quis est urna. In eleifend ultricies ultrices. In lacinia auctor ex, sed commodo nisl fringilla sed. Fusce iaculis viverra eros, nec elementum velit aliquam non. Aenean sollicitudin consequat libero, eget mattis.
16 | 8.0
17 |
18 | -
19 | 2016-05-19 00:00:00
20 | Text
21 | 9.0
22 |
23 | -
24 | 1999-10-25 00:00:00
25 | Test
26 | 10.0
27 |
28 | -
29 | 2016-06-15 00:00:00
30 | more
31 | 11.0
32 |
33 | -
34 | 1973-07-14 00:00:00
35 | again
36 | 12.0
37 |
38 | -
39 | 2001-04-03 00:00:00
40 | one more
41 | 13.0
42 |
43 |
44 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling"]
3 | build-backend = "hatchling.build"
4 |
5 | [project]
6 | name = "sas7bdat-converter"
7 | description = "Convert sas7bdat and xport files into other formats"
8 | authors = [ { name = "Paul Sanders", email = "paul@paulsanders.dev" } ]
9 | requires-python = ">=3.10"
10 | license = { file = "LICENSE" }
11 | readme = "README.md"
12 | keywords = ["sas", "sas7bdat", "converter", "xpt", "XPort"]
13 | classifiers=[
14 | "Development Status :: 5 - Production/Stable",
15 | "Programming Language :: Python :: 3.10",
16 | "Programming Language :: Python :: 3.11",
17 | "Programming Language :: Python :: 3.12",
18 | "Programming Language :: Python :: 3.13",
19 | "License :: OSI Approved :: MIT License",
20 | "Operating System :: OS Independent",
21 | "Typing :: Typed",
22 | ]
23 | dynamic = ["version"]
24 | dependencies = [
25 | "pandas>=2.0.0",
26 | ]
27 |
28 | [project.optional-dependencies]
29 | openpyxl = ["openpyxl>=3.0.5"]
30 | all = ["openpyxl>=3.0.5"]
31 |
32 | [dependency-groups]
33 | dev = [
34 | "mypy[faster-cache]==1.15.0",
35 | "pre-commit==4.2.0",
36 | "pytest==8.3.5",
37 | "pytest-cov==6.1.1",
38 | "ruff==0.11.11",
39 | "pandas-stubs==2.2.3.250308",
40 | ]
41 |
42 | [tool.hatch.version]
43 | path = "sas7bdat_converter/__init__.py"
44 |
45 | [project.urls]
46 | repository = "https://github.com/sanders41/sas7bdat-converter"
47 | homepage = "https://github.com/sanders41/sas7bdat-converter"
48 | documentation = "https://github.com/sanders41/sas7bdat-converter"
49 |
50 | [tool.mypy]
51 | disallow_untyped_defs = true
52 | strict = true
53 |
54 | [[tool.mypy.overrides]]
55 | module = ["tests.*"]
56 | disallow_untyped_defs = false
57 |
58 | [[tool.mypy.overrides]]
59 | module = ["pyarrow.*"]
60 | ignore_missing_imports = true
61 |
62 | [tool.pytest.ini_options]
63 | minversion = "6.0"
64 | addopts = "--cov=sas7bdat_converter --cov-report term-missing"
65 |
66 | [tool.ruff]
67 | line-length = 100
68 | target-version = "py310"
69 | fix = true
70 |
71 | [tool.ruff.lint]
72 | select=[
73 | "E", # pycodestyle
74 | "B", # flake8-bugbear
75 | "W", # Warning
76 | "F", # pyflakes
77 | "UP", # pyupgrade
78 | "I001", # unsorted-imports
79 | "T201", # Don't allow print
80 | "T203", # Don't allow pprint
81 | "RUF022", # Unsorted __all__
82 | "RUF023", # Unforted __slots__
83 | ]
84 | ignore=[
85 | # Recommended ignores by ruff when using formatter
86 | "E501",
87 | "W191",
88 | "E111",
89 | "E114",
90 | "E117",
91 | "D206",
92 | "D300",
93 | "Q000",
94 | "Q001",
95 | "Q002",
96 | "Q003",
97 | "COM812",
98 | "COM819",
99 | "ISC001",
100 | "ISC002",
101 | ]
102 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | ## Where to start
4 |
5 | All contributions, bug reports, bug fixes, documentation improvements, enhancements, and ideas are
6 | welcome.
7 |
8 | The best place to start is to check the [issues](https://github.com/sanders41/sas7bdat-converter/issues)
9 | for something that interests you.
10 |
11 | ## Bug Reports
12 |
13 | Please include:
14 |
15 | 1. A short, self-contained Python snippet reproducing the problem. You can format the code by using
16 | [GitHub markdown](https://docs.github.com/en/free-pro-team@latest/github/writing-on-github). For
17 | example:
18 |
19 | ```py
20 | from sas7bdat_converter import to_dataframe
21 |
22 | df = to_dataframe('/path/to/file')
23 | ...
24 | ```
25 |
26 | 2. Explain what is currently happening and what you expect instead.
27 |
28 | ## Working on the code
29 |
30 | ### Fork the project
31 |
32 | In order to work on the project you will need your own fork. To do this click the "Fork" button on
33 | this project.
34 |
35 | Once the project is forked clone it to your local machine:
36 |
37 | ```sh
38 | git clone https://github.com/your-user-name/sas7bdat-converter.git
39 | cd sas7bdat-converter
40 | git remote add upstream https://github.com/sanders41/sas7bdat-converter.git
41 | ```
42 |
43 | This creates the directory sas7bdat-converter and connects your repository to the upstream
44 | (main project) repository.
45 |
46 | ### Working with the code
47 |
48 | Note: This project uses Poetry to manage dependencies. If you do not already have Poetry installed
49 | you will need to install it with the instructions [here](https://python-poetry.org/docs/#installation)
50 |
51 | First the requirements need to be installed.
52 |
53 | ```sh
54 | poetry install -E all
55 | ```
56 |
57 | ### Creating a branch
58 |
59 | You want your main branch to reflect only production-ready code, so create a feature branch for
60 | making your changes. For example:
61 |
62 | ```sh
63 | git checkout -b my-new-feature
64 | ```
65 |
66 | This changes your working directory to the my-new-feature branch. Keep any changes in this branch
67 | specific to one bug or feature so the purpose is clear. You can have many my-new-features and switch
68 | in between them using the git checkout command.
69 |
70 | When creating this branch, make sure your main branch is up to date with the latest upstream
71 | main version. To update your local main branch, you can do:
72 |
73 | ```sh
74 | git checkout main
75 | git pull upstream main --ff-only
76 | ```
77 |
78 | ### Code Standards and tests (ruff, mypy, pytest, and pre-commit)
79 |
80 | sas7bdat-converter uses [ruff](https://github.com/astral-sh/ruff) and
81 | [mypy](https://mypy.readthedocs.io/en/stable/) to ensure consistent code formatting.
82 |
83 | You can run linting on your code at any time with:
84 |
85 | ```sh
86 | # Run ruff linter
87 | poetry run ruff check sas7bdat_converter tests
88 |
89 | # Run ruff formatter
90 | poetry run ruff format sas7bdat_converter tests
91 |
92 | # Run mypy
93 | poetry run mypy sas7bdat_converter
94 | ```
95 |
96 | It is also suggested that you set up [pre-commit](https://pre-commit.com/) in order to run linting
97 | when you commit changes to your branch. To set up pre-commit for this project run:
98 |
99 | ```sh
100 | pre-commit install
101 | ```
102 |
103 | After this pre-commit will automatically run any time you check in code to your branches. You can
104 | also run pre-commit at any time with:
105 |
106 | ```sh
107 | pre-commit run --all-files
108 | ```
109 |
110 | ### Type Hints
111 |
112 | At a minimum all variables/arguments that receive data should contain type hints, and all
113 | functions/methods should specify the return type.
114 |
115 | Accepted examples:
116 |
117 | ```py
118 | def my_function(argument: str) -> None:
119 | ...
120 |
121 |
122 | def another_function(num: int) -> int:
123 | return num + 1
124 | ```
125 |
126 | Rejected examples:
127 |
128 | ```py
129 | def my_function(argument):
130 | ...
131 |
132 |
133 | def another_function(num):
134 | return num + 1
135 | ```
136 |
137 | Type hints on files in the tests directory are optional.
138 |
139 | ### Testing
140 |
141 | This project uses [pytest](https://docs.pytest.org/en/stable/)
142 | for testing. Please ensure that any additions/changes you make to the code have tests to go along
143 | with them. Code coverage should not drop below its current level with any pull requests you make;
144 | if it does, the pull request will not be accepted. You can view the current coverage level in the
145 | codecov badge on the
146 | [main GitHub page](https://github.com/sanders41/sas7bdat-converter). You can run tests and see the
147 | code coverage by running:
148 |
149 | ```sh
150 | poetry run pytest
151 | ```
152 |
153 | If you want to see which lines are missing code coverage run the tests with:
154 |
155 | ```sh
156 | poetry run pytest --cov-report term-missing
157 | ```
158 |
159 | In addition to maintaining the coverage percentage, please ensure that all
160 | tests are passing before submitting a pull request.
161 |
162 | ## Committing your code
163 |
164 | Once you have made changes to the code on your branch you can see which files have changed by
165 | running:
166 |
167 | ```sh
168 | git status
169 | ```
170 |
171 | If new files were created and are not tracked by git, they can be added by running:
172 |
173 | ```sh
174 | git add .
175 | ```
176 |
177 | Now you can commit your changes in your local repository:
178 |
179 | ```sh
180 | git commit -am 'Some short helpful message to describe your changes'
181 | ```
182 |
183 | If you set up pre-commit and any of the tests fail, the commit will be cancelled and you will need to
184 | fix any errors. Once the errors are fixed you can run the same git commit command again.
185 |
186 | ## Push your changes
187 |
188 | Once your changes are ready and all linting/tests are passing you can push your changes to your
189 | forked repository:
190 |
191 | ```sh
192 | git push origin my-new-feature
193 | ```
194 |
195 | origin is the default name of your remote repository on GitHub. You can see all of your remote
196 | repositories by running:
197 |
198 | ```sh
199 | git remote -v
200 | ```
201 |
202 | ## Making a Pull Request
203 |
204 | After pushing your code to origin it is now on GitHub but not yet part of the sas7bdat-converter
205 | project. When you’re ready to ask for a code review, file a pull request. Before you do, once again
206 | make sure that you have followed all the guidelines outlined in this document regarding code style,
207 | tests, and documentation. You should also double check your branch changes
208 | against the branch it was based on by:
209 |
210 | 1. Navigate to your repository on GitHub
211 | 2. Click on Branches
212 | 3. Click on the Compare button for your feature branch
213 | 4. Select the base and compare branches, if necessary. This will be main and my-new-feature, respectively.
214 |
215 | ### Make the pull request
216 |
217 | If everything looks good, you are ready to make a pull request. This is how you let the maintainers
218 | of the sas7bdat-converter project know you have code ready to be reviewed. To submit the pull request:
219 |
220 | 1. Navigate to your repository on GitHub
221 | 2. Click on the Pull Request button for your feature branch
222 | 3. You can then click on Commits and Files Changed to make sure everything looks okay one last time
223 | 4. Write a description of your changes in the Conversation tab
224 | 5. Click Send Pull Request
225 |
226 | This request then goes to the repository maintainers, and they will review the code.
227 |
228 | ### Updating your pull request
229 |
230 | Changes to your code may be needed based on the review of your pull request. If this is the case
231 | you can make them in your branch, add a new commit to that branch, push it to GitHub, and the pull
232 | request will be automatically updated. Pushing them to GitHub again is done by:
233 |
234 | ```sh
235 | git push origin my-new-feature
236 | ```
237 |
238 | This will automatically update your pull request with the latest code and restart the Continuous
239 | Integration tests.
240 |
241 | Another reason you might need to update your pull request is to solve conflicts with changes that
242 | have been merged into the main branch since you opened your pull request.
243 |
244 | To do this, you need to rebase your branch:
245 |
246 | ```sh
247 | git checkout my-new-feature
248 | git fetch upstream
249 | git rebase upstream/main
250 | ```
251 |
252 | There may be some merge conflicts that need to be resolved. After the feature branch has been
253 | updated locally, you can now update your pull request by pushing to the branch on GitHub:
254 |
255 | ```sh
256 | git push origin my-new-feature
257 | ```
258 |
259 | If you rebased and get an error when pushing your changes you can resolve it with:
260 |
261 | ```sh
262 | git push origin my-new-feature --force
263 | ```
264 |
265 | ## Delete your merged branch (optional)
266 |
267 | Once your feature branch is accepted into upstream, you’ll probably want to get rid of the branch.
268 | First, merge upstream main into your main branch so git knows it is safe to delete your branch:
269 |
270 | ```sh
271 | git fetch upstream
272 | git checkout main
273 | git merge upstream/main
274 | ```
275 |
276 | Then you can do:
277 |
278 | ```sh
279 | git branch -d my-new-feature
280 | ```
281 |
282 | Make sure you use a lower-case -d, or else git won’t warn you if your feature branch has not
283 | actually been merged.
284 |
285 | The branch will still exist on GitHub, so to delete it there do:
286 |
287 | ```sh
288 | git push origin --delete my-new-feature
289 | ```
290 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # sas7bdat-converter: Convert sas7bdat files into other formats
2 |
3 | [](https://github.com/sanders41/sas7bdat-converter/actions?query=workflow%3ATests+branch%3Amain+event%3Apush)
4 | [](https://results.pre-commit.ci/latest/github/sanders41/sas7bdat-converter/main)
5 | [](https://codecov.io/gh/sanders41/sas7bdat-converter)
6 | [](https://badge.fury.io/py/sas7bdat-converter)
7 | [](https://github.com/sanders41/sas7bdat-converter)
8 |
9 | Converts proprietary sas7bdat and/or xport files from SAS into formats such as csv, json, and Excel usable
10 | by other programs. Currently supported conversions are csv, Excel (xlsx format), json, Pandas
11 | DataFrame, and XML.
12 |
13 | Conversions can be done on either a single file, an entire directory, or a batch of specified files.
14 |
15 | ## Install
16 |
17 | `pip install sas7bdat-converter`
18 |
19 | If you would like to be able to convert to Excel files you will need to install with the extra Excel dependency.
20 |
21 | `pip install sas7bdat-converter[openpyxl]`
22 |
23 | ## Usage
24 |
25 | In all cases either sas7bdat or xport files can be converted. Examples below all use the .sas7bdat
26 | extension, xport files with a .xpt extension will also work.
27 |
28 | - **batch_to_csv(file_dicts)** - Convert multiple sas7bdat files into csv files at once.
29 |
30 | - file_dicts = A list containing a dictionary for each file to convert. The dictionary is required
31 | to contain 'sas7bdat_file' containing the path and name for the sas7bdat file, and 'export_file'
32 | containing the path and name for the csv files. The csv file extension should be .csv. File paths
33 | can be sent as either strings or Path objects.
34 | - continue_on_error = If set to True, processing of files in a batch will continue if there is a
35 | file conversion error instead of raising an exception (see the second example below). Default = False
36 |
37 | **Example**
38 |
39 | ```py
40 | import sas7bdat_converter
41 |
42 | file_dicts = [
43 | {
44 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_1.sas7bdat',
45 | 'export_file': '/path/to/new/files/example_1.csv',
46 | },
47 | {
48 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_2.sas7bdat',
49 | 'export_file': '/path/to/new/files/example_2.csv',
50 | },
51 | ]
52 | sas7bdat_converter.batch_to_csv(file_dicts)
53 | ```
54 |
55 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
56 | `c:\path\to\sas7bdat\files\example_1.sas7bdat`.
57 |
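As noted above, the same call works with `pathlib.Path` objects, and `continue_on_error` keeps the batch going when a file fails to convert. A minimal sketch (the paths are hypothetical):

```py
from pathlib import Path

import sas7bdat_converter

file_dicts = [
    {
        'sas7bdat_file': Path('/path/to/sas7bdat/files/example_1.sas7bdat'),
        'export_file': Path('/path/to/new/files/example_1.csv'),
    },
    {
        'sas7bdat_file': Path('/path/to/sas7bdat/files/example_2.sas7bdat'),
        'export_file': Path('/path/to/new/files/example_2.csv'),
    },
]

# Files that fail to convert are reported and skipped instead of raising an exception
sas7bdat_converter.batch_to_csv(file_dicts, continue_on_error=True)
```
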
58 | - **batch_to_excel(file_dicts)** - Convert multiple sas7bdat files into Excel files at once.
59 |
60 | - file_dicts = A list containing a dictionary for each file to convert. The dictionary is required
61 | to contain 'sas7bdat_file' containing the path and name for the sas7bdat file, and 'export_file'
62 | containing the path and name for the excel files. The Excel file extension should be .xlsx. File
63 | paths can be sent as either strings or Path objects.
64 | - continue_on_error = If set to true processing of files in a batch will continue if there is a
65 | file conversion error instead of raising an exception. Default = False
66 |
67 | **Example**
68 |
69 | ```py
70 | import sas7bdat_converter
71 |
72 | file_dicts = [
73 | {
74 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_1.sas7bdat',
75 | 'export_file': '/path/to/new/files/example_1.xlsx',
76 | },
77 | {
78 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_2.sas7bdat',
79 | 'export_file': '/path/to/new/files/example_2.xlsx',
80 | },
81 | ]
82 | sas7bdat_converter.batch_to_excel(file_dicts)
83 | ```
84 |
85 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
86 | `c:\path\to\sas7bdat\files\example_1.sas7bdat`.
87 |
88 | - **batch_to_json(file_dicts)** - Convert multiple sas7bdat files into json files at once.
89 |
90 | - file_dicts = A list containing a dictionary for each file to convert. The dictionary is required
91 | to contain 'sas7bdat_file' containing the path and name for the sas7bdat file, and 'export_file'
92 | containing the path and name for the json files. The json file extension should be .json. File
93 | paths can be sent as either strings or Path objects.
94 | - continue_on_error = If set to true processing of files in a batch will continue if there is a
95 | file conversion error instead of raising an exception. Default = False
96 |
97 | **Example**
98 |
99 | ```py
100 | import sas7bdat_converter
101 |
102 | file_dicts = [
103 | {
104 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_1.sas7bdat',
105 | 'export_file': '/path/to/new/files/example_1.json',
106 | },
107 | {
108 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_2.sas7bdat',
109 | 'export_file': '/path/to/new/files/example_2.json',
110 | },
111 | ]
112 | sas7bdat_converter.batch_to_json(file_dicts)
113 | ```
114 |
115 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
116 | `c:\path\to\sas7bdat\files\example_1.sas7bdat`.
117 |
118 | - **batch_to_xml(file_dicts)** - Convert multiple sas7bdat files into XML files at once.
119 |
120 | - file_dicts = A list containing a dictionary for each file to convert. The dictionary is required
121 | to contain 'sas7bdat_file' containing the path and name for the sas7bdat file, and 'export_file'
122 | containing the path and name for the xml files. The XML file extension should be .xml. File paths
123 | can be sent as either strings or Path objects.
124 | - continue_on_error = If set to true processing of files in a batch will continue if there is a
125 | file conversion error instead of raising an exception. Default = False
126 |
127 | **Example**
128 |
129 | ```py
130 | import sas7bdat_converter
131 |
132 | file_dicts = [
133 | {
134 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_1.sas7bdat',
135 | 'export_file': '/path/to/new/files/example_1.xml',
136 | },
137 | {
138 | 'sas7bdat_file': '/path/to/sas7bdat/files/example_2.sas7bdat',
139 | 'export_file': '/path/to/new/files/example_2.xml',
140 | },
141 | ]
142 | sas7bdat_converter.batch_to_xml(file_dicts)
143 | ```
144 |
145 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
146 | `c:\path\to\sas7bdat\files\example_1.sas7bdat`.
147 |
148 | - **dir_to_csv(dir_path, export_path=None)** - Convert all sas7bdat files in a directory into csv
149 | files at once. File paths can be sent as either strings or Path objects.
150 |
151 | - dir_path = The directory that contains the sas7bdat files to convert.
152 | - export_path = Optional path for the converted files. If no path is supplied the new files will
153 | be put into the dir_path directory with the sas7bdat files. File paths can be sent as either
154 | strings or Path objects. Default = None
155 | - continue_on_error = If set to true processing of files in a batch will continue if there is a
156 | file conversion error instead of raising an exception. Default = False
157 |
158 | **Example**
159 |
160 | ```py
161 | import sas7bdat_converter
162 |
163 | # Option 1: put the converted files in the same directory as the sas7bdat files
164 | sas7bdat_converter.dir_to_csv('/path/to/sas7bdat/files')
165 |
166 | # Option 2: put the converted files in a different directory
167 | sas7bdat_converter.dir_to_csv('/path/to/sas7bdat/files', 'path/for/new/files')
168 | ```
169 |
170 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
171 | `c:\path\to\sas7bdat\files`.
172 |
173 | - **dir_to_excel(dir_path, export_path=None)** - Convert all sas7bdat files in a directory into
174 | Excel files at once. File paths can be sent as either strings or Path objects.
175 |
176 | - dir_path = The directory that contains the sas7bdat files to convert.
177 | - export_path = Optional path for the converted files. If no path is supplied the new files will
178 | be put into the dir_path directory with the sas7bdat files. Default = None
179 | - continue_on_error = If set to true processing of files in a batch will continue if there is a
180 | file conversion error instead of raising an exception. Default = False
181 |
182 | **Example**
183 |
184 | ```py
185 | import sas7bdat_converter
186 |
187 | # Option 1: put the converted files in the same directory as the sas7bdat files
188 | sas7bdat_converter.dir_to_excel('/path/to/sas7bdat/files')
189 |
190 | # Option 2: put the converted files in a different directory
191 | sas7bdat_converter.dir_to_excel('/path/to/sas7bdat/files', 'path/for/new/files')
192 | ```
193 |
194 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
195 | `c:\path\to\sas7bdat\files`.
196 |
197 | - **dir_to_json(dir_path, export_path=None)** - Convert all sas7bdat files in a directory into json
198 | files at once. File paths can be sent as either strings or Path objects.
199 |
200 | - dir_path = The directory that contains the sas7bdat files to convert.
201 | - export_path = Optional path for the converted files. If no path is supplied the new files will
202 | be put into the dir_path directory with the sas7bdat files. Default = None
203 | - continue_on_error = If set to true processing of files in a batch will continue if there is a
204 | file conversion error instead of raising an exception. Default = False
205 |
206 | **Example**
207 |
208 | ```py
209 | import sas7bdat_converter
210 |
211 | # Option 1: put the converted files in the same directory as the sas7bdat files
212 | sas7bdat_converter.dir_to_json('/path/to/sas7bdat/files')
213 |
214 | # Option 2: put the converted files in a different directory
215 | sas7bdat_converter.dir_to_json('/path/to/sas7bdat/files', 'path/for/new/files')
216 | ```
217 |
218 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
219 | `c:\path\to\sas7bdat\files`.
220 |
221 | - **dir_to_xml(dir_path, export_path=None)** - Convert all sas7bdat files in a directory into XML
222 | files at once. File paths can be sent as either strings or Path objects.
223 |
224 | - dir_path = The directory that contains the sas7bdat files to convert.
225 | - export_path = Optional path for the converted files. If no path is supplied the new files will
226 | be put into the dir_path directory with the sas7bdat files. Default = None
227 | - continue_on_error = If set to true processing of files in a batch will continue if there is a
228 | file conversion error instead of raising an exception. Default = False
229 |
230 | **Example**
231 |
232 | ```py
233 | import sas7bdat_converter
234 |
235 | # Option 1: put the converted files in the same directory as the sas7bdat files
236 | sas7bdat_converter.dir_to_xml('/path/to/sas7bdat/files')
237 |
238 | # Option 2: put the converted files in a different directory
239 | sas7bdat_converter.dir_to_xml('/path/to/sas7bdat/files', 'path/for/new/files')
240 | ```
241 |
242 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
243 | `c:\path\to\sas7bdat\files`.
244 |
245 | - **to_csv(sas7bdat_file, export_file)** - Convert a sas7bdat file into a csv file. File paths can be
246 | sent as either strings or Path objects.
247 |
248 | - sas7bdat_file = The path and name for the sas7bdat file to convert.
249 | - export_file = The path and name for the csv file. The csv file extension should be .csv.
250 |
251 | **Example**
252 |
253 | ```py
254 | import sas7bdat_converter
255 |
256 | sas7bdat_converter.to_csv('/path/to/sas7bdat/file/example.sas7bdat', 'path/to/new/file/example.csv')
257 | ```
258 |
259 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
260 | `c:\path\to\sas7bdat\files\example.sas7bdat`.
261 |
262 | - **to_dataframe(sas7bdat_file)** - Convert a sas7bdat file into a Pandas DataFrame. File paths can
263 | be sent as either strings or Path objects.
264 |
265 | - sas7bdat_file = The path and name for the sas7bdat file to convert.
266 |
267 | **Example**
268 |
269 | ```py
270 | import sas7bdat_converter
271 |
272 | df = sas7bdat_converter.to_dataframe('/path/to/sas7bdat/file/example.sas7bdat')
273 | ```
274 |
275 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
276 | `c:\path\to\sas7bdat\files\example_1.sas7bdat`.
277 |
278 | - **to_excel(sas7bdat_file, export_file)** - Convert a sas7bdat file into an Excel file. File paths
279 | can be sent as either strings or Path objects.
280 |
281 | - sas7bdat_file = The path and name for the sas7bdat file to convert.
282 | - export_file = The path and name for the Excel file. The Excel file extension should be .xlsx.
283 |
284 | **Example**
285 |
286 | ```py
287 | import sas7bdat_converter
288 |
289 | sas7bdat_converter.to_excel('/path/to/sas7bdat/file/example.sas7bdat',
290 | 'path/to/new/file/example.xlsx')
291 | ```
292 |
293 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
294 | `c:\path\to\sas7bdat\files\example.sas7bdat`.
295 |
296 | - **to_json(sas7bdat_file, export_file)** - Convert a sas7bdat file into a json file. File paths can
297 | be sent as either strings or Path objects.
298 |
299 | - sas7bdat_file = The path and name for the sas7bdat file to convert.
300 | - export_file = The path and name for the json file. The json file extension should be .json.
301 |
302 | **Example**
303 |
304 | ```py
305 | import sas7bdat_converter
306 |
307 | sas7bdat_converter.to_json('/path/to/sas7bdat/file/example.sas7bdat', 'path/to/new/file/example.json')
308 | ```
309 |
310 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
311 | `c:\path\to\sas7bdat\files\example.sas7bdat`.
312 |
313 | - **to_xml(sas7bdat_file, export_file, root_node='root', first_node='item')** - Convert a sas7bdat
314 | file into an XML file. File paths can be sent as either strings or Path objects.
315 |
316 | - sas7bdat_file = The path and name for the sas7bdat file to convert.
317 | - export_file = The path and name for the XML file. The XML file extension should be .xml.
318 | - root_node = The name to use for the top level node. If no name is supplied "root" will be used.
319 | - first_node = The name to use for the first node under root. If no name is supplied "item" will be used.
320 |
321 | **Example**
322 |
323 | ```py
324 | import sas7bdat_converter
325 |
326 | sas7bdat_converter.to_xml('/path/to/sas7bdat/file/example.sas7bdat', 'path/to/new/file/example.xml')
327 | ```
328 |
329 | **Note:** Example uses Mac/Linux type file paths. For Windows use paths like
330 | `c:\path\to\sas7bdat\files\example.sas7bdat`.
331 |
332 | ## Contributing
333 |
334 | If you are interested in contributing to this project, please see our [contributing guide](CONTRIBUTING.md).
335 |
--------------------------------------------------------------------------------
/sas7bdat_converter/converter.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import csv
4 | from pathlib import Path
5 | from xml.sax.saxutils import escape
6 |
7 | import pandas as pd
8 |
9 | _FILE_DICT_REQUIRED_KEYS = [
10 | "sas7bdat_file",
11 | "export_file",
12 | ]
13 |
14 |
15 | def batch_to_csv(
16 | file_dicts: list[dict[str, str | Path]],
17 | *,
18 | continue_on_error: bool = False,
19 | verbose: bool = True,
20 | ) -> None:
21 | """Converts a batch of sas7bdat and/or xpt files to csv files.
22 |
23 | Args:
24 |         file_dicts: A list of dictionaries containing the files to convert. The dictionary should
25 | contain the keys 'sas7bdat_file' (containing the path and name to the sas7bdat
26 |             file) and 'export_file' (containing the path and name of the export csv).
27 | Example: file_dict = [{
28 | 'sas7bdat_file': 'sas_file1.sas7bdat',
29 | 'export_file': 'converted_file1.csv',
30 | },
31 | {
32 | 'sas7bdat_file': 'sas_file2.sas7bdat',
33 | 'export_file': 'converted_file2.csv',
34 | }]
35 | continue_on_error: If set to true processing of files in a batch will continue if there is
36 | a file conversion error instead of raising an exception. Default = False
37 | verbose: Increases the output. Default = True
38 | """
39 | for file_dict in file_dicts:
40 | _rise_on_invalid_file_dict(file_dict)
41 |
42 | sas7bdat = _format_path(file_dict["sas7bdat_file"])
43 | export = _format_path(file_dict["export_file"])
44 | try:
45 | to_csv(sas7bdat_file=sas7bdat, export_file=export)
46 | except: # noqa: E722
47 | if continue_on_error and verbose:
48 | print(f"Error converting {sas7bdat}") # noqa: T201
49 | elif continue_on_error:
50 | pass
51 | else:
52 | raise
53 |
54 |
55 | def batch_to_excel(
56 | file_dicts: list[dict[str, str | Path]],
57 | *,
58 | continue_on_error: bool = False,
59 | verbose: bool = True,
60 | ) -> None:
61 | """Converts a batch of sas7bdat and/or xpt files to xlsx files.
62 |
63 | Args:
64 | file_dicts: A list of dictionaries containing the files to convert. The dictionary should
65 | contain the keys 'sas7bdat_file' (containing the path and name to the sas7bdat
66 |             file) and 'export_file' (containing the path and name of the export xlsx).
67 | Example: file_dict = [{
68 | 'sas7bdat_file': 'sas_file1.sas7bdat',
69 | 'export_file': 'converted_file1.xlsx',
70 | },
71 | {
72 | 'sas7bdat_file': 'sas_file2.sas7bdat',
73 |                     'export_file': 'converted_file2.xlsx',
74 | }]
75 | continue_on_error: If set to true processing of files in a batch will continue if there is
76 | a file conversion error instead of raising an exception. Default = False
77 | verbose: Increases the output. Default = True
78 | """
79 | for file_dict in file_dicts:
80 | _rise_on_invalid_file_dict(file_dict)
81 |
82 | sas7bdat = _format_path(file_dict["sas7bdat_file"])
83 | export = _format_path(file_dict["export_file"])
84 | try:
85 | to_excel(sas7bdat_file=sas7bdat, export_file=export)
86 | except: # noqa: E722
87 | if continue_on_error and verbose:
88 | print(f"Error converting {sas7bdat}") # noqa: T201
89 | elif continue_on_error:
90 | pass
91 | else:
92 | raise
93 |
94 |
95 | def batch_to_json(
96 | file_dicts: list[dict[str, str | Path]],
97 | *,
98 | continue_on_error: bool = False,
99 | verbose: bool = True,
100 | ) -> None:
101 | """Converts a batch of sas7bdat and/or xpt files to json files.
102 |
103 | Args:
104 |         file_dicts: A list of dictionaries containing the files to convert. The dictionary should
105 | contain the keys 'sas7bdat_file' (containing the path and name to the sas7bdat
106 |             file) and 'export_file' (containing the path and name of the export json).
107 | Example: file_dict = [{
108 | 'sas7bdat_file': 'sas_file1.sas7bdat',
109 | 'export_file': 'converted_file1.json',
110 | },
111 | {
112 | 'sas7bdat_file': 'sas_file2.sas7bdat',
113 | 'export_file': 'converted_file2.json',
114 | }]
115 | continue_on_error: If set to true processing of files in a batch will continue if there is
116 | a file conversion error instead of raising an exception. Default = False
117 | verbose: Increases the output. Default = True
118 | """
119 | for file_dict in file_dicts:
120 | _rise_on_invalid_file_dict(file_dict)
121 |
122 | sas7bdat = _format_path(file_dict["sas7bdat_file"])
123 | export = _format_path(file_dict["export_file"])
124 | try:
125 | to_json(sas7bdat_file=sas7bdat, export_file=export)
126 | except: # noqa: E722
127 | if continue_on_error and verbose:
128 | print(f"Error converting {sas7bdat}") # noqa: T201
129 | elif continue_on_error:
130 | pass
131 | else:
132 | raise
133 |
134 |
135 | def batch_to_xml(
136 | file_dicts: list[dict[str, str | Path]],
137 | *,
138 | continue_on_error: bool = False,
139 | verbose: bool = True,
140 | ) -> None:
141 | """Converts a batch of sas7bdat and/or xpt files to xml files.
142 |
143 | Args:
144 |         file_dicts: A list of dictionaries containing the files to convert. The dictionary should
145 |             contain the keys 'sas7bdat_file' (containing the path and name to the sas7bdat
146 |             file) and 'export_file' (containing the path and name of the export xml).
147 |             Optionally the dictionary can also contain 'root_node' (containing the name for
148 |             the root node in the xml file), and 'first_node' (containing the name for the
149 |             first node in the xml file).
150 |             Examples: file_dict = [{'sas7bdat_file': 'sas_file1.sas7bdat',
151 |                                     'export_file': 'converted_file1.xml'},
152 |                                     {'sas7bdat_file': 'sas_file2.sas7bdat',
153 |                                     'export_file': 'converted_file2.xml'}]
154 |
155 | file_dict = [{'sas7bdat_file': 'sas_file1.sas7bdat',
156 | 'export_file': 'converted_file1.xml',
157 | 'root_node': 'my_root',
158 | 'first_node': 'my_first'},
159 | {'sas7bdat_file': 'sas_file2.sas7bdat',
160 | 'export_file': 'converted_file2.xml',
161 | 'root_node': 'another_root',
162 | 'first_node': 'another_first'}]
163 | continue_on_error: If set to true processing of files in a batch will continue if there is
164 | a file conversion error instead of raising an exception. Default = False
165 | verbose: Increases the output. Default = True
166 | """
167 | optional_keys = [
168 | "root_node",
169 | "first_node",
170 | ]
171 | for file_dict in file_dicts:
172 | error = False
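        # both required keys must be present, and any remaining keys must come from the
        # optional keys above; anything else is treated as an invalid file_dict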
173 | if len(set(file_dict).intersection(_FILE_DICT_REQUIRED_KEYS)) != len(
174 | _FILE_DICT_REQUIRED_KEYS
175 | ) or len(set(file_dict).intersection(_FILE_DICT_REQUIRED_KEYS)) > len(
176 | _FILE_DICT_REQUIRED_KEYS
177 | ) + len(optional_keys):
178 | error = True
179 | elif len(set(file_dict).intersection(optional_keys)) != len(file_dict) - len(
180 | _FILE_DICT_REQUIRED_KEYS
181 | ):
182 | error = True
183 |
184 | if error:
185 | message = _invalid_key_exception_message(
186 | required_keys=_FILE_DICT_REQUIRED_KEYS, optional_keys=optional_keys
187 | )
188 | raise KeyError(message)
189 |
190 | sas7bdat = _format_path(file_dict["sas7bdat_file"])
191 | export = _format_path(file_dict["export_file"])
192 | root_node = None
193 | first_node = None
194 | if "root_node" in file_dict:
195 | root_node = file_dict["root_node"]
196 | if "first_node" in file_dict:
197 | first_node = file_dict["first_node"]
198 |
199 | try:
200 | if root_node and first_node:
201 | to_xml(
202 | sas7bdat_file=sas7bdat,
203 | export_file=export,
204 | root_node=str(root_node),
205 | first_node=str(first_node),
206 | )
207 | elif root_node:
208 | to_xml(sas7bdat_file=sas7bdat, export_file=export, root_node=str(root_node))
209 | elif first_node:
210 | to_xml(sas7bdat_file=sas7bdat, export_file=export, first_node=str(first_node))
211 | else:
212 | to_xml(sas7bdat_file=sas7bdat, export_file=export)
213 | except: # noqa: E722
214 | if continue_on_error and verbose:
215 | print(f"Error converting {sas7bdat}") # noqa: T201
216 | elif continue_on_error:
217 | pass
218 | else:
219 | raise
220 |
221 |
222 | def dir_to_csv(
223 | dir_path: str | Path,
224 | export_path: str | Path | None = None,
225 | *,
226 | continue_on_error: bool = False,
227 | verbose: bool = True,
228 | ) -> None:
229 | """Converts all sas7bdat and/or xpt files in a directory into csv files.
230 |
231 |     Args:
232 | dir_path: The path to the directory that contains the sas7bdat files
233 | for conversion.
234 | export_path (optional): If used this can specify a new directory to create
235 | the converted files into. If not supplied then the files will be
236 | created into the same directory as dir_path. Default = None
237 | continue_on_error: If set to true processing of files in a batch will continue if there is
238 | a file conversion error instead of raising an exception. Default = False
239 | verbose: Increases the output. Default = True
240 | """
241 | _walk_dir("csv", dir_path, export_path, continue_on_error, verbose)
242 |
243 |
244 | def dir_to_excel(
245 | dir_path: str | Path,
246 | export_path: str | Path | None = None,
247 | *,
248 | continue_on_error: bool = False,
249 | verbose: bool = True,
250 | ) -> None:
251 | """Converts all sas7bdat and/or xpt files in a directory into xlsx files.
252 |
253 |     Args:
254 | dir_path: The path to the directory that contains the sas7bdat files
255 | for conversion.
256 | export_path (optional): If used this can specify a new directory to create
257 | the converted files into. If not supplied then the files will be
258 | created into the same directory as dir_path. Default = None
259 | continue_on_error: If set to true processing of files in a batch will continue if there is
260 | a file conversion error instead of raising an exception. Default = False
261 | verbose: Increases the output. Default = True
262 | """
263 | _walk_dir("xlsx", dir_path, export_path, continue_on_error, verbose)
264 |
265 |
266 | def dir_to_json(
267 | dir_path: str | Path,
268 | export_path: str | Path | None = None,
269 | *,
270 | continue_on_error: bool = False,
271 | verbose: bool = True,
272 | ) -> None:
273 | """Converts all sas7bdat and/or xpt files in a directory into json files.
274 |
275 |     Args:
276 | dir_path: The path to the directory that contains the sas7bdat files
277 | for conversion.
278 | export_path (optional): If used this can specify a new directory to create
279 | the converted files into. If not supplied then the files will be
280 | created into the same directory as dir_path. Default = None
281 | continue_on_error: If set to true processing of files in a batch will continue if there is
282 | a file conversion error instead of raising an exception. Default = False
283 | verbose: Increases the output. Default = True
284 | """
285 | _walk_dir("json", dir_path, export_path, continue_on_error, verbose)
286 |
287 |
288 | def dir_to_xml(
289 | dir_path: str | Path,
290 | export_path: str | Path | None = None,
291 | *,
292 | continue_on_error: bool = False,
293 | verbose: bool = True,
294 | ) -> None:
295 | """Converts all sas7bdat and/or xpt files in a directory into xml files.
296 |
297 |     Args:
298 | dir_path: The path to the directory that contains the sas7bdat files
299 | for conversion.
300 | export_path (optional): If used this can specify a new directory to create
301 | the converted files into. If not supplied then the files will be
302 | created into the same directory as dir_path. Default = None
303 | continue_on_error: If set to true processing of files in a batch will continue if there is
304 | a file conversion error instead of raising an exception. Default = False
305 | verbose: Increases the output. Default = True
306 | """
307 | _walk_dir("xml", dir_path, export_path, continue_on_error, verbose)
308 |
309 |
310 | def to_csv(sas7bdat_file: str | Path, export_file: str | Path) -> None:
311 | """Converts a sas7bdat and/or xpt file into a csv file.
312 |
313 |     Args:
314 | sas7bdat_file: The name, including the path, for the sas7bdat file.
315 | export_file: The name, including the path, for the export file.
316 | """
317 | valid_extensions = (".csv",)
318 | file_extension = Path(export_file).suffix
319 |
320 | if not _is_valid_extension(valid_extensions, file_extension):
321 | error_message = _file_extension_exception_message("to_csv", valid_extensions)
322 | raise AttributeError(error_message)
323 |
324 | df = to_dataframe(sas7bdat_file)
325 | df.to_csv(export_file, quoting=csv.QUOTE_NONNUMERIC, index=False)
326 |
327 |
328 | def to_dataframe(sas7bdat_file: str | Path) -> pd.DataFrame:
329 | """Converts a sas7bdat and/or xpt file into a pandas dataframe.
330 |
331 |     Args:
332 | sas7bdat_file: The name, including the path, for the sas7bdat file.
333 |
334 |     Returns:
335 | A pandas dataframe containing the data from the sas7bdat file.
336 | """
337 | df = pd.read_sas(sas7bdat_file)
338 |
339 | # convert binary strings to utf-8
340 | str_df = df.select_dtypes(include=["object"])
341 | if len(str_df.columns) > 0:
342 | str_df = str_df.stack().str.decode("utf-8").unstack()
343 |
344 | for col in str_df:
345 | df[col] = str_df[col]
346 | # end conversion to utf-8
347 |
348 | return df
349 |
350 |
351 | def to_excel(sas7bdat_file: str | Path, export_file: str | Path) -> None:
352 | """Converts a sas7bdat and/or xpt file into a xlsx file.
353 |
354 |     Args:
355 | sas7bdat_file: The name, including the path, for the sas7bdat file.
356 | export_file: The name, including the path, for the export file.
357 | """
358 | valid_extensions = (".xlsx",)
359 | file_extension = Path(export_file).suffix
360 |
361 | if not _is_valid_extension(valid_extensions, file_extension):
362 | error_message = _file_extension_exception_message("to_excel", valid_extensions)
363 | raise AttributeError(error_message)
364 |
365 | df = to_dataframe(sas7bdat_file)
366 | try:
367 | df.to_excel(export_file, index=False)
368 | except ModuleNotFoundError as e:
369 | raise ModuleNotFoundError(
370 | "The optional dependency openpyxl is required in order to convert to an Excel file"
371 | ) from e
372 |
373 |
374 | def to_json(sas7bdat_file: str | Path, export_file: str | Path) -> None:
375 | """Converts a sas7bdat and/or xpt file into a json file.
376 |
377 |     Args:
378 | sas7bdat_file: The name, including the path, for the sas7bdat file.
379 | export_file: The name, including the path, for the export file.
380 | """
381 | valid_extensions = (".json",)
382 | file_extension = Path(export_file).suffix
383 |
384 | if not _is_valid_extension(valid_extensions, file_extension):
385 | error_message = _file_extension_exception_message("to_json", valid_extensions)
386 | raise AttributeError(error_message)
387 |
388 | df = to_dataframe(sas7bdat_file)
389 | df.to_json(export_file)
390 |
391 |
392 | def to_xml(
393 | sas7bdat_file: str | Path,
394 | export_file: str | Path,
395 | root_node: str = "root",
396 | first_node: str = "item",
397 | ) -> None:
398 | """Converts a sas7bdat and/or xpt file into a xml file.
399 |
400 |     Args:
401 | sas7bdat_file: The name, including the path, for the sas7bdat file.
402 | export_file: The name, including the path, for the export file.
403 | root_node: The name to use for the root node in the xml file.
404 |         first_node: The name to use for the first node in the xml file.
405 | """
406 | valid_extensions = (".xml",)
407 | file_extension = Path(export_file).suffix
408 |
409 | if not _is_valid_extension(valid_extensions, file_extension):
410 | error_message = _file_extension_exception_message("to_xml", valid_extensions)
411 | raise AttributeError(error_message)
412 |
413 | df = to_dataframe(sas7bdat_file)
414 |
415 |     def row_to_xml(row: pd.Series) -> str:
416 | xml = [f" <{first_node}>"]
417 | for i, col_name in enumerate(row.index):
418 | text = row.iloc[i]
419 | if isinstance(text, str):
420 | text = escape(text)
421 |
422 |             xml.append(f" <{col_name}>{text}</{col_name}>")
423 |         xml.append(f" </{first_node}>")
424 | return "\n".join(xml)
425 |
426 |     res = f'<?xml version="1.0" encoding="UTF-8"?>\n<{root_node}>\n'
427 |     res = res + "\n".join(df.apply(row_to_xml, axis=1)) + f"\n</{root_node}>"
428 |
429 | with open(export_file, "w") as f:
430 | f.write(res)
431 |
432 |
433 | def _file_extension_exception_message(conversion_type: str, valid_extensions: tuple[str]) -> str:
434 | if len(valid_extensions) == 1:
435 | is_are = ("extension", "is")
436 | else:
437 | is_are = ("extensions", "are")
438 |
439 | extensions = ", ".join(valid_extensions)
440 | return f"sas7bdat conversion error - Valid {is_are[0]} for {conversion_type} conversion {is_are[1]}: {extensions}" # noqa: E501
441 |
442 |
443 | def _invalid_key_exception_message(
444 | required_keys: list[str], optional_keys: list[str] | None = None
445 | ) -> str:
446 | required_keys_joined: str = ", ".join(required_keys)
447 | if optional_keys:
448 | optional_keys_joined: str = ", ".join(optional_keys)
449 | return f"Invalid key provided, expected keys are: {required_keys_joined} and optional keys are: {optional_keys_joined}"
450 |
451 | else:
452 | return f"Invalid key provided, expected keys are: {required_keys_joined}"
453 |
454 |
455 | def _is_valid_extension(valid_extensions: tuple[str], file_extension: str) -> bool:
456 | return file_extension in valid_extensions
457 |
458 |
459 | def _format_path(path: str | Path) -> str:
460 | return str(path) if isinstance(path, Path) else path
461 |
462 |
463 | def _rise_on_invalid_file_dict(file_dict: dict[str, str | Path]) -> None:
464 | if len(set(file_dict).intersection(_FILE_DICT_REQUIRED_KEYS)) != len(_FILE_DICT_REQUIRED_KEYS):
465 | message = _invalid_key_exception_message(required_keys=_FILE_DICT_REQUIRED_KEYS)
466 | raise KeyError(message)
467 |
468 |
469 | def _walk_dir(
470 | file_type: str,
471 | dir_path: str | Path,
472 | export_path: str | Path | None = None,
473 | continue_on_error: bool = False,
474 | verbose: bool = True,
475 | ) -> None:
476 | path = dir_path if isinstance(dir_path, Path) else Path(dir_path)
477 | for file_name in path.iterdir():
478 | if file_name.suffix in [".sas7bdat", ".xpt"]:
479 | export_file = Path(f"{file_name.stem}.{file_type}")
480 | if export_path:
481 | export_file = Path(export_path).joinpath(export_file)
482 | else:
483 | export_file = path.joinpath(export_file)
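            # export_path is not created here, so it is assumed to already exist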
484 |
485 | sas7bdat_file = path.joinpath(file_name)
486 |
487 | try:
488 | if file_type == "csv":
489 | to_csv(str(sas7bdat_file), str(export_file))
490 | elif file_type == "json":
491 | to_json(str(sas7bdat_file), str(export_file))
492 | elif file_type == "xlsx":
493 | to_excel(str(sas7bdat_file), str(export_file))
494 | elif file_type == "xml":
495 | to_xml(str(sas7bdat_file), str(export_file))
496 | except: # noqa: E722
497 | if continue_on_error and verbose:
498 | print(f"Error converting {sas7bdat_file}") # noqa: T201
499 | elif continue_on_error:
500 | pass
501 | else:
502 | raise
503 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
2 |
3 | [[package]]
4 | name = "cfgv"
5 | version = "3.4.0"
6 | description = "Validate configuration and produce human readable error messages."
7 | optional = false
8 | python-versions = ">=3.8"
9 | groups = ["dev"]
10 | files = [
11 | {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
12 | {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
13 | ]
14 |
15 | [[package]]
16 | name = "colorama"
17 | version = "0.4.6"
18 | description = "Cross-platform colored terminal text."
19 | optional = false
20 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
21 | groups = ["dev"]
22 | markers = "sys_platform == \"win32\""
23 | files = [
24 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
25 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
26 | ]
27 |
28 | [[package]]
29 | name = "coverage"
30 | version = "7.8.2"
31 | description = "Code coverage measurement for Python"
32 | optional = false
33 | python-versions = ">=3.9"
34 | groups = ["dev"]
35 | files = [
36 | {file = "coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a"},
37 | {file = "coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be"},
38 | {file = "coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3"},
39 | {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6"},
40 | {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622"},
41 | {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c"},
42 | {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3"},
43 | {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404"},
44 | {file = "coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7"},
45 | {file = "coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347"},
46 | {file = "coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9"},
47 | {file = "coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879"},
48 | {file = "coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a"},
49 | {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5"},
50 | {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11"},
51 | {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a"},
52 | {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb"},
53 | {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54"},
54 | {file = "coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a"},
55 | {file = "coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975"},
56 | {file = "coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53"},
57 | {file = "coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c"},
58 | {file = "coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1"},
59 | {file = "coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279"},
60 | {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99"},
61 | {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20"},
62 | {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2"},
63 | {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57"},
64 | {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f"},
65 | {file = "coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8"},
66 | {file = "coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223"},
67 | {file = "coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f"},
68 | {file = "coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca"},
69 | {file = "coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d"},
70 | {file = "coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85"},
71 | {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257"},
72 | {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108"},
73 | {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0"},
74 | {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050"},
75 | {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48"},
76 | {file = "coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7"},
77 | {file = "coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3"},
78 | {file = "coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7"},
79 | {file = "coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008"},
80 | {file = "coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36"},
81 | {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46"},
82 | {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be"},
83 | {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740"},
84 | {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625"},
85 | {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b"},
86 | {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199"},
87 | {file = "coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8"},
88 | {file = "coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d"},
89 | {file = "coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b"},
90 | {file = "coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a"},
91 | {file = "coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d"},
92 | {file = "coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca"},
93 | {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d"},
94 | {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787"},
95 | {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7"},
96 | {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3"},
97 | {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7"},
98 | {file = "coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a"},
99 | {file = "coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e"},
100 | {file = "coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837"},
101 | {file = "coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32"},
102 | {file = "coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27"},
103 | ]
104 |
105 | [package.dependencies]
106 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
107 |
108 | [package.extras]
109 | toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
110 |
111 | [[package]]
112 | name = "distlib"
113 | version = "0.3.9"
114 | description = "Distribution utilities"
115 | optional = false
116 | python-versions = "*"
117 | groups = ["dev"]
118 | files = [
119 | {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
120 | {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
121 | ]
122 |
123 | [[package]]
124 | name = "et-xmlfile"
125 | version = "2.0.0"
126 | description = "An implementation of lxml.xmlfile for the standard library"
127 | optional = true
128 | python-versions = ">=3.8"
129 | groups = ["main"]
130 | markers = "extra == \"excel\" or extra == \"all\""
131 | files = [
132 | {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"},
133 | {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"},
134 | ]
135 |
136 | [[package]]
137 | name = "exceptiongroup"
138 | version = "1.3.0"
139 | description = "Backport of PEP 654 (exception groups)"
140 | optional = false
141 | python-versions = ">=3.7"
142 | groups = ["dev"]
143 | markers = "python_version < \"3.11\""
144 | files = [
145 | {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"},
146 | {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"},
147 | ]
148 |
149 | [package.dependencies]
150 | typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""}
151 |
152 | [package.extras]
153 | test = ["pytest (>=6)"]
154 |
155 | [[package]]
156 | name = "filelock"
157 | version = "3.18.0"
158 | description = "A platform independent file lock."
159 | optional = false
160 | python-versions = ">=3.9"
161 | groups = ["dev"]
162 | files = [
163 | {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"},
164 | {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"},
165 | ]
166 |
167 | [package.extras]
168 | docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
169 | testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
170 | typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
171 |
172 | [[package]]
173 | name = "identify"
174 | version = "2.6.12"
175 | description = "File identification library for Python"
176 | optional = false
177 | python-versions = ">=3.9"
178 | groups = ["dev"]
179 | files = [
180 | {file = "identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2"},
181 | {file = "identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6"},
182 | ]
183 |
184 | [package.extras]
185 | license = ["ukkonen"]
186 |
187 | [[package]]
188 | name = "iniconfig"
189 | version = "2.1.0"
190 | description = "brain-dead simple config-ini parsing"
191 | optional = false
192 | python-versions = ">=3.8"
193 | groups = ["dev"]
194 | files = [
195 | {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
196 | {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
197 | ]
198 |
199 | [[package]]
200 | name = "mypy"
201 | version = "1.15.0"
202 | description = "Optional static typing for Python"
203 | optional = false
204 | python-versions = ">=3.9"
205 | groups = ["dev"]
206 | files = [
207 | {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"},
208 | {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"},
209 | {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"},
210 | {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"},
211 | {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"},
212 | {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"},
213 | {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"},
214 | {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"},
215 | {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"},
216 | {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"},
217 | {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"},
218 | {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"},
219 | {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"},
220 | {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"},
221 | {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"},
222 | {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"},
223 | {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"},
224 | {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"},
225 | {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"},
226 | {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"},
227 | {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"},
228 | {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"},
229 | {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"},
230 | {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"},
231 | {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"},
232 | {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"},
233 | {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"},
234 | {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"},
235 | {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"},
236 | {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"},
237 | {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"},
238 | {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"},
239 | ]
240 |
241 | [package.dependencies]
242 | mypy_extensions = ">=1.0.0"
243 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
244 | typing_extensions = ">=4.6.0"
245 |
246 | [package.extras]
247 | dmypy = ["psutil (>=4.0)"]
248 | faster-cache = ["orjson"]
249 | install-types = ["pip"]
250 | mypyc = ["setuptools (>=50)"]
251 | reports = ["lxml"]
252 |
253 | [[package]]
254 | name = "mypy-extensions"
255 | version = "1.1.0"
256 | description = "Type system extensions for programs checked with the mypy type checker."
257 | optional = false
258 | python-versions = ">=3.8"
259 | groups = ["dev"]
260 | files = [
261 | {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"},
262 | {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
263 | ]
264 |
265 | [[package]]
266 | name = "nodeenv"
267 | version = "1.9.1"
268 | description = "Node.js virtual environment builder"
269 | optional = false
270 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
271 | groups = ["dev"]
272 | files = [
273 | {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
274 | {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
275 | ]
276 |
277 | [[package]]
278 | name = "numpy"
279 | version = "2.2.6"
280 | description = "Fundamental package for array computing in Python"
281 | optional = false
282 | python-versions = ">=3.10"
283 | groups = ["main", "dev"]
284 | files = [
285 | {file = "numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb"},
286 | {file = "numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90"},
287 | {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163"},
288 | {file = "numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf"},
289 | {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83"},
290 | {file = "numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915"},
291 | {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680"},
292 | {file = "numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289"},
293 | {file = "numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d"},
294 | {file = "numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3"},
295 | {file = "numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae"},
296 | {file = "numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a"},
297 | {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42"},
298 | {file = "numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491"},
299 | {file = "numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a"},
300 | {file = "numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf"},
301 | {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1"},
302 | {file = "numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab"},
303 | {file = "numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47"},
304 | {file = "numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303"},
305 | {file = "numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff"},
306 | {file = "numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c"},
307 | {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3"},
308 | {file = "numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282"},
309 | {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87"},
310 | {file = "numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249"},
311 | {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49"},
312 | {file = "numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de"},
313 | {file = "numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4"},
314 | {file = "numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2"},
315 | {file = "numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84"},
316 | {file = "numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b"},
317 | {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d"},
318 | {file = "numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566"},
319 | {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f"},
320 | {file = "numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f"},
321 | {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868"},
322 | {file = "numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d"},
323 | {file = "numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd"},
324 | {file = "numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c"},
325 | {file = "numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6"},
326 | {file = "numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda"},
327 | {file = "numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40"},
328 | {file = "numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8"},
329 | {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f"},
330 | {file = "numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa"},
331 | {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571"},
332 | {file = "numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1"},
333 | {file = "numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff"},
334 | {file = "numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06"},
335 | {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d"},
336 | {file = "numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db"},
337 | {file = "numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543"},
338 | {file = "numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00"},
339 | {file = "numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd"},
340 | ]
341 |
342 | [[package]]
343 | name = "openpyxl"
344 | version = "3.1.5"
345 | description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
346 | optional = true
347 | python-versions = ">=3.8"
348 | groups = ["main"]
349 | markers = "extra == \"excel\" or extra == \"all\""
350 | files = [
351 | {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"},
352 | {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"},
353 | ]
354 |
355 | [package.dependencies]
356 | et-xmlfile = "*"
357 |
358 | [[package]]
359 | name = "packaging"
360 | version = "25.0"
361 | description = "Core utilities for Python packages"
362 | optional = false
363 | python-versions = ">=3.8"
364 | groups = ["dev"]
365 | files = [
366 | {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
367 | {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
368 | ]
369 |
370 | [[package]]
371 | name = "pandas"
372 | version = "2.2.3"
373 | description = "Powerful data structures for data analysis, time series, and statistics"
374 | optional = false
375 | python-versions = ">=3.9"
376 | groups = ["main"]
377 | files = [
378 | {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
379 | {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
380 | {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"},
381 | {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"},
382 | {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"},
383 | {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"},
384 | {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"},
385 | {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"},
386 | {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"},
387 | {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"},
388 | {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"},
389 | {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"},
390 | {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"},
391 | {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"},
392 | {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"},
393 | {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"},
394 | {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"},
395 | {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"},
396 | {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"},
397 | {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"},
398 | {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"},
399 | {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"},
400 | {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"},
401 | {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"},
402 | {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"},
403 | {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"},
404 | {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"},
405 | {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"},
406 | {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"},
407 | {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"},
408 | {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"},
409 | {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"},
410 | {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"},
411 | {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"},
412 | {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"},
413 | {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"},
414 | {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"},
415 | {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"},
416 | {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"},
417 | {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"},
418 | {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"},
419 | {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"},
420 | ]
421 |
422 | [package.dependencies]
423 | numpy = [
424 | {version = ">=1.22.4", markers = "python_version < \"3.11\""},
425 | {version = ">=1.23.2", markers = "python_version == \"3.11\""},
426 | {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
427 | ]
428 | python-dateutil = ">=2.8.2"
429 | pytz = ">=2020.1"
430 | tzdata = ">=2022.7"
431 |
432 | [package.extras]
433 | all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"]
434 | aws = ["s3fs (>=2022.11.0)"]
435 | clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"]
436 | compression = ["zstandard (>=0.19.0)"]
437 | computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"]
438 | consortium-standard = ["dataframe-api-compat (>=0.1.7)"]
439 | excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"]
440 | feather = ["pyarrow (>=10.0.1)"]
441 | fss = ["fsspec (>=2022.11.0)"]
442 | gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"]
443 | hdf5 = ["tables (>=3.8.0)"]
444 | html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"]
445 | mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"]
446 | output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"]
447 | parquet = ["pyarrow (>=10.0.1)"]
448 | performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
449 | plot = ["matplotlib (>=3.6.3)"]
450 | postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
451 | pyarrow = ["pyarrow (>=10.0.1)"]
452 | spss = ["pyreadstat (>=1.2.0)"]
453 | sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
454 | test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
455 | xml = ["lxml (>=4.9.2)"]
456 |
457 | [[package]]
458 | name = "pandas-stubs"
459 | version = "2.2.3.250308"
460 | description = "Type annotations for pandas"
461 | optional = false
462 | python-versions = ">=3.10"
463 | groups = ["dev"]
464 | files = [
465 | {file = "pandas_stubs-2.2.3.250308-py3-none-any.whl", hash = "sha256:a377edff3b61f8b268c82499fdbe7c00fdeed13235b8b71d6a1dc347aeddc74d"},
466 | {file = "pandas_stubs-2.2.3.250308.tar.gz", hash = "sha256:3a6e9daf161f00b85c83772ed3d5cff9522028f07a94817472c07b91f46710fd"},
467 | ]
468 |
469 | [package.dependencies]
470 | numpy = ">=1.23.5"
471 | types-pytz = ">=2022.1.1"
472 |
473 | [[package]]
474 | name = "platformdirs"
475 | version = "4.3.8"
476 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
477 | optional = false
478 | python-versions = ">=3.9"
479 | groups = ["dev"]
480 | files = [
481 | {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"},
482 | {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"},
483 | ]
484 |
485 | [package.extras]
486 | docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
487 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"]
488 | type = ["mypy (>=1.14.1)"]
489 |
490 | [[package]]
491 | name = "pluggy"
492 | version = "1.6.0"
493 | description = "plugin and hook calling mechanisms for python"
494 | optional = false
495 | python-versions = ">=3.9"
496 | groups = ["dev"]
497 | files = [
498 | {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
499 | {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
500 | ]
501 |
502 | [package.extras]
503 | dev = ["pre-commit", "tox"]
504 | testing = ["coverage", "pytest", "pytest-benchmark"]
505 |
506 | [[package]]
507 | name = "pre-commit"
508 | version = "4.2.0"
509 | description = "A framework for managing and maintaining multi-language pre-commit hooks."
510 | optional = false
511 | python-versions = ">=3.9"
512 | groups = ["dev"]
513 | files = [
514 | {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"},
515 | {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"},
516 | ]
517 |
518 | [package.dependencies]
519 | cfgv = ">=2.0.0"
520 | identify = ">=1.0.0"
521 | nodeenv = ">=0.11.1"
522 | pyyaml = ">=5.1"
523 | virtualenv = ">=20.10.0"
524 |
525 | [[package]]
526 | name = "pytest"
527 | version = "8.3.5"
528 | description = "pytest: simple powerful testing with Python"
529 | optional = false
530 | python-versions = ">=3.8"
531 | groups = ["dev"]
532 | files = [
533 | {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
534 | {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
535 | ]
536 |
537 | [package.dependencies]
538 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
539 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
540 | iniconfig = "*"
541 | packaging = "*"
542 | pluggy = ">=1.5,<2"
543 | tomli = {version = ">=1", markers = "python_version < \"3.11\""}
544 |
545 | [package.extras]
546 | dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
547 |
548 | [[package]]
549 | name = "pytest-cov"
550 | version = "6.1.1"
551 | description = "Pytest plugin for measuring coverage."
552 | optional = false
553 | python-versions = ">=3.9"
554 | groups = ["dev"]
555 | files = [
556 | {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"},
557 | {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"},
558 | ]
559 |
560 | [package.dependencies]
561 | coverage = {version = ">=7.5", extras = ["toml"]}
562 | pytest = ">=4.6"
563 |
564 | [package.extras]
565 | testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
566 |
567 | [[package]]
568 | name = "python-dateutil"
569 | version = "2.9.0.post0"
570 | description = "Extensions to the standard Python datetime module"
571 | optional = false
572 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
573 | groups = ["main"]
574 | files = [
575 | {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
576 | {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
577 | ]
578 |
579 | [package.dependencies]
580 | six = ">=1.5"
581 |
582 | [[package]]
583 | name = "pytz"
584 | version = "2025.2"
585 | description = "World timezone definitions, modern and historical"
586 | optional = false
587 | python-versions = "*"
588 | groups = ["main"]
589 | files = [
590 | {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"},
591 | {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"},
592 | ]
593 |
594 | [[package]]
595 | name = "pyyaml"
596 | version = "6.0.2"
597 | description = "YAML parser and emitter for Python"
598 | optional = false
599 | python-versions = ">=3.8"
600 | groups = ["dev"]
601 | files = [
602 | {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
603 | {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
604 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
605 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
606 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
607 | {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
608 | {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
609 | {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
610 | {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
611 | {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
612 | {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
613 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
614 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
615 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
616 | {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
617 | {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
618 | {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
619 | {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
620 | {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
621 | {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
622 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
623 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
624 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
625 | {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
626 | {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
627 | {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
628 | {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
629 | {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
630 | {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
631 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
632 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
633 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
634 | {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
635 | {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
636 | {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
637 | {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
638 | {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
639 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
640 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
641 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
642 | {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
643 | {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
644 | {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
645 | {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
646 | {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
647 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
648 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
649 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
650 | {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
651 | {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
652 | {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
653 | {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
654 | {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
655 | ]
656 |
657 | [[package]]
658 | name = "ruff"
659 | version = "0.11.11"
660 | description = "An extremely fast Python linter and code formatter, written in Rust."
661 | optional = false
662 | python-versions = ">=3.7"
663 | groups = ["dev"]
664 | files = [
665 | {file = "ruff-0.11.11-py3-none-linux_armv6l.whl", hash = "sha256:9924e5ae54125ed8958a4f7de320dab7380f6e9fa3195e3dc3b137c6842a0092"},
666 | {file = "ruff-0.11.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8a93276393d91e952f790148eb226658dd275cddfde96c6ca304873f11d2ae4"},
667 | {file = "ruff-0.11.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d6e333dbe2e6ae84cdedefa943dfd6434753ad321764fd937eef9d6b62022bcd"},
668 | {file = "ruff-0.11.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7885d9a5e4c77b24e8c88aba8c80be9255fa22ab326019dac2356cff42089fc6"},
669 | {file = "ruff-0.11.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b5ab797fcc09121ed82e9b12b6f27e34859e4227080a42d090881be888755d4"},
670 | {file = "ruff-0.11.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e231ff3132c1119ece836487a02785f099a43992b95c2f62847d29bace3c75ac"},
671 | {file = "ruff-0.11.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a97c9babe1d4081037a90289986925726b802d180cca784ac8da2bbbc335f709"},
672 | {file = "ruff-0.11.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8c4ddcbe8a19f59f57fd814b8b117d4fcea9bee7c0492e6cf5fdc22cfa563c8"},
673 | {file = "ruff-0.11.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6224076c344a7694c6fbbb70d4f2a7b730f6d47d2a9dc1e7f9d9bb583faf390b"},
674 | {file = "ruff-0.11.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:882821fcdf7ae8db7a951df1903d9cb032bbe838852e5fc3c2b6c3ab54e39875"},
675 | {file = "ruff-0.11.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:dcec2d50756463d9df075a26a85a6affbc1b0148873da3997286caf1ce03cae1"},
676 | {file = "ruff-0.11.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:99c28505ecbaeb6594701a74e395b187ee083ee26478c1a795d35084d53ebd81"},
677 | {file = "ruff-0.11.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9263f9e5aa4ff1dec765e99810f1cc53f0c868c5329b69f13845f699fe74f639"},
678 | {file = "ruff-0.11.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:64ac6f885e3ecb2fdbb71de2701d4e34526651f1e8503af8fb30d4915a3fe345"},
679 | {file = "ruff-0.11.11-py3-none-win32.whl", hash = "sha256:1adcb9a18802268aaa891ffb67b1c94cd70578f126637118e8099b8e4adcf112"},
680 | {file = "ruff-0.11.11-py3-none-win_amd64.whl", hash = "sha256:748b4bb245f11e91a04a4ff0f96e386711df0a30412b9fe0c74d5bdc0e4a531f"},
681 | {file = "ruff-0.11.11-py3-none-win_arm64.whl", hash = "sha256:6c51f136c0364ab1b774767aa8b86331bd8e9d414e2d107db7a2189f35ea1f7b"},
682 | {file = "ruff-0.11.11.tar.gz", hash = "sha256:7774173cc7c1980e6bf67569ebb7085989a78a103922fb83ef3dfe230cd0687d"},
683 | ]
684 |
685 | [[package]]
686 | name = "six"
687 | version = "1.17.0"
688 | description = "Python 2 and 3 compatibility utilities"
689 | optional = false
690 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
691 | groups = ["main"]
692 | files = [
693 | {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
694 | {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
695 | ]
696 |
697 | [[package]]
698 | name = "tomli"
699 | version = "2.2.1"
700 | description = "A lil' TOML parser"
701 | optional = false
702 | python-versions = ">=3.8"
703 | groups = ["dev"]
704 | markers = "python_version < \"3.11\""
705 | files = [
706 | {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
707 | {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
708 | {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
709 | {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
710 | {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
711 | {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
712 | {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
713 | {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
714 | {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
715 | {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
716 | {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
717 | {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
718 | {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
719 | {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
720 | {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
721 | {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
722 | {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
723 | {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
724 | {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
725 | {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
726 | {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
727 | {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
728 | {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
729 | {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
730 | {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
731 | {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
732 | {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
733 | {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
734 | {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
735 | {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
736 | {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
737 | {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
738 | ]
739 |
740 | [[package]]
741 | name = "types-pytz"
742 | version = "2025.2.0.20250516"
743 | description = "Typing stubs for pytz"
744 | optional = false
745 | python-versions = ">=3.9"
746 | groups = ["dev"]
747 | files = [
748 | {file = "types_pytz-2025.2.0.20250516-py3-none-any.whl", hash = "sha256:e0e0c8a57e2791c19f718ed99ab2ba623856b11620cb6b637e5f62ce285a7451"},
749 | {file = "types_pytz-2025.2.0.20250516.tar.gz", hash = "sha256:e1216306f8c0d5da6dafd6492e72eb080c9a166171fa80dd7a1990fd8be7a7b3"},
750 | ]
751 |
752 | [[package]]
753 | name = "typing-extensions"
754 | version = "4.13.2"
755 | description = "Backported and Experimental Type Hints for Python 3.8+"
756 | optional = false
757 | python-versions = ">=3.8"
758 | groups = ["dev"]
759 | files = [
760 | {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"},
761 | {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"},
762 | ]
763 |
764 | [[package]]
765 | name = "tzdata"
766 | version = "2025.2"
767 | description = "Provider of IANA time zone data"
768 | optional = false
769 | python-versions = ">=2"
770 | groups = ["main"]
771 | files = [
772 | {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"},
773 | {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"},
774 | ]
775 |
776 | [[package]]
777 | name = "virtualenv"
778 | version = "20.31.2"
779 | description = "Virtual Python Environment builder"
780 | optional = false
781 | python-versions = ">=3.8"
782 | groups = ["dev"]
783 | files = [
784 | {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"},
785 | {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"},
786 | ]
787 |
788 | [package.dependencies]
789 | distlib = ">=0.3.7,<1"
790 | filelock = ">=3.12.2,<4"
791 | platformdirs = ">=3.9.1,<5"
792 |
793 | [package.extras]
794 | docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
795 | test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""]
796 |
797 | [extras]
798 | all = ["openpyxl"]
799 | excel = ["openpyxl"]
800 |
801 | [metadata]
802 | lock-version = "2.1"
803 | python-versions = "^3.10.0"
804 | content-hash = "cd5e6ef4e2c10a926cbefb50fb56871f1b8dab80d47c021a9064745455569ab5"
805 |
--------------------------------------------------------------------------------