├── tests
│   ├── __init__.py
│   ├── test_utils.py
│   └── test_core.py
├── .flake8
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── ci.yml
│       └── codeql-analysis.yml
├── Makefile
├── arrayfiles
│   ├── __init__.py
│   ├── utils.py
│   └── core.py
├── Pipfile
├── tox.ini
├── setup.py
├── .travis.yml
├── LICENSE
├── .gitignore
├── README.md
└── Pipfile.lock
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: pip
4 |   directory: "/"
5 |   schedule:
6 |     interval: daily
7 |   open-pull-requests-limit: 10
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | init:
2 | 	pipenv install --skip-lock --dev
3 | 	pipenv run flake8 --install-hook git
4 | 	git config --bool flake8.strict true
5 | test:
6 | 	pipenv run pytest --cov=arrayfiles --cov-report=term-missing tests
--------------------------------------------------------------------------------
/arrayfiles/__init__.py:
--------------------------------------------------------------------------------
1 | from arrayfiles.core import CsvFile  # NOQA
2 | from arrayfiles.core import CustomNewlineTextFile  # NOQA
3 | from arrayfiles.core import TextFile  # NOQA
4 | from arrayfiles.core import read_csv  # NOQA
5 | from arrayfiles.core import read_text  # NOQA
--------------------------------------------------------------------------------
/arrayfiles/utils.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import os
3 | from typing import Iterator
4 | 
5 | 
6 | @contextlib.contextmanager
7 | def fd_open(filename: str, flags, **kwargs) -> Iterator[int]:
8 |     # Open outside the try block so that close() only runs once open() has succeeded.
9 |     fd = os.open(filename, flags, **kwargs)
10 |     try:
11 |         yield fd
12 |     finally:
13 |         os.close(fd)
--------------------------------------------------------------------------------
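`fd_open` above exists so that `arrayfiles/core.py` (shown later in this listing) can hand a raw file descriptor to `mmap` while still guaranteeing that the descriptor gets closed. A minimal sketch of that pairing, mirroring the call in `TextFile.__init__`; `/path/to/text` is a placeholder path:

```python
import mmap
import os

from arrayfiles import utils

# The mapping stays valid after the descriptor is closed, so the file only has
# to be held open long enough to create the memory map.
with utils.fd_open('/path/to/text', os.O_RDWR) as fd:
    mm = mmap.mmap(fd, 0, access=mmap.ACCESS_READ)

print(mm.readline())  # first line of the file, as bytes
mm.close()
```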
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | name = "pypi"
3 | url = "https://pypi.org/simple"
4 | verify_ssl = true
5 | 
6 | [dev-packages]
7 | autopep8 = "*"
8 | flake8 = "*"
9 | ipython = "*"
10 | pytest = "*"
11 | pytest-cov = "*"
12 | isort = {extras = ["pipfile"],version = "*"}
13 | 
14 | [packages]
15 | 
16 | [requires]
17 | python_version = "3.7"
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py36, py37, py38
3 | 
4 | [testenv]
5 | passenv = CI TRAVIS TRAVIS_* CODACY_PROJECT_TOKEN
6 | commands =
7 |     pytest --cov=arrayfiles tests
8 |     codecov
9 |     coverage xml
10 |     python-codacy-coverage -r coverage.xml
11 | deps =
12 |     pytest
13 |     pytest-cov
14 |     codecov
15 |     codacy-coverage
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import tempfile
3 | from unittest import TestCase
4 | 
5 | from arrayfiles import utils
6 | 
7 | 
8 | class UtilsTestCase(TestCase):
9 | 
10 |     def setUp(self):
11 |         self.fp = tempfile.NamedTemporaryFile()
12 | 
13 |     def tearDown(self):
14 |         self.fp.close()
15 | 
16 |     def test_open(self):
17 |         with utils.fd_open(self.fp.name, os.O_RDWR) as fd:
18 |             self.assertIsInstance(fd, int)
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: Run CI build
2 | 
3 | on: [push]
4 | 
5 | jobs:
6 |   build:
7 | 
8 |     runs-on: ubuntu-latest
9 | 
10 |     steps:
11 |     - uses: actions/checkout@v1
12 |     - name: Set up Python 3.7
13 |       uses: actions/setup-python@v1
14 |       with:
15 |         python-version: 3.7
16 |     - name: Install dependencies
17 |       run: |
18 |         python -m pip install --upgrade pip
19 |         pip install pipenv
20 |         pipenv install --dev
21 |     - name: Lint with flake8
22 |       run: |
23 |         pipenv run flake8
24 |     - name: Test with pytest
25 |       run: |
26 |         pipenv run pytest tests
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | try:
3 |     from setuptools import setup
4 | except ImportError:
5 |     from distutils.core import setup
6 | 
7 | 
8 | setup(
9 |     name='arrayfiles',
10 |     use_scm_version=True,
11 |     setup_requires=['setuptools_scm'],
12 |     description='Array-like File Access in Python',
13 |     long_description=open('./README.md', encoding='utf-8').read(),
14 |     long_description_content_type='text/markdown',
15 |     url='https://github.com/yasufumy/arrayfiles',
16 |     author='Yasufumi Taniguchi',
17 |     author_email='yasufumi.taniguchi@gmail.com',
18 |     packages=[
19 |         'arrayfiles'
20 |     ],
21 |     license='MIT',
22 |     classifiers=[
23 |         'Programming Language :: Python :: 3.6',
24 |         'Programming Language :: Python :: 3.7',
25 |         'Programming Language :: Python :: 3.8',
26 |     ],
27 | )
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | dist: xenial
3 | python:
4 |   - 3.6
5 |   - 3.7
6 |   - 3.8
7 | install: pip install tox-travis flake8
8 | script:
9 |   - flake8
10 |   - tox
11 | deploy:
12 |   provider: pypi
13 |   user: yasufumy
14 |   password:
15 |     secure: qGKZVYUf480pq/WhFhU87Zf17IHYJgjftXqlPjHHN29joRdAx+4sYaCQ+x2yn2mg1amHph1hwo8i0QGnlqK9FWcDaPi5LnVOm/Ie+nSWo1Ie9/iF/nKl76215s3CNc8XL2+5O+j4JSb1/+o6dT5KrYCs520jwZXkkpxb6oVSJzTpBOMxtxYai6XlJk8I3isIdqodpO/hr1BYYPdMzVJn9zQHK5LFQfitiPNeGSXhpd++oMJjXK5jChp0AvClIJkW6LJBWceCBGTfOfxYtLrYZVNiYAY20jncDfSX7vZVBSwmDyNeWZZdGcQ9L92lxpQntpmWoDixbdSKMnZCOq4lp/m0+IWT2xjkVcqVzpgDBcveijkWsbC+3zvMrel+4xoBpfaL7Qm6W/M+jK8W0XyqKAUlGhB/nzGkTOg+HeU0ukv0F4LyHHEekFoVa/MzzLhuUtm6eoZPtXwksXxKHu9M3VKcJtyXp3oFcoPzBTjua/AdCto14UTuFPfjry7dFK7swy2X5NtSQsyGd5VvWGNXE38viq0Le4p+Fw1pHaiAzeqCclwisqLOKsDu4SZbr2Nn6b/F+yNjguDpzo4yLESqM783QV3TSlsLXGNZ6LMUOsVBU1ctwbffCD+uiIy+5+OwUD8FeYzcN14bjwuIz3HqDtAC1gJDeC5I5nMCCe2t+hc=
16 |   skip_existing: true
17 |   on:
18 |     tags: true
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2019 Taniguchi Yasufumi
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 | 
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 | 
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 | 
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 | 
50 | # Translations
51 | *.mo
52 | *.pot
53 | 
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 | 
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 | 
63 | # Scrapy stuff:
64 | .scrapy
65 | 
66 | # Sphinx documentation
67 | docs/_build/
68 | 
69 | # PyBuilder
70 | target/
71 | 
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 | 
75 | # pyenv
76 | .python-version
77 | 
78 | # celery beat schedule file
79 | celerybeat-schedule
80 | 
81 | # SageMath parsed files
82 | *.sage.py
83 | 
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 | 
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 | 
97 | # Rope project settings
98 | .ropeproject
99 | 
100 | # mkdocs documentation
101 | /site
102 | 
103 | # mypy
104 | .mypy_cache/
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ArrayFiles: Array-like File Access in Python
2 | [![Build Status](https://travis-ci.com/yasufumy/arrayfiles.svg?branch=master)](https://travis-ci.com/yasufumy/arrayfiles)
3 | [![Build Status](https://github.com/yasufumy/arrayfiles/workflows/Run%20CI%20build/badge.svg)](https://github.com/yasufumy/arrayfiles/actions?query=workflow%3A%22Run+CI+build%22)
4 | [![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/yasufumy/arrayfiles.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/yasufumy/arrayfiles/context:python)
5 | [![Codacy Badge](https://api.codacy.com/project/badge/Grade/2de6c7b86da743a7ba1a567d29213dca)](https://www.codacy.com/manual/yasufumy/arrayfiles?utm_source=github.com&utm_medium=referral&utm_content=yasufumy/arrayfiles&utm_campaign=Badge_Grade)
6 | [![Codacy Badge](https://api.codacy.com/project/badge/Coverage/2de6c7b86da743a7ba1a567d29213dca)](https://www.codacy.com/manual/yasufumy/arrayfiles?utm_source=github.com&utm_medium=referral&utm_content=yasufumy/arrayfiles&utm_campaign=Badge_Coverage)
7 | [![codecov](https://codecov.io/gh/yasufumy/arrayfiles/branch/master/graph/badge.svg)](https://codecov.io/gh/yasufumy/arrayfiles)
8 | 
9 | ArrayFiles allows you to access an arbitrary line of a text file.
10 | 
11 | If you are interested in using arrayfiles for Deep Learning, please check out [LineFlow](https://github.com/yasufumy/lineflow).
12 | 
13 | ## Installation
14 | 
15 | To install arrayfiles:
16 | 
17 | ```bash
18 | pip install arrayfiles
19 | ```
20 | 
21 | ## Usage
22 | 
23 | ```py
24 | import arrayfiles
25 | 
26 | data = arrayfiles.read_text('/path/to/text')
27 | 
28 | data[0]       # Access the first line of your text
29 | data[-1]      # Access the last line of your text
30 | data[10:100]  # Access the 10th to the 100th lines of your text
31 | ```
--------------------------------------------------------------------------------
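The same array-like access works for delimited files through `read_csv`, which is defined alongside `read_text` in `arrayfiles/core.py` (shown later in this listing). A minimal sketch of its use, with `/path/to/data.csv` as a placeholder path:

```python
import arrayfiles

# header=True takes the first line as field names, so each row comes back as a
# dict-like object keyed by column name; pass delimiter='\t' for TSV files.
data = arrayfiles.read_csv('/path/to/data.csv', header=True)

data[0]      # First data row (the header line is excluded from indexing)
data[-1]     # Last row
data[10:20]  # A list of rows
len(data)    # Number of data rows
```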
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | name: "CodeQL"
7 | 
8 | on:
9 |   push:
10 |     branches: [master]
11 |   pull_request:
12 |     # The branches below must be a subset of the branches above
13 |     branches: [master]
14 |   schedule:
15 |     - cron: '0 5 * * 5'
16 | 
17 | jobs:
18 |   analyze:
19 |     name: Analyze
20 |     runs-on: ubuntu-latest
21 | 
22 |     strategy:
23 |       fail-fast: false
24 |       matrix:
25 |         # Override automatic language detection by changing the below list
26 |         # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
27 |         language: ['python']
28 |         # Learn more...
29 |         # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
30 | 
31 |     steps:
32 |     - name: Checkout repository
33 |       uses: actions/checkout@v2
34 |       with:
35 |         # We must fetch at least the immediate parents so that if this is
36 |         # a pull request then we can checkout the head.
37 |         fetch-depth: 2
38 | 
39 |     # If this run was triggered by a pull request event, then checkout
40 |     # the head of the pull request instead of the merge commit.
41 |     - run: git checkout HEAD^2
42 |       if: ${{ github.event_name == 'pull_request' }}
43 | 
44 |     # Initializes the CodeQL tools for scanning.
45 |     - name: Initialize CodeQL
46 |       uses: github/codeql-action/init@v1
47 |       with:
48 |         languages: ${{ matrix.language }}
49 |         # If you wish to specify custom queries, you can do so here or in a config file.
50 |         # By default, queries listed here will override any specified in a config file.
51 |         # Prefix the list here with "+" to use these queries and those in the config file.
52 |         # queries: ./path/to/local/query, your-org/your-repo/queries@main
53 | 
54 |     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
55 |     # If this step fails, then you should remove it and run the build manually (see below)
56 |     - name: Autobuild
57 |       uses: github/codeql-action/autobuild@v1
58 | 
59 |     # ℹ️ Command-line programs to run using the OS shell.
60 |     # 📚 https://git.io/JvXDl
61 | 
62 |     # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
63 |     #    and modify them (or add more) to build your code if your project
64 |     #    uses a compiled language
65 | 
66 |     #- run: |
67 |     #   make bootstrap
68 |     #   make release
69 | 
70 |     - name: Perform CodeQL Analysis
71 |       uses: github/codeql-action/analyze@v1
--------------------------------------------------------------------------------
/tests/test_core.py:
--------------------------------------------------------------------------------
1 | import tempfile
2 | from unittest import TestCase
3 | 
4 | import arrayfiles
5 | 
6 | 
7 | class TextTestCase(TestCase):
8 | 
9 |     def setUp(self):
10 |         self.length = 100
11 | 
12 |         fp = tempfile.NamedTemporaryFile()
13 |         for i in range(self.length):
14 |             fp.write(f'line #{i}\n'.encode('utf-8'))
15 |         fp.seek(0)
16 |         self.fp = fp
17 | 
18 |     def tearDown(self):
19 |         self.fp.close()
20 | 
21 |     def test_dunder_init(self):
22 |         text = arrayfiles.read_text(self.fp.name)
23 |         self.assertEqual(text._path, self.fp.name)
24 |         self.assertEqual(text._encoding, 'utf-8')
25 | 
26 |     def test_supports_random_access(self):
27 |         text = arrayfiles.read_text(self.fp.name)
28 |         for i in range(self.length):
29 |             self.assertEqual(text[i], f'line #{i}')
30 |             self.assertEqual(text[i - self.length], f'line #{i}')
31 | 
32 |     def test_iterates_each_line(self):
33 |         text = arrayfiles.read_text(self.fp.name)
34 |         for i, x in enumerate(text):
35 |             self.assertEqual(x, f'line #{i}')
36 | 
37 |     def test_slices_items(self):
38 |         text = arrayfiles.read_text(self.fp.name)
39 |         self.assertSequenceEqual(text[:self.length], text)
40 | 
41 |     def test_raises_value_error_with_invalid_span(self):
42 |         text = arrayfiles.read_text(self.fp.name)
43 |         with self.assertRaises(ValueError):
44 |             next(text.iterate(self.length, 0))
45 | 
46 |     def test_iterates_spans(self):
47 |         text = arrayfiles.read_text(self.fp.name)
48 |         step = 10
49 |         for start in range(0, self.length, step):
50 |             end = start + step
51 |             with self.subTest(start=start, end=end):
52 |                 for i, line in zip(range(start, end), text.iterate(start, end)):
53 |                     self.assertEqual(line, f'line #{i}')
54 | 
55 |     def test_raises_index_error_with_invalid_index(self):
56 |         text = arrayfiles.read_text(self.fp.name)
57 | 
58 |         def getitem(i):
59 |             return text[i]
60 | 
61 |         with self.assertRaises(IndexError):
62 |             getitem(self.length)
63 |         with self.assertRaises(IndexError):
64 |             getitem(-self.length-1)
65 | 
66 |     def test_dunder_len(self):
67 |         text = arrayfiles.read_text(self.fp.name)
68 |         self.assertEqual(len(text), self.length)
69 |         self.assertEqual(len(text[:None]), self.length)
70 |         self.assertEqual(len(list(text)), self.length)
71 | 
72 |     def test_dunder_getstate(self):
73 |         text = arrayfiles.read_text(self.fp.name)
74 |         state = text.__getstate__()
75 |         self.assertNotIn('_mm', state)
76 | 
77 |     def test_dunder_setstate(self):
78 |         text = arrayfiles.read_text(self.fp.name)
79 |         state = text.__getstate__()
80 |         self.assertNotIn('_mm', state)
81 |         text.__setstate__(state)
82 |         self.assertIn('_mm', text.__dict__)
83 | 
84 |     def test_eager_load(self):
85 |         text1 = arrayfiles.read_text(self.fp.name, lazy=False)
86 |         text2 = arrayfiles.read_text(self.fp.name, lazy=True)
87 |         for a, b in zip(text1, text2):
88 |             self.assertEqual(a, b)
89 | 
90 | 
91 | class CsvTestCase(TestCase):
92 | 
93 |     def setUp(self):
94 |         lines = ['en,ja',
95 |                  'this is English .,this is Japanese .',
96 |                  'this is also English .,this is also Japanese .']
97 |         self.lines = lines
98 |         fp = tempfile.NamedTemporaryFile()
99 |         for x in lines:
100 |             fp.write(f'{x}\n'.encode('utf-8'))
101 |         fp.seek(0)
102 |         self.fp = fp
103 | 
104 |     def tearDown(self):
105 |         self.fp.close()
106 | 
107 |     def test_dunder_init(self):
108 |         data = arrayfiles.read_csv(self.fp.name)
109 |         self.assertEqual(data._path, self.fp.name)
110 |         self.assertEqual(data._encoding, 'utf-8')
111 |         self.assertEqual(data._delimiter, ',')
112 |         self.assertFalse(data._header)
113 | 
114 |     def test_slices_items(self):
115 |         data = arrayfiles.read_csv(self.fp.name)
116 |         self.assertSequenceEqual(data[:len(self.lines)], data)
117 | 
118 |     def test_loads_csv_with_header(self):
119 |         data = arrayfiles.read_csv(self.fp.name, header=True)
120 |         self.assertTrue(data._header)
121 | 
122 |     def test_iterates_csv_with_header(self):
123 |         from collections import OrderedDict
124 | 
125 |         data = arrayfiles.read_csv(self.fp.name, header=True)
126 |         header = self.lines[0].split(',')
127 |         expected = [OrderedDict(zip(header, line.split(',')))
128 |                     for line in self.lines[1:]]
129 |         for i, (x, y) in enumerate(zip(data, expected)):
130 |             self.assertEqual(x, y)
131 |             self.assertEqual(data[i], y)
132 | 
133 |     def test_iterates_csv_without_header(self):
134 |         data = arrayfiles.read_csv(self.fp.name, header=False)
135 |         expected = [line.split(',') for line in self.lines]
136 |         self.assertSequenceEqual(data, expected)
137 |         for x, y in zip(data, expected):
138 |             self.assertEqual(x, y)
139 | 
140 |     def test_eager_load(self):
141 |         text1 = arrayfiles.read_csv(self.fp.name, lazy=False)
142 |         text2 = arrayfiles.read_csv(self.fp.name, lazy=True)
143 |         for a, b in zip(text1, text2):
144 |             self.assertEqual(a, b)
145 | 
146 | 
147 | class CustomNewlineTextTestCase(TestCase):
148 | 
149 |     def setUp(self):
150 |         self.length = 100
151 | 
152 |         fp = tempfile.NamedTemporaryFile()
153 |         for i in range(self.length):
154 |             fp.write(f'line #{i}\n\n'.encode('utf-8'))
155 |         fp.seek(0)
156 |         self.fp = fp
157 |         self.newline = '\n\n'
158 | 
159 |     def tearDown(self):
160 |         self.fp.close()
161 | 
162 |     def test_dunder_len(self):
163 |         text = arrayfiles.read_text(self.fp.name, newline=self.newline)
164 |         self.assertEqual(len(text), self.length)
165 |         self.assertEqual(len(text[:None]), self.length)
166 |         self.assertEqual(len(list(text)), self.length)
167 | 
168 |     def test_iterates_each_line(self):
169 |         text = arrayfiles.read_text(self.fp.name, newline=self.newline)
170 |         for i, (x, y) in enumerate(zip(text, text[:None])):
171 |             self.assertEqual(x, y, f'line #{i}')
172 | 
173 |     def test_eager_load(self):
174 |         text1 = arrayfiles.read_text(self.fp.name, newline=self.newline, lazy=False)
175 |         text2 = arrayfiles.read_text(self.fp.name, newline=self.newline, lazy=True)
176 |         for a, b in zip(text1, text2):
177 |             self.assertEqual(a, b)
--------------------------------------------------------------------------------
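Taken together, the tests above pin down the public behaviour of `read_text`: index access, slicing, span iteration, and pickling support. A minimal usage sketch of those behaviours (assuming `/path/to/text` is a placeholder for an existing file with at least 20 lines):

```python
import pickle

import arrayfiles

text = arrayfiles.read_text('/path/to/text')

print(text[0])        # random access by line index
print(text[-1])       # negative indices count from the end
chunk = text[10:20]   # slicing returns a plain list of lines

for line in text.iterate(5, 15):  # stream a span without touching other lines
    print(line)

# __getstate__/__setstate__ drop and rebuild the mmap, so instances survive
# pickling, e.g. when they are handed to worker processes.
clone = pickle.loads(pickle.dumps(text))
assert clone[0] == text[0]
```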
/arrayfiles/core.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import functools
3 | import io
4 | import mmap
5 | import os
6 | from typing import Any, Dict, Iterator, List, Optional, Union
7 | 
8 | from arrayfiles import utils
9 | 
10 | 
11 | class TextFile:
12 |     """Load a line-oriented text file.
13 | 
14 |     Args:
15 |         path (str): The path to the text file.
16 |         encoding (str, optional): The name of the encoding used to decode.
17 |     """
18 | 
19 |     def __init__(self, path: str, encoding: Optional[str] = 'utf-8') -> None:
20 |         path = os.path.expanduser(path)
21 |         assert os.path.exists(path)
22 | 
23 |         self._path = path
24 |         self._encoding = encoding
25 |         with utils.fd_open(path, os.O_RDWR) as fd:
26 |             self._mm = mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
27 | 
28 |     def _get_offsets(self) -> List[int]:
29 |         mm = self._mm
30 |         mm.seek(0)
31 |         return [0] + [mm.tell() for _ in iter(mm.readline, b'')]
32 | 
33 |     @property
34 |     @functools.lru_cache()
35 |     def _offsets(self) -> List[int]:
36 |         return self._get_offsets()
37 | 
38 |     def _get_length(self) -> int:
39 |         return len(self._offsets) - 1
40 | 
41 |     @property
42 |     @functools.lru_cache()
43 |     def _length(self) -> int:
44 |         return self._get_length()
45 | 
46 |     def __iter__(self) -> Iterator[str]:
47 |         with io.open(self._path, encoding=self._encoding) as fp:
48 |             for line in fp:
49 |                 yield line.rstrip(os.linesep)
50 | 
51 |     def iterate(self, start: int, end: int) -> Iterator[str]:
52 |         if start > end:
53 |             raise ValueError('end should be larger than start.')
54 |         self._mm.seek(self._offsets[start])
55 |         readline = self._mm.readline
56 |         tell = self._mm.tell
57 |         end = self._offsets[end] if end < len(self._offsets) else self._offsets[-1]
58 |         while tell() != end:
59 |             yield readline().decode(self._encoding).rstrip(os.linesep)
60 | 
61 |     def __getitem__(self, index: Union[int, slice]) -> Union[str, List[str]]:
62 |         if isinstance(index, slice):
63 |             start, stop, step = index.indices(self._length)
64 |             return [self.getline(i) for i in range(start, stop, step)]
65 | 
66 |         if index >= 0:
67 |             if index >= self._length:
68 |                 raise IndexError('Text object index out of range')
69 |         else:
70 |             if index < - self._length:
71 |                 raise IndexError('Text object index out of range')
72 |             index += self._length
73 | 
74 |         return self.getline(index)
75 | 
76 |     def getline(self, i: int) -> str:
77 |         start, end = self._offsets[i: i + 2]
78 |         return self._mm[start: end].decode(self._encoding).rstrip(os.linesep)
79 | 
80 |     def __len__(self) -> int:
81 |         return self._length
82 | 
83 |     def __getstate__(self) -> Dict[str, Any]:
84 |         state = self.__dict__.copy()
85 |         del state['_mm']
86 |         return state
87 | 
88 |     def __setstate__(self, state: Dict[str, Any]) -> None:
89 |         self.__dict__.update(state)
90 |         with utils.fd_open(self._path, os.O_RDWR) as fd:
91 |             self._mm = mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
92 | 
93 |     def __del__(self) -> None:
94 |         if getattr(self, '_mm', None):
95 |             self._mm.close()
96 | 
97 | 
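# A note on the TextFile class above (explanatory aside, not part of core.py):
# random access works by building a byte-offset index over the memory-mapped
# file exactly once,
#
#     offsets = [0] + [mm.tell() for _ in iter(mm.readline, b'')]
#
# after which getline(i) slices the mmap between offsets[i] and offsets[i + 1]
# and decodes only that span. Nothing is copied into a Python list unless
# read_text(..., lazy=False) is requested.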
98 | class CsvFile(TextFile):
99 |     """Load a CSV file.
100 | 
101 |     Args:
102 |         path (str): The path to the text file.
103 |         encoding (str, optional): The name of the encoding used to decode.
104 |         delimiter (str, optional): A one-character string used to separate fields. It defaults to ','.
105 |         header (bool, optional): If ``True``, the csvfile will use the first line of the file as a header.
106 |     """
107 | 
108 |     def __init__(self,
109 |                  path: str,
110 |                  encoding: Optional[str] = 'utf-8',
111 |                  delimiter: Optional[str] = ',',
112 |                  header: Optional[bool] = False,
113 |                  fieldnames: Optional[List[str]] = None) -> None:
114 |         super().__init__(path, encoding)
115 | 
116 |         self._delimiter = delimiter
117 |         self._header = header
118 |         if header:
119 |             if fieldnames is None:
120 |                 with io.open(path, encoding=encoding) as fp:
121 |                     fieldnames = next(csv.reader(fp, delimiter=delimiter))
122 |             # TODO: csv.DictReader skips blank lines.
123 |             # So the item length doesn't match if the given file includes blank lines.
124 |             self._reader = functools.partial(csv.DictReader, delimiter=delimiter, fieldnames=fieldnames)
125 |         else:
126 |             self._reader = functools.partial(csv.reader, delimiter=delimiter)
127 | 
128 |     def _get_offsets(self) -> List[int]:
129 |         offsets = super(CsvFile, self)._get_offsets()
130 |         if self._header:
131 |             offsets.pop(0)
132 |         return offsets
133 | 
134 |     def __iter__(self) -> Iterator[Union[List[Any], Dict[str, Any]]]:
135 |         with io.open(self._path, encoding=self._encoding) as fp:
136 |             if self._header:
137 |                 fp.readline()
138 |             yield from self._reader(fp)
139 | 
140 |     def __getitem__(self, index: Union[int, slice]) -> Union[List[Any], Dict[str, Any]]:
141 |         x = super().__getitem__(index)
142 |         if not isinstance(x, list):
143 |             x = [x]
144 |         row = list(self._reader(x))
145 |         if len(row) == 1:
146 |             return row[0]
147 |         return row
148 | 
149 | 
150 | class CustomNewlineTextFile(TextFile):
151 |     """Load a line-oriented text file with custom newline letters.
152 | 
153 |     Args:
154 |         path (str): The path to the text file.
155 |         newline (str): The newline letters.
156 |         encoding (str, optional): The name of the encoding used to decode.
157 |     """
158 | 
159 |     def __init__(self, path: str, newline: str, encoding: Optional[str] = 'utf-8') -> None:
160 |         super(CustomNewlineTextFile, self).__init__(path, encoding)
161 | 
162 |         self._newline = newline.encode(encoding)
163 | 
164 |     def _get_offsets(self) -> List[int]:
165 |         mm = self._mm
166 |         mm.seek(0)
167 | 
168 |         offsets = [0]
169 |         start = 0
170 |         newline = self._newline
171 |         newline_offset = len(newline)
172 |         while True:
173 |             temp = mm.find(newline, start)
174 |             if temp == -1:
175 |                 break
176 |             start = temp + newline_offset
177 |             offsets.append(start)
178 |         return offsets
179 | 
180 |     def __iter__(self) -> Iterator[str]:
181 |         mm = self._mm
182 |         for start, end in zip(self._offsets, self._offsets[1:]):
183 |             yield mm[start: end].decode(self._encoding).rstrip(os.linesep)
184 | 
185 | 
186 | def read_text(
187 |         path: str,
188 |         encoding: Optional[str] = 'utf-8',
189 |         newline: Optional[str] = '\n',
190 |         lazy: Optional[bool] = True
191 | ) -> Union[TextFile, CustomNewlineTextFile, List[str]]:
192 |     """Load a line-oriented text file.
193 | 
194 |     Args:
195 |         path (str): The path to the text file.
196 |         encoding (str, optional): The name of the encoding used to decode.
197 |         newline (str, optional): The newline letters.
198 |         lazy (bool, optional): If ``True``, the function returns a ``TextFile`` or
199 |             ``CustomNewlineTextFile`` object. Otherwise, it returns a list of strings.
200 | 
201 |     Returns (Union[TextFile, CustomNewlineTextFile, List[str]]):
202 |         The loaded array-like accessible text file.
203 | 
204 |     Examples:
205 |         >>> import arrayfiles
206 |         >>> text = arrayfiles.read_text('/path/to/your/text')
207 |         >>> print(text[0])
208 |         The 1st line in your text will be displayed.
209 |         >>> print(text[10:20])
210 |         The 10th to 20th lines in your text will be displayed.
211 | """ 212 | 213 | if newline == '\n': 214 | data = TextFile(path, encoding) 215 | else: 216 | data = CustomNewlineTextFile(path, newline, encoding) 217 | 218 | if lazy: 219 | return data 220 | else: 221 | return list(data) 222 | 223 | 224 | def read_csv( 225 | path: str, 226 | encoding: Optional[str] = 'utf-8', 227 | delimiter: Optional[str] = ',', 228 | header: Optional[bool] = False, 229 | fieldnames: Optional[List[str]] = None, 230 | lazy: Optional[str] = True 231 | ) -> Union[CsvFile, List[str]]: 232 | """Load a CSV file. 233 | 234 | Args: 235 | path (str): The path to the text file. 236 | encoding (str, optional): The name of the encoding used to decode. 237 | delimiter (str, optional): A one-character string used to separate fields. It defaults to ','. 238 | header (bool, optional): If ``True``, this method will use the first line of the file as a header. 239 | fieldnames (list, optional): custom header. 240 | lazy (bool, optional): If ``True``, the function returns ``TextFile`` or 241 | ``CustomNewlineTextFile`` object. Otherwise, returns a list of string. 242 | 243 | Returns (Union[CsvFile, List[str]]): 244 | The loaded array-like accessible csv file. 245 | 246 | Examples: 247 | >>> import arrayfiles 248 | >>> text = arrayfiles.read_csv('/path/to/your/tsv', delimiter='\t') 249 | >>> print(text[0]) 250 | ['the', 'first', 'row'] 251 | >>> print(text[10:12]) 252 | [['the', '10th', 'row'], ['the', '12th', 'row']] 253 | """ 254 | 255 | data = CsvFile(path, encoding, delimiter, header, fieldnames) 256 | 257 | if lazy: 258 | return data 259 | else: 260 | return list(data) 261 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "ad9430c5c951a871644406001a634e9db91bf83487097bc2f092a625d8a5143d" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.7" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": {}, 19 | "develop": { 20 | "autopep8": { 21 | "hashes": [ 22 | "sha256:8b1659c7f003e693199f52caffdc06585bb0716900bbc6a7442fd931d658c077", 23 | "sha256:ad924b42c2e27a1ac58e432166cc4588f5b80747de02d0d35b1ecbd3e7d57207" 24 | ], 25 | "index": "pypi", 26 | "version": "==2.0.0" 27 | }, 28 | "backcall": { 29 | "hashes": [ 30 | "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e", 31 | "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255" 32 | ], 33 | "version": "==0.2.0" 34 | }, 35 | "coverage": { 36 | "extras": [ 37 | "toml" 38 | ], 39 | "hashes": [ 40 | "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", 41 | "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2", 42 | "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a", 43 | "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", 44 | "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", 45 | "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6", 46 | "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7", 47 | "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f", 48 | "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02", 49 | "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c", 50 | 
"sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063", 51 | "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", 52 | "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5", 53 | "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959", 54 | "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", 55 | "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", 56 | "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", 57 | "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9", 58 | "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5", 59 | "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f", 60 | "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", 61 | "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", 62 | "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9", 63 | "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f", 64 | "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", 65 | "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb", 66 | "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1", 67 | "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb", 68 | "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250", 69 | "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e", 70 | "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", 71 | "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5", 72 | "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", 73 | "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2", 74 | "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", 75 | "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", 76 | "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", 77 | "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", 78 | "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9", 79 | "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", 80 | "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0", 81 | "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9", 82 | "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", 83 | "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050", 84 | "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d", 85 | "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6", 86 | "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353", 87 | "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb", 88 | "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e", 89 | "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8", 90 | "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495", 91 | "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2", 92 | "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd", 93 | "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27", 94 | 
"sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1", 95 | "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818", 96 | "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", 97 | "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e", 98 | "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850", 99 | "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3" 100 | ], 101 | "markers": "python_version >= '3.7'", 102 | "version": "==7.2.7" 103 | }, 104 | "decorator": { 105 | "hashes": [ 106 | "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", 107 | "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186" 108 | ], 109 | "markers": "python_version >= '3.5'", 110 | "version": "==5.1.1" 111 | }, 112 | "exceptiongroup": { 113 | "hashes": [ 114 | "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", 115 | "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" 116 | ], 117 | "markers": "python_version < '3.11'", 118 | "version": "==1.1.1" 119 | }, 120 | "flake8": { 121 | "hashes": [ 122 | "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db", 123 | "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248" 124 | ], 125 | "index": "pypi", 126 | "version": "==5.0.4" 127 | }, 128 | "importlib-metadata": { 129 | "hashes": [ 130 | "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b", 131 | "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31" 132 | ], 133 | "markers": "python_version < '3.8'", 134 | "version": "==4.2.0" 135 | }, 136 | "iniconfig": { 137 | "hashes": [ 138 | "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", 139 | "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" 140 | ], 141 | "markers": "python_version >= '3.7'", 142 | "version": "==2.0.0" 143 | }, 144 | "ipython": { 145 | "hashes": [ 146 | "sha256:af3bdb46aa292bce5615b1b2ebc76c2080c5f77f54bda2ec72461317273e7cd6", 147 | "sha256:c175d2440a1caff76116eb719d40538fbb316e214eda85c5515c303aacbfb23e" 148 | ], 149 | "index": "pypi", 150 | "version": "==7.34.0" 151 | }, 152 | "isort": { 153 | "extras": [ 154 | "pipfile" 155 | ], 156 | "hashes": [ 157 | "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db", 158 | "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746" 159 | ], 160 | "index": "pypi", 161 | "version": "==5.11.5" 162 | }, 163 | "jedi": { 164 | "hashes": [ 165 | "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e", 166 | "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612" 167 | ], 168 | "markers": "python_version >= '3.6'", 169 | "version": "==0.18.2" 170 | }, 171 | "matplotlib-inline": { 172 | "hashes": [ 173 | "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311", 174 | "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304" 175 | ], 176 | "markers": "python_version >= '3.5'", 177 | "version": "==0.1.6" 178 | }, 179 | "mccabe": { 180 | "hashes": [ 181 | "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", 182 | "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" 183 | ], 184 | "markers": "python_version >= '3.6'", 185 | "version": "==0.7.0" 186 | }, 187 | "packaging": { 188 | "hashes": [ 189 | 
"sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", 190 | "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" 191 | ], 192 | "markers": "python_version >= '3.7'", 193 | "version": "==23.1" 194 | }, 195 | "parso": { 196 | "hashes": [ 197 | "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0", 198 | "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75" 199 | ], 200 | "markers": "python_version >= '3.6'", 201 | "version": "==0.8.3" 202 | }, 203 | "pexpect": { 204 | "hashes": [ 205 | "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937", 206 | "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c" 207 | ], 208 | "markers": "sys_platform != 'win32'", 209 | "version": "==4.8.0" 210 | }, 211 | "pickleshare": { 212 | "hashes": [ 213 | "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca", 214 | "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56" 215 | ], 216 | "version": "==0.7.5" 217 | }, 218 | "pluggy": { 219 | "hashes": [ 220 | "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849", 221 | "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3" 222 | ], 223 | "markers": "python_version >= '3.7'", 224 | "version": "==1.2.0" 225 | }, 226 | "prompt-toolkit": { 227 | "hashes": [ 228 | "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b", 229 | "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f" 230 | ], 231 | "markers": "python_version >= '3.7'", 232 | "version": "==3.0.38" 233 | }, 234 | "ptyprocess": { 235 | "hashes": [ 236 | "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", 237 | "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220" 238 | ], 239 | "version": "==0.7.0" 240 | }, 241 | "pycodestyle": { 242 | "hashes": [ 243 | "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785", 244 | "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b" 245 | ], 246 | "markers": "python_version >= '3.6'", 247 | "version": "==2.9.1" 248 | }, 249 | "pyflakes": { 250 | "hashes": [ 251 | "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2", 252 | "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3" 253 | ], 254 | "markers": "python_version >= '3.6'", 255 | "version": "==2.5.0" 256 | }, 257 | "pygments": { 258 | "hashes": [ 259 | "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c", 260 | "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1" 261 | ], 262 | "markers": "python_version >= '3.7'", 263 | "version": "==2.15.1" 264 | }, 265 | "pytest": { 266 | "hashes": [ 267 | "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32", 268 | "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a" 269 | ], 270 | "index": "pypi", 271 | "version": "==7.4.0" 272 | }, 273 | "pytest-cov": { 274 | "hashes": [ 275 | "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", 276 | "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a" 277 | ], 278 | "index": "pypi", 279 | "version": "==4.1.0" 280 | }, 281 | "setuptools": { 282 | "hashes": [ 283 | "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f", 284 | "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235" 285 | ], 286 | "markers": "python_version >= '3.7'", 287 | 
"version": "==68.0.0" 288 | }, 289 | "tomli": { 290 | "hashes": [ 291 | "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", 292 | "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" 293 | ], 294 | "markers": "python_version >= '3.7'", 295 | "version": "==2.0.1" 296 | }, 297 | "traitlets": { 298 | "hashes": [ 299 | "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8", 300 | "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9" 301 | ], 302 | "markers": "python_version >= '3.7'", 303 | "version": "==5.9.0" 304 | }, 305 | "typing-extensions": { 306 | "hashes": [ 307 | "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26", 308 | "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5" 309 | ], 310 | "markers": "python_version < '3.8'", 311 | "version": "==4.6.3" 312 | }, 313 | "wcwidth": { 314 | "hashes": [ 315 | "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e", 316 | "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0" 317 | ], 318 | "version": "==0.2.6" 319 | }, 320 | "zipp": { 321 | "hashes": [ 322 | "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", 323 | "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" 324 | ], 325 | "markers": "python_version >= '3.7'", 326 | "version": "==3.15.0" 327 | } 328 | } 329 | } 330 | --------------------------------------------------------------------------------