├── .github
│   └── workflows
│       ├── build.yaml
│       ├── conda.yml
│       └── publish.yaml
├── .gitignore
├── LICENSE
├── Makefile
├── README.md
├── conda
│   ├── conda_build_config.yaml
│   └── meta.yaml
├── doltcli
│   ├── __init__.py
│   ├── dolt.py
│   ├── types.py
│   └── utils.py
├── poetry.lock
├── pyproject.toml
├── scripts
│   └── run_tests.sh
└── tests
    ├── conftest.py
    ├── helpers.py
    ├── test_dolt.py
    ├── test_read.py
    ├── test_types.py
    └── test_write.py

/.github/workflows/build.yaml:
--------------------------------------------------------------------------------
 1 | name: Doltcli Tests
 2 | 
 3 | on:
 4 |   push:
 5 |     branches:
 6 |       - main
 7 |   pull_request:
 8 |   schedule:
 9 |     - cron: '0 20 * * *' # run daily at noon PT
10 | 
11 | jobs:
12 |   # Run the test suite across the Python version matrix
13 |   build:
14 |     strategy:
15 |       matrix:
16 |         python-version: ['3.6', '3.9']
17 |         os: [ubuntu-latest]
18 |     runs-on: ${{ matrix.os }}
19 | 
20 |     steps:
21 |       - uses: actions/checkout@v2
22 |       - name: Set up Python ${{ matrix.python-version }}
23 |         uses: actions/setup-python@v1
24 |         with:
25 |           python-version: ${{ matrix.python-version }}
26 |       - name: Install dependencies
27 |         run: |
28 |           sudo curl -L https://github.com/liquidata-inc/dolt/releases/latest/download/install.sh | sudo bash
29 |           dolt config --global --add user.email bojack@horseman.com
30 |           dolt config --global --add user.name "Bojack Horseman"
31 |           dolt config --global --add metrics.host eventsapi.awsdev.ld-corp.com
32 |           dolt config --global --add metrics.port 443
33 |       - uses: Gr1N/setup-poetry@v4
34 |       - uses: actions/cache@v1
35 |         id: cached-poetry-dependencies
36 |         with:
37 |           path: ~/.cache/pypoetry/virtualenvs
38 |           key: ${{ runner.os }}-${{ matrix.python-version }}-poetry-${{ hashFiles('poetry.lock') }}
39 |           restore-keys: |
40 |             ${{ runner.os }}-${{ matrix.python-version }}-poetry-
41 |       - name: Install poetry dependencies
42 |         run: poetry install
43 |         if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
44 |       - name: Format
45 |         run: |
46 |           poetry run make fmt
47 |       - name: Lint
48 |         run: |
49 |           poetry run make lint
50 |       - name: Execute pytest
51 |         run: |
52 |           poetry run make test
53 |       - uses: codecov/codecov-action@v1
54 |         if: matrix.python-version == '3.9'
55 |         with:
56 |           token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos
57 |           #files: ./coverage-${{ runner.os }}-${{ matrix.python-version }}.xml
58 |           name: codecov-umbrella # optional
59 |           fail_ci_if_error: true # optional (default = false)
60 |           verbose: true # optional (default = false)
61 | 
--------------------------------------------------------------------------------
/.github/workflows/conda.yml:
--------------------------------------------------------------------------------
 1 | name: publish_conda
 2 | 
 3 | on:
 4 |   workflow_dispatch:
 5 |     inputs:
 6 |       version:
 7 |         description: 'SemVer format tag, i.e. 0.23.4'
 8 |         required: true
 9 | 
10 | jobs:
11 |   conda_publish:
12 |     runs-on: ubuntu-latest
13 |     steps:
14 |       - uses: actions/checkout@v2
15 |       - name: Update Doltcli version
16 |         run: |
17 |           sed -i 's/version = \S*/version = "'"$VERSION"'"/' "$FILE"
18 |         env:
19 |           FILE: conda/meta.yaml
20 |           VERSION: ${{ github.event.inputs.version }}
21 |       - uses: EndBug/add-and-commit@v7
22 |         with:
23 |           message: ${{ format('[ga-bump-release] update Doltcli conda version to {0}', github.event.inputs.version) }}
24 |           add: ${{ format('{0}/conda/meta.yaml', github.workspace) }}
25 |           cwd: "."
26 |       - name: publish-to-conda
27 |         uses: maxibor/conda-package-publish-action@v1.1
28 |         with:
29 |           subDir: 'conda'
30 |           AnacondaToken: ${{ secrets.CONDA_SECRET }}
31 | 
--------------------------------------------------------------------------------
/.github/workflows/publish.yaml:
--------------------------------------------------------------------------------
 1 | name: Release
 2 | 
 3 | on:
 4 |   workflow_dispatch:
 5 |     inputs:
 6 |       version:
 7 |         description: 'SemVer format release tag, i.e. 0.23.4'
 8 |         required: true
 9 | 
10 | jobs:
11 |   bump-version:
12 |     name: Bump Version and Release
13 |     runs-on: ubuntu-latest
14 |     strategy:
15 |       matrix:
16 |         python-version: ["3.10"]
17 |     permissions: write-all
18 |     # IMPORTANT: this permission is mandatory for trusted publishing
19 | 
20 |     steps:
21 |       - uses: actions/checkout@v3
22 |       - name: Set up Python ${{ matrix.python-version }}
23 |         uses: actions/setup-python@v4
24 |         with:
25 |           python-version: ${{ matrix.python-version }}
26 |       - uses: Gr1N/setup-poetry@v8
27 |       - name: Install poetry dependencies
28 |         run: poetry install
29 |       - name: Update Doltcli version
30 |         run: |
31 |           sed -i 's/^version = \S*/version = "'"$VERSION"'"/' "$FILE"
32 |         env:
33 |           FILE: pyproject.toml
34 |           VERSION: ${{ github.event.inputs.version }}
35 |       - uses: EndBug/add-and-commit@v7
36 |         with:
37 |           message: ${{ format('[ga-bump-release] update Doltcli version to {0}', github.event.inputs.version) }}
38 |           add: ${{ format('{0}/pyproject.toml', github.workspace) }}
39 |           cwd: "."
40 |       - name: Create Release
41 |         id: create_release
42 |         uses: actions/create-release@v1
43 |         env:
44 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
45 |         with:
46 |           tag_name: v${{ github.event.inputs.version }}
47 |           release_name: ${{ github.event.inputs.version }}
48 |           draft: false
49 |           prerelease: false
50 |       - name: Build
51 |         run: |
52 |           poetry build
53 |       - name: Publish package distributions to PyPI
54 |         uses: pypa/gh-action-pypi-publish@release/v1
55 |         with:
56 |           password: ${{ secrets.pypi_password }}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | dist/
2 | zip/
3 | .pycache/
4 | *__pycache__*
5 | .idea/
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
 1 |                                  Apache License
 2 |                            Version 2.0, January 2004
 3 |                         http://www.apache.org/licenses/
 4 | 
 5 |    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 6 | 
 7 |    1. Definitions.
 8 | 
 9 |       "License" shall mean the terms and conditions for use, reproduction,
10 |       and distribution as defined by Sections 1 through 9 of this document.
11 | 
12 |       "Licensor" shall mean the copyright owner or entity authorized by
13 |       the copyright owner that is granting the License.
14 | 
15 |       "Legal Entity" shall mean the union of the acting entity and all
16 |       other entities that control, are controlled by, or are under common
17 |       control with that entity. For the purposes of this definition,
18 |       "control" means (i) the power, direct or indirect, to cause the
19 |       direction or management of such entity, whether by contract or
20 |       otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 |       outstanding shares, or (iii) beneficial ownership of such entity.
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
 1 | line_length = 95
 2 | package = doltcli
 3 | 
 4 | .PHONY: fmt
 5 | fmt: ## Format code with black and isort
 6 | 	black . --check -t py37 --line-length=${line_length} || ( black . -t py37 --line-length=${line_length} && false )
 7 | 	isort .
 8 | 
 9 | .PHONY: lint
10 | lint: ## Run linters
11 | 	mypy ${package}
12 | 	flake8 ${package} \
13 | 		--max-line-length=${line_length} \
14 | 		--ignore=F401,E501
15 | 
16 | .PHONY: test
17 | test: ## Run tests
18 | 	pytest tests --cov=${package} --cov-report=term --cov-report xml
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # DoltCLI
 2 | 
 3 | This is a minimalist package intended for data engineering applications:
 4 | 
 5 | - unzipped size ~100kb
 6 | - one dependency -- the Dolt binary
 7 | - only changes when Dolt changes
 8 | 
 9 | If you are a data scientist or are using Pandas, there are three options:
10 | - Use [doltpy](https://github.com/dolthub/doltpy)
11 | - Use [pandas.DataFrame.to_sql](https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_sql.html)
12 |   with [dolt
13 |   sql-server](https://docs.dolthub.com/interfaces/cli#dolt-sql-server)
14 | - Manually convert the `doltcli` return types to DataFrames with
15 |   `pd.DataFrame.from_records(...)` or another [DataFrame constructor of your
16 |   choice](https://pandas.pydata.org/pandas-docs/version/0.18.1/generated/pandas.DataFrame.html).
17 | 
18 | Note: `doltcli` is in development. The interface does not
19 | completely wrap the Dolt CLI yet, and may have function signature changes in
20 | the short term. Reach out to the team on our Discord if you have
21 | questions regarding production use cases.
22 | 
23 | ## Dev Setup
24 | 
25 | - Clone the repo
26 | - Python 3.6+ is required
27 | - [Install the Dolt binary](https://docs.dolthub.com/getting-started/installation). The currently recommended version is [1.16.0](https://github.com/dolthub/dolt/releases/tag/v1.16.0).
28 | - [Install Poetry](https://python-poetry.org/docs/#installation):
29 | ```bash
30 | curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python -
31 | ```
32 | - Install dependencies:
33 | ```bash
34 | poetry install
35 | ```
36 | 
37 | Now you can run tests and use `doltcli`.
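
As a quick smoke test, here is a minimal sketch of the third Pandas option above. It is illustrative only: it assumes `pandas` is installed and that a Dolt database containing a `users` table already exists at `path/to/db` (both names are placeholders):

```python
import pandas as pd
import doltcli as dolt

# point doltcli at an existing Dolt database directory
db = dolt.Dolt("path/to/db")

# read_rows returns a list of dicts, one per row
rows = dolt.read_rows(db, "users")

# hand the records to pandas
df = pd.DataFrame.from_records(rows)
print(df.head())
```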
38 | 
--------------------------------------------------------------------------------
/conda/conda_build_config.yaml:
--------------------------------------------------------------------------------
1 | python:
2 |   - 3.9
3 |   - 3.8
4 |   - 3.7
5 |   - 3.6
6 | 
--------------------------------------------------------------------------------
/conda/meta.yaml:
--------------------------------------------------------------------------------
 1 | {% set version = "0.1.4" %}
 2 | 
 3 | package:
 4 |   name: doltcli
 5 |   version: {{ version }}
 6 | 
 7 | build:
 8 |   script:
 9 |     - PIP_NO_INDEX="False" $PYTHON -m pip install doltcli=={{ version }}
10 | 
11 | requirements:
12 |   build:
13 |     - python {{ python }}
14 |   run:
15 |     - python
16 |     - dataclasses
17 | 
18 | about:
19 |   home: https://github.com/dolthub/doltcli
20 | 
--------------------------------------------------------------------------------
/doltcli/__init__.py:
--------------------------------------------------------------------------------
 1 | from .dolt import (
 2 |     Branch,
 3 |     Commit,
 4 |     Dolt,
 5 |     DoltException,
 6 |     DoltHubContext,
 7 |     KeyPair,
 8 |     Remote,
 9 |     Status,
10 |     Table,
11 |     _execute,
12 | )
13 | from .types import BranchT, CommitT, DoltT, KeyPairT, RemoteT, StatusT, TableT
14 | from .utils import (
15 |     CREATE,
16 |     FORCE_CREATE,
17 |     REPLACE,
18 |     UPDATE,
19 |     columns_to_rows,
20 |     detach_head,
21 |     read_columns,
22 |     read_columns_sql,
23 |     read_rows,
24 |     read_rows_sql,
25 |     set_dolt_path,
26 |     write_columns,
27 |     write_file,
28 |     write_rows,
29 | )
30 | 
--------------------------------------------------------------------------------
/doltcli/dolt.py:
--------------------------------------------------------------------------------
 1 | import csv
 2 | import datetime
 3 | import json
 4 | import logging
 5 | import os
 6 | import shutil
 7 | import tempfile
 8 | from collections import OrderedDict
 9 | from subprocess import PIPE, Popen
10 | from typing import Any, Callable, Dict, List, Optional, Tuple, Union
11 | 
12 | from .types import BranchT, CommitT, DoltT, KeyPairT, RemoteT, StatusT, TableT
13 | from .utils import (
14 |     read_columns,
15 |     read_columns_sql,
16 |     read_rows,
17 |     read_rows_sql,
18 |     to_list,
19 |     write_columns,
20 |     write_file,
21 |     write_rows,
22 | )
23 | 
24 | global logger
25 | logger = logging.getLogger(__name__)
26 | 
27 | 
28 | SQL_OUTPUT_PARSERS = {
29 |     "csv": lambda fh: list(csv.DictReader(fh)),
30 |     "json": lambda fh: json.load(fh),
31 | }
32 | 
33 | 
34 | class DoltException(Exception):
35 | 
36 |     """
37 |     A class representing a Dolt exception.
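 
    Illustrative sketch of handling it (``repo`` is an assumed ``Dolt``
    instance and the query is a placeholder)::
 
        try:
            repo.sql("select * from no_such_table", result_format="csv")
        except DoltException as e:
            logger.error(f"dolt exited with {e.exitcode}: {e.stderr}")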
 38 |     """
 39 | 
 40 |     def __init__(
 41 |         self,
 42 |         exec_args,
 43 |         stdout: Optional[Union[str, bytes]] = None,
 44 |         stderr: Optional[Union[str, bytes]] = None,
 45 |         exitcode: Optional[int] = 1,
 46 |     ):
 47 |         super().__init__(exec_args, stdout, stderr, exitcode)
 48 |         self.exec_args = exec_args
 49 |         self.stdout = stdout
 50 |         self.stderr = stderr
 51 |         self.exitcode = exitcode
 52 | 
 53 | 
 54 | class DoltServerNotRunningException(Exception):
 55 |     def __init__(self, message):
 56 |         self.message = message
 57 | 
 58 | 
 59 | class DoltWrongServerException(Exception):
 60 |     def __init__(self, message):
 61 |         self.message = message
 62 | 
 63 | 
 64 | class DoltDirectoryException(Exception):
 65 |     def __init__(self, message):
 66 |         self.message = message
 67 | 
 68 | 
 69 | def _execute(args: List[str], cwd: Optional[str] = None, outfile: Optional[str] = None):
 70 |     from .utils import DOLT_PATH
 71 | 
 72 |     _args = [DOLT_PATH] + args
 73 |     str_args = " ".join(" ".join(args).split())
 74 |     logger.info(str_args)
 75 |     if outfile:
 76 |         with open(outfile, "w", newline="") as f:
 77 |             proc = Popen(args=_args, cwd=cwd, stdout=f, stderr=PIPE)
 78 |     else:
 79 |         proc = Popen(args=_args, cwd=cwd, stdout=PIPE, stderr=PIPE)
 80 |     out, err = (val.decode("utf8") if val else "" for val in proc.communicate())
 81 |     exitcode = proc.returncode
 82 | 
 83 |     if exitcode != 0:
 84 |         logger.error(err)
 85 |         raise DoltException(str_args, out, err, exitcode)
 86 | 
 87 |     if outfile:
 88 |         return outfile
 89 |     else:
 90 |         return out
 91 | 
 92 | 
 93 | class Status(StatusT):
 94 |     """
 95 |     Represents the current status of a Dolt repo. The `is_clean` field is True if the working set
 96 |     is clean and False otherwise. If the working set is not clean, the changes are stored in two
 97 |     maps, one for added tables and one for modifications; each maps a table name to a flag indicating whether the change is staged.
 98 |     """
 99 | 
100 |     pass
101 | 
102 | 
103 | class Table(TableT):
104 |     """
105 |     Represents a Dolt table in the working set.
106 |     """
107 | 
108 |     def __str__(self):
109 |         return f"Table(name: {self.name}, table_hash: {self.table_hash}, rows: {self.rows}, system: {self.system})"
110 | 
111 | 
112 | class Commit(CommitT):
113 |     """
114 |     Represents metadata about a commit, including a ref, timestamp, and author, to make it easier to sort and present
115 |     to the user.
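 
    A short, illustrative sketch of working with parsed commits (``repo`` is an
    assumed ``Dolt`` instance)::
 
        for ref, commit in repo.log(number=5).items():
            print(commit)   # "<ref>: <author> @ <timestamp>, <message>"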
116 | """ 117 | 118 | def __str__(self): 119 | return f"{self.ref}: {self.author} @ {self.timestamp}, {self.message}" 120 | 121 | def is_merge(self): 122 | return isinstance(self.parents, tuple) 123 | 124 | def append_parent(self, parent: str): 125 | if isinstance(self.parents, tuple): 126 | raise ValueError("Already has a merge parent set") 127 | elif isinstance(self.parents, str): 128 | self.parents = [self.parents, parent] 129 | self.merge = True 130 | elif not self.parents: 131 | logger.warning("No merge parents set") 132 | return 133 | 134 | @classmethod 135 | def get_log_table_query( 136 | cls, 137 | number: Optional[int] = None, 138 | commit: Optional[str] = None, 139 | head: Optional[str] = None, 140 | ): 141 | base = """ 142 | select 143 | dc.`commit_hash` as commit_hash, 144 | dca.`parent_hash` as parent_hash, 145 | `committer` as committer, 146 | `email` as email, 147 | `date` as date, 148 | `message` as message 149 | from 150 | dolt_log as dc 151 | left outer join dolt_commit_ancestors as dca 152 | on dc.commit_hash = dca.commit_hash 153 | """ 154 | 155 | if commit is not None: 156 | base += f"\nWHERE dc.`commit_hash`='{commit}'" 157 | 158 | base += "\nORDER BY `date` DESC" 159 | 160 | if number is not None: 161 | base += f"\nLIMIT {number}" 162 | 163 | return base 164 | 165 | @classmethod 166 | def parse_dolt_log_table(cls, rows: List[dict]) -> Dict: 167 | commits: Dict[str, Commit] = OrderedDict() 168 | for row in rows: 169 | ref = row["commit_hash"] 170 | if ref in commits: 171 | commits[ref].append_parent(row["parent_hash"]) 172 | else: 173 | commit = Commit( 174 | ref=row["commit_hash"], 175 | timestamp=row["date"], 176 | author=row["committer"], 177 | email=row["email"], 178 | message=row["message"], 179 | parents=row["parent_hash"], 180 | merge=False, 181 | ) 182 | commits[ref] = commit 183 | 184 | return commits 185 | 186 | 187 | class KeyPair(KeyPairT): 188 | """ 189 | Represents a key pair generated by Dolt for authentication with remotes. 190 | """ 191 | 192 | def __init__(self, public_key: str, key_id: str, active: bool): 193 | self.public_key = public_key 194 | self.key_id = key_id 195 | self.active = active 196 | 197 | 198 | class Branch(BranchT): 199 | """ 200 | Represents a branch, along with the commit it points to. 201 | """ 202 | 203 | def __str__(self): 204 | return f"branch name: {self.name}, hash:{self.hash}" 205 | 206 | 207 | class Remote(RemoteT): 208 | """ 209 | Represents a remote, effectively a name and URL pair. 
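 
    For example (illustrative; assumes ``repo`` is a ``Dolt`` instance with at
    least one remote configured)::
 
        for r in repo.remote():   # list mode returns a List[Remote]
            print(r.name, r.url)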
210 | """ 211 | 212 | pass 213 | 214 | 215 | class DoltHubContext: 216 | def __init__( 217 | self, 218 | db_path: str, 219 | path: Optional[str] = None, 220 | remote: str = "origin", 221 | tables_to_read: Optional[List[str]] = None, 222 | ): 223 | self.db_path = db_path 224 | self.path = ( 225 | os.path.join(tempfile.mkdtemp(), self._get_db_name(db_path)) if not path else path 226 | ) 227 | self.remote = remote 228 | self.dolt = None 229 | self.tables_to_read = tables_to_read 230 | 231 | @classmethod 232 | def _get_db_name(cls, db_path): 233 | split = db_path.split("/") 234 | if len(split) != 2: 235 | raise ValueError(f"Invalid DoltHub path {db_path}") 236 | return split[1] 237 | 238 | def __enter__(self): 239 | try: 240 | dolt = Dolt(self.path) 241 | logger.info( 242 | f'Dolt database found at path provided ({self.path}), pulling from remote "{self.remote}"' 243 | ) 244 | dolt.pull(self.remote) 245 | except ValueError: 246 | if self.db_path is None: 247 | raise ValueError("Cannot clone remote data without db_path set") 248 | if self.tables_to_read: 249 | logger.info(f"Running read-tables, creating a fresh copy of {self.db_path}") 250 | dolt = Dolt.read_tables(self.db_path, "master", tables=self.tables_to_read) 251 | else: 252 | logger.info(f"Running clone, cloning remote {self.db_path}") 253 | dolt = Dolt.clone(self.db_path, self.path) 254 | 255 | self.dolt = dolt 256 | return self 257 | 258 | def __exit__(self, type, value, traceback): 259 | pass 260 | 261 | 262 | class Dolt(DoltT): 263 | """ 264 | This class wraps the Dolt command line interface, mimicking functionality exactly to the extent that is possible. 265 | Some commands simply do not translate to Python, such as `dolt sql` (with no arguments) since that command 266 | launches an interactive shell. 267 | """ 268 | 269 | def __init__(self, repo_dir: str, print_output: Optional[bool] = None): 270 | # allow ~ to be used in paths 271 | repo_dir = os.path.expanduser(repo_dir) 272 | self.repo_dir = repo_dir 273 | self._print_output = print_output or False 274 | 275 | if not os.path.exists(os.path.join(self.repo_dir, ".dolt")): 276 | raise ValueError(f"{self.repo_dir} is not a valid Dolt repository") 277 | 278 | @property 279 | def repo_name(self): 280 | return os.path.basename(os.path.normpath(self.repo_dir)).replace("-", "_") 281 | 282 | @property 283 | def head(self): 284 | head_hash = "HASHOF('HEAD')" 285 | head_commit = self.sql(f"select {head_hash} as hash", result_format="csv")[0].get( 286 | "hash", None 287 | ) 288 | if not head_commit: 289 | raise ValueError("Head not found") 290 | return head_commit 291 | 292 | @property 293 | def working(self): 294 | working = self.sql( 295 | f"select @@{self.repo_name}_working as working", result_format="csv" 296 | )[0].get("working", None) 297 | if not working: 298 | raise ValueError("Working head not found") 299 | return working 300 | 301 | @property 302 | def active_branch(self): 303 | active_branch = self.sql("select active_branch() as a", result_format="csv")[0].get( 304 | "a", None 305 | ) 306 | if not active_branch: 307 | raise ValueError("Active branch not found") 308 | return active_branch 309 | 310 | def execute( 311 | self, 312 | args: List[str], 313 | print_output: Optional[bool] = None, 314 | stdout_to_file: str = None, 315 | error: bool = True, 316 | ) -> str: 317 | """ 318 | Manages executing a dolt command, pass all commands, sub-commands, and arguments as they would appear on the 319 | command line. 
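 
        A minimal sketch (the repository path and branch name are placeholders)::
 
            repo = Dolt("path/to/repo")
            out = repo.execute(["checkout", "-b", "feature-branch"])
 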
320 |         :param args:
321 |         :param print_output:
322 |         :param stdout_to_file:
323 |         :return:
324 |         """
325 |         if print_output and stdout_to_file is not None:
326 |             raise ValueError("Cannot print output and send it to a file")
327 | 
328 |         if not error:
329 |             try:
330 |                 output = _execute(args, self.repo_dir, outfile=stdout_to_file)
331 |             except DoltException as e:
332 |                 output = repr(e)
333 |         else:
334 |             output = _execute(args, self.repo_dir, outfile=stdout_to_file)
335 | 
336 |         print_output = print_output or self._print_output
337 |         if print_output:
338 |             logger.info(output)
339 | 
340 |         if stdout_to_file:
341 |             return stdout_to_file
342 |         else:
343 |             return output
344 | 
345 |     @staticmethod
346 |     def init(repo_dir: Optional[str] = None, error: bool = False) -> "Dolt":
347 |         """
348 |         Creates a new repository in the directory specified, creating the directory if it does not
349 |         already exist, and returns a `Dolt` object representing the newly created repo.
350 |         :return:
351 |         """
352 |         if not repo_dir:
353 |             repo_dir = os.getcwd()
354 | 
355 |         os.makedirs(repo_dir, exist_ok=True)
356 |         logger.info(f"Initializing Dolt repo in {repo_dir}")
357 | 
358 |         try:
359 |             _execute(["init"], cwd=repo_dir)
360 |         except DoltException:
361 |             if error:
362 |                 raise
363 |         return Dolt(repo_dir)
364 | 
365 |     @staticmethod
366 |     def version():
367 |         return _execute(["version"], cwd=os.getcwd()).split(" ")[2].strip()
368 | 
369 |     def status(self, **kwargs) -> Status:
370 |         """
371 |         Parses the status of this repository into a `Status` object.
372 |         :return:
373 |         """
374 |         new_tables: Dict[str, bool] = {}
375 |         changes: Dict[str, bool] = {}
376 | 
377 |         output = self.execute(["status"], print_output=False, **kwargs).split("\n")
378 | 
379 |         if "clean" in "\n".join(output):
380 |             return Status(True, changes, new_tables)
381 |         else:
382 |             staged = False
383 |             for line in output:
384 |                 _line = line.lstrip()
385 |                 if _line.startswith("Changes to be committed"):
386 |                     staged = True
387 |                 elif _line.startswith("Changes not staged for commit"):
388 |                     staged = False
389 |                 elif _line.startswith("Untracked files"):
390 |                     staged = False
391 |                 elif _line.startswith("modified"):
392 |                     changes[_line.split(":")[1].lstrip()] = staged
393 |                 elif _line.startswith("new table"):
394 |                     new_tables[_line.split(":")[1].lstrip()] = staged
395 |                 else:
396 |                     pass
397 | 
398 |             return Status(False, changes, new_tables)
399 | 
400 |     def add(self, tables: Union[str, List[str]], **kwargs) -> Status:
401 |         """
402 |         Adds the table or list of tables in the working tree to staging.
403 |         :param tables:
404 |         :return:
405 |         """
406 |         self.execute(["add"] + to_list(tables), **kwargs)
407 |         return self.status()
408 | 
409 |     def reset(
410 |         self,
411 |         tables: Union[str, List[str]] = [],
412 |         revision: str = "",
413 |         hard: bool = False,
414 |         soft: bool = False,
415 |         **kwargs,
416 |     ):
417 |         """
418 |         Reset a table or set of tables that have changes in the working set to their value at the tip of the current
419 |         branch.
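 
        For example, a hedged sketch (the table name is a placeholder)::
 
            repo.reset(tables="users")   # unstage changes to one table
            repo.reset(hard=True)        # discard all working-set changes
 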
420 |         :param tables:
421 |         :param hard:
422 |         :param soft:
423 |         :return:
424 |         """
425 |         if not isinstance(tables, (str, list)):
426 |             raise ValueError(f"tables should be: Union[str, List[str]]; found {type(tables)}")
427 | 
428 |         to_reset = to_list(tables)
429 | 
430 |         args = ["reset"]
431 | 
432 |         if hard and soft:
433 |             raise ValueError("Specify one of: hard=True, soft=True")
434 | 
435 |         if (hard or soft) and to_reset:
436 |             raise ValueError("Specify either hard/soft flag, or tables to reset")
437 | 
438 |         if to_reset and revision != "":
439 |             raise ValueError("Specify either revision or tables to reset")
440 | 
441 |         if revision != "":
442 |             args.append(revision)
443 | 
444 |         if hard:
445 |             args.append("--hard")
446 |         elif soft:
447 |             args.append("--soft")
448 |         elif not tables:
449 |             args.append("--soft")
450 |         else:
451 |             args += to_reset
452 | 
453 |         self.execute(args, **kwargs)
454 | 
455 |     def commit(
456 |         self,
457 |         message: Optional[str] = None,
458 |         allow_empty: bool = False,
459 |         date: Optional[datetime.datetime] = None,
460 |         **kwargs,
461 |     ):
462 |         """
463 |         Create a commit with the changes in the working set that are currently staged.
464 |         :param message:
465 |         :param allow_empty:
466 |         :param date:
467 |         :return:
468 |         """
469 |         if message is None:
470 |             message = ""
471 | 
472 |         args = ["commit", "-m", message]
473 | 
474 |         if allow_empty:
475 |             args.append("--allow-empty")
476 | 
477 |         if date:
478 |             # TODO format properly
479 |             args.extend(["--date", str(date)])
480 | 
481 |         self.execute(args, **kwargs)
482 | 
483 |     def merge(
484 |         self, branch: str, message: Optional[str] = None, squash: bool = False, **kwargs
485 |     ):
486 |         """
487 |         Executes a merge operation. If conflicts result, the merge is aborted, since an interactive
488 |         merge does not make sense in a scripting environment.
489 |         :param branch: name of the branch to merge into the current branch
490 |         :param message: message to be used for the merge commit only in the case of an automatic
491 |         merge.
In case of an automatic merge without a message provided, the commit message will be
492 |         "Merge branch '<branch>' into '<current_branch>'"
493 |         :param squash: squash the commits from the merged branch into a single commit
494 |         :return:
495 |         """
496 |         current_branch, branches = self._get_branches()
497 |         if not self.status().is_clean:
498 |             err = f"Changes in the working set, please commit before merging {branch} to {current_branch.name}"
499 |             raise ValueError(err)
500 |         if branch not in [branch.name for branch in branches]:
501 |             raise ValueError(
502 |                 f"Trying to merge in non-existent branch {branch} to {current_branch.name}"
503 |             )
504 | 
505 |         logger.info(f"Merging {branch} into {current_branch.name}")
506 |         args = ["merge"]
507 | 
508 |         if squash:
509 |             args.append("--squash")
510 |         if message:
511 |             args.extend(["--message", message])
512 |         args.append(branch)
513 |         output = self.execute(args, **kwargs).split("\n")
514 | 
515 |         # TODO: this was and remains a hack, we need to parse the output properly
516 |         if len(output) > 1 and "Fast-forward" in output[0]:
517 |             logger.info(f"Completed fast-forward merge of {branch} into {current_branch.name}")
518 |             return
519 | 
520 |         # TODO: this was and remains a hack, we need to parse the output properly
521 |         merge_conflict_pos = 8
522 |         if len(output) > 1 and output[merge_conflict_pos].startswith("CONFLICT"):
523 |             logger.warning(
524 |                 f"""
525 |                 The following merge conflict occurred merging {branch} to {current_branch.name}:
526 |                 {output[merge_conflict_pos]}
527 |                 """
528 |             )
529 |             logger.warning("Aborting as interactive merge not supported in Doltpy")
530 |             abort_args = ["merge", "--abort"]
531 |             self.execute(abort_args)
532 |             return
533 | 
534 |         if message is None:
535 |             message = f"Merged {branch} into {current_branch.name}"
536 |         logger.info(message)
537 | 
538 |     def sql(
539 |         self,
540 |         query: Optional[str] = None,
541 |         result_format: Optional[str] = None,
542 |         execute: bool = False,
543 |         save: Optional[str] = None,
544 |         message: Optional[str] = None,
545 |         list_saved: bool = False,
546 |         batch: bool = False,
547 |         multi_db_dir: Optional[str] = None,
548 |         result_file: Optional[str] = None,
549 |         result_parser: Optional[Callable[[str], Any]] = None,
550 |         **kwargs,
551 |     ):
552 |         """
553 |         Execute a SQL query, using the options to dictate how it is executed, and where the output goes.
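 
        A minimal sketch of the common read path (the table name is a placeholder)::
 
            rows = repo.sql("select * from users", result_format="csv")
            # rows is a list of dicts, one per result row
 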
554 |         :param query: query to be executed
555 |         :param result_format: the format of the returned results, one of "csv" or "json"
556 |         :param execute: execute a saved query, not valid with other parameters
557 |         :param save: use the name provided to save the value of query
558 |         :param message: the message associated with the saved query, if any
559 |         :param list_saved: print out a list of saved queries
560 |         :param batch: execute in batch mode, one statement after the other delimited by ;
561 |         :param multi_db_dir: use a directory of Dolt repos, each one treated as a database
562 |         :param result_parser: a callable that is passed the path of a file holding CSV results and parses it
563 |         :return:
564 |         """
565 |         args = ["sql"]
566 | 
567 |         if list_saved:
568 |             if any([query, result_format, save, message, batch, multi_db_dir]):
569 |                 raise ValueError("Incompatible arguments provided")
570 |             args.append("--list-saved")
571 |             return self.execute(args, **kwargs)
572 | 
573 |         if execute:
574 |             if any([query, save, message, list_saved, batch, multi_db_dir]):
575 |                 raise ValueError("Incompatible arguments provided")
576 |             args.extend(["--execute", str(execute)])
577 | 
578 |         if multi_db_dir:
579 |             args.extend(["--multi-db-dir", multi_db_dir])
580 | 
581 |         if batch:
582 |             args.append("--batch")
583 | 
584 |         if save:
585 |             args.extend(["--save", save])
586 |             if message:
587 |                 args.extend(["--message", message])
588 | 
589 |         # route the results according to the output options provided
590 |         if result_parser is not None:
591 |             if query is None:
592 |                 raise ValueError("Must provide a query in order to specify a result format")
593 |             args.extend(["--query", query])
594 | 
595 |             try:
596 |                 d = tempfile.mkdtemp()
597 |                 args.extend(["--result-format", "csv"])
598 |                 f = os.path.join(d, "tmpfile")
599 |                 output_file = self.execute(args, stdout_to_file=f, **kwargs)
600 |                 if not hasattr(result_parser, "__call__"):
601 |                     raise ValueError(
602 |                         f"Invalid argument: `result_parser` should be Callable; found {type(result_parser)}"
603 |                     )
604 |                 return result_parser(output_file)
605 |             finally:
606 |                 shutil.rmtree(d, ignore_errors=True, onerror=None)
607 |         elif result_file is not None:
608 |             if query is None:
609 |                 raise ValueError("Must provide a query in order to specify a result format")
610 |             args.extend(["--query", query])
611 | 
612 |             args.extend(["--result-format", "csv"])
613 |             output_file = self.execute(args, stdout_to_file=result_file, **kwargs)
614 |             return output_file
615 |         elif result_format in ["csv", "json"]:
616 |             if query is None:
617 |                 raise ValueError("Must provide a query in order to specify a result format")
618 |             args.extend(["--query", query])
619 | 
620 |             try:
621 |                 d = tempfile.mkdtemp()
622 |                 f = os.path.join(d, "tmpfile")
623 |                 args.extend(["--result-format", result_format])
624 |                 output_file = self.execute(args, stdout_to_file=f, **kwargs)
625 |                 with open(output_file, newline="") as fh:
626 |                     return SQL_OUTPUT_PARSERS[result_format](fh)
627 |             finally:
628 |                 shutil.rmtree(d, ignore_errors=True, onerror=None)
629 | 
630 |         logger.warning("Must provide a value for result_format to get output back")
631 |         if query is not None:
632 |             args.extend(["--query", query])
633 | 
634 |         self.execute(args, **kwargs)
635 | 
636 |     def log(self, number: Optional[int] = None, commit: Optional[str] = None) -> Dict:
637 |         """
638 |         Parses the log created by running the log command into instances of `Commit` that provide detail of the
639 |         commit, including timestamp and hash.
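 
        Illustrative usage (``repo`` is an assumed ``Dolt`` instance)::
 
            commits = repo.log(number=3)           # OrderedDict keyed by commit hash
            latest = next(iter(commits.values()))
            print(latest.ref, latest.message)
 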
640 |         :param number:
641 |         :param commit:
642 |         :return:
643 |         """
644 |         res = read_rows_sql(
645 |             self,
646 |             sql=Commit.get_log_table_query(number=number, commit=commit, head=self.head),
647 |         )
648 |         commits = Commit.parse_dolt_log_table(res)
649 |         return commits
650 | 
651 |     def diff(
652 |         self,
653 |         commit: Optional[str] = None,
654 |         other_commit: Optional[str] = None,
655 |         tables: Optional[Union[str, List[str]]] = None,
656 |         data: bool = False,
657 |         schema: bool = False,  # can we even support this?
658 |         summary: bool = False,
659 |         sql: bool = False,
660 |         where: Optional[str] = None,
661 |         limit: Optional[int] = None,
662 |         **kwargs,
663 |     ):
664 |         """
665 |         Executes a diff command and prints the output. In the future we plan to create a diff object that will allow
666 |         for programmatic interactions.
667 |         :param commit: commit to diff against the tip of the current branch
668 |         :param other_commit: optionally specify two specific commits if desired
669 |         :param tables: table or list of tables to diff
670 |         :param data: diff only data
671 |         :param schema: diff only schema
672 |         :param summary: summarize the data changes shown, valid only with data
673 |         :param sql: show the diff in terms of SQL
674 |         :param where: apply a where clause to data diffs
675 |         :param limit: limit the number of rows shown in a data diff
676 |         :return:
677 |         """
678 |         switch_count = [el for el in [data, schema, summary] if el]
679 |         if len(switch_count) > 1:
680 |             raise ValueError("At most one of data, schema, summary can be set to True")
681 | 
682 |         args = ["diff"]
683 | 
684 |         if data:
685 |             if where:
686 |                 args.extend(["--where", where])
687 |             if limit:
688 |                 args.extend(["--limit", str(limit)])
689 | 
690 |         if summary:
691 |             args.append("--summary")
692 | 
693 |         if schema:
694 |             args.append("--schema")
695 | 
696 |         if sql:
697 |             args.append("--sql")
698 | 
699 |         if commit:
700 |             args.append(commit)
701 |         if other_commit:
702 |             args.append(other_commit)
703 | 
704 |         if tables:
705 |             args.append(" ".join(to_list(tables)))
706 | 
707 |         self.execute(args, **kwargs)
708 | 
709 |     def blame(self, table_name: str, rev: Optional[str] = None, **kwargs):
710 |         """
711 |         Executes a blame command that prints out a table that shows the authorship of the last change to a row.
712 |         :param table_name:
713 |         :param rev:
714 |         :return:
715 |         """
716 |         args = ["blame"]
717 | 
718 |         if rev:
719 |             args.append(rev)
720 | 
721 |         args.append(table_name)
722 |         self.execute(args, **kwargs)
723 | 
724 |     def branch(
725 |         self,
726 |         branch_name: Optional[str] = None,
727 |         start_point: Optional[str] = None,
728 |         new_branch: Optional[str] = None,
729 |         force: bool = False,
730 |         delete: bool = False,
731 |         copy: bool = False,
732 |         move: bool = False,
733 |         remote: bool = False,
734 |         all: bool = False,
735 |         **kwargs,
736 |     ):
737 |         """
738 |         List, create, or delete branches.
739 | 
740 |         If 'branch_name' is None, existing branches are listed, including remotely tracked branches
741 |         if 'remote' or 'all' are set. If 'branch_name' is provided, a new branch is created, checked
742 |         out, deleted, moved, or copied.
743 | 
744 |         :param branch_name: Name of branch to checkout, create, delete, move, or copy.
745 |         :param start_point: A commit that a new branch should point at.
746 |         :param new_branch: Name of branch to copy to or rename to if 'copy' or 'move' is set.
747 |         :param force: Reset 'branch_name' to 'start_point', even if 'branch_name' exists already.
748 |                Without 'force', dolt branch refuses to change an existing branch.
In combination with
749 |                'delete', allow deleting the branch irrespective of its merged status. In
750 |                combination with 'move', allow renaming the branch even if the new branch name
751 |                already exists; the same applies for 'copy'.
752 |         :param delete: Delete a branch. The branch must be fully merged in its upstream branch.
753 |         :param copy: Create a copy of a branch.
754 |         :param move: Move/rename a branch. If 'new_branch' does not exist, 'branch_name' will be
755 |                renamed to 'new_branch'. If 'new_branch' exists, 'force' must be used to force the
756 |                rename to happen.
757 |         :param remote: When in list mode, show only remotely tracked branches, unless 'all' is true.
758 |                When used with 'delete', delete a remote tracking branch.
759 |         :param all: When in list mode, show both local and remotely tracked branches.
760 | 
761 |         :return: active_branch, branches
762 |         """
763 |         switch_count = [el for el in [delete, copy, move] if el]
764 |         if len(switch_count) > 1:
765 |             raise ValueError("At most one of delete, copy, move can be set to True")
766 | 
767 |         if not any([branch_name, delete, copy, move]):
768 |             if force:
769 |                 raise ValueError(
770 |                     "force is not valid without providing a new branch name, or copy, move, or delete being true"
771 |                 )
772 |             return self._get_branches(remote=remote, all=all)
773 | 
774 |         args = ["branch"]
775 |         if force:
776 |             args.append("--force")
777 | 
778 |         def execute_wrapper(command_args: List[str]):
779 |             self.execute(command_args, **kwargs)
780 |             return self._get_branches()
781 | 
782 |         if branch_name and not (delete or copy or move):
783 |             args.append(branch_name)
784 |             if start_point:
785 |                 args.append(start_point)
786 |             return execute_wrapper(args)
787 | 
788 |         if copy:
789 |             if not new_branch:
790 |                 raise ValueError("must provide new_branch when copying a branch")
791 |             args.append("--copy")
792 |             if branch_name:
793 |                 args.append(branch_name)
794 |             args.append(new_branch)
795 |             return execute_wrapper(args)
796 | 
797 |         if delete:
798 |             if not branch_name:
799 |                 raise ValueError("must provide branch_name when deleting")
800 |             args.extend(["--delete", branch_name])
801 |             if remote:
802 |                 args.append("--remote")
803 |             return execute_wrapper(args)
804 | 
805 |         if move:
806 |             if not new_branch:
807 |                 raise ValueError("must provide new_branch when moving a branch")
808 |             args.append("--move")
809 |             if branch_name:
810 |                 args.append(branch_name)
811 |             args.append(new_branch)
812 |             return execute_wrapper(args)
813 | 
814 |         if branch_name:
815 |             args.append(branch_name)
816 |             if start_point:
817 |                 args.append(start_point)
818 |             return execute_wrapper(args)
819 | 
820 |         return self._get_branches(remote=remote, all=all)
821 | 
822 |     def _get_branches(self, remote: bool = False, all: bool = False) -> Tuple[Branch, List[Branch]]:
823 |         """
824 |         Gets the branches for this repository, optionally including remote branches, and optionally
825 |         including all.
826 | 
827 |         :param remote: include remotely tracked branches. If all is false and remote is true, only
828 |                remotely tracked branches are returned. If all is true, both local and remote are included.
829 |                Default is False.
830 |         :param all: include both local and remotely tracked branches.
Default is False.
831 |         :return: active_branch, branches
832 |         """
833 |         local_dicts = read_rows_sql(self, sql="select * from dolt_branches")
834 |         dicts = []
835 |         if all:
836 |             dicts = local_dicts + read_rows_sql(self, sql="select * from dolt_remote_branches")
837 |         elif remote:
838 |             dicts = read_rows_sql(self, sql="select * from dolt_remote_branches")
839 |         else:
840 |             dicts = local_dicts
841 | 
842 |         # find active_branch
843 |         ab_dicts = read_rows_sql(
844 |             self, "select * from dolt_branches where name = (select active_branch())"
845 |         )
846 |         if len(ab_dicts) != 1:
847 |             raise ValueError(
848 |                 "Ensure you have the latest version of Dolt installed, this is fixed as of 0.24.2"
849 |             )
850 |         active_branch = Branch(**ab_dicts[0])
851 |         if not active_branch:
852 |             raise DoltException("Failed to set active branch")
853 | 
854 |         branches = [Branch(**d) for d in dicts]
855 | 
856 |         return active_branch, branches
857 | 
858 |     def checkout(
859 |         self,
860 |         branch: Optional[str] = None,
861 |         tables: Optional[Union[str, List[str]]] = None,
862 |         checkout_branch: bool = False,
863 |         start_point: Optional[str] = None,
864 |         track: Optional[str] = None,
865 |         **kwargs,
866 |     ):
867 |         """
868 |         Checkout an existing branch, or create a new one, optionally at a specified commit. Or, checkout a table or list
869 |         of tables.
870 |         :param branch: branch to checkout or create
871 |         :param tables: table or tables to checkout
872 |         :param checkout_branch: create the branch and check it out (equivalent to the -b flag)
873 |         :param start_point: tip of new branch
874 |         :param track: the upstream branch to track
875 |         :return:
876 |         """
877 |         if tables and branch:
878 |             raise ValueError("No tables may be provided when creating a branch with checkout")
879 |         args = ["checkout"]
880 | 
881 |         if branch:
882 |             if checkout_branch:
883 |                 args.append("-b")
884 |             args.append(branch)
885 |             if start_point:
886 |                 args.append(start_point)
887 | 
888 |         if tables:
889 |             args.append(" ".join(to_list(tables)))
890 | 
891 |         if track is not None:
892 |             args.append("--track")
893 |             args.append(track)
894 | 
895 |         self.execute(args, **kwargs)
896 | 
897 |     def remote(
898 |         self,
899 |         add: bool = False,
900 |         name: Optional[str] = None,
901 |         url: Optional[str] = None,
902 |         remove: bool = False,
903 |         **kwargs,
904 |     ):
905 |         """
906 |         Add or remove remotes to this repository. Note we do not currently support some more esoteric options for using
907 |         AWS and GCP backends, but will do so in a future release.
908 |         :param add:
909 |         :param name:
910 |         :param url:
911 |         :param remove:
912 |         :return:
913 |         """
914 |         args = ["remote", "--verbose"]
915 | 
916 |         if not (add or remove):
917 |             output = self.execute(args, print_output=False, **kwargs).split("\n")
918 | 
919 |             remotes = []
920 |             for line in output:
921 |                 if not line:
922 |                     break
923 | 
924 |                 split = line.lstrip().split()
925 |                 remotes.append(Remote(split[0], split[1]))
926 | 
927 |             return remotes
928 | 
929 |         if remove:
930 |             if add:
931 |                 raise ValueError("add and remove are not compatible")
932 |             if not name:
933 |                 raise ValueError("Must provide the name of a remote to remove")
934 |             args.extend(["remove", name])
935 | 
936 |         if add:
937 |             if not (name and url):
938 |                 raise ValueError("Must provide name and url to add")
939 |             args.extend(["add", name, url])
940 | 
941 |         self.execute(args, **kwargs)
942 | 
943 |     def push(
944 |         self,
945 |         remote: str,
946 |         refspec: Optional[str] = None,
947 |         set_upstream: bool = False,
948 |         force: bool = False,
949 |         **kwargs,
950 |     ):
951 |         """
952 |         Push the branch to the specified remote.
If set_upstream is provided, an upstream reference is created for every branch
953 |         successfully pushed.
954 |         :param remote:
955 |         :param refspec: optionally specify a branch to push
956 |         :param set_upstream: add upstream reference for every branch successfully pushed
957 |         :param force: overwrite the history of the upstream with this repo's history
958 |         :return:
959 |         """
960 |         args = ["push"]
961 | 
962 |         if set_upstream:
963 |             args.append("--set-upstream")
964 | 
965 |         if force:
966 |             args.append("--force")
967 | 
968 |         args.append(remote)
969 |         if refspec:
970 |             args.append(refspec)
971 | 
972 |         # just print the output
973 |         self.execute(args, **kwargs)
974 | 
975 |     def pull(self, remote: str = "origin", branch: Optional[str] = None, **kwargs):
976 |         """
977 |         Pull the latest changes from the specified remote.
978 |         :param remote: The remote to pull the changes from
979 |         :param branch: The branch on the remote to pull the changes from
980 |         :return:
981 |         """
982 |         args = ["pull", remote]
983 |         if branch is not None:
984 |             args.append(branch)
985 | 
986 |         self.execute(args, **kwargs)
987 | 
988 |     def fetch(
989 |         self,
990 |         remote: str = "origin",
991 |         refspecs: Optional[Union[str, List[str]]] = None,
992 |         force: bool = False,
993 |         **kwargs,
994 |     ):
995 |         """
996 |         Fetch the specified branch or list of branches from the remote provided, defaults to origin.
997 |         :param remote: the remote to fetch from
998 |         :param refspecs: branch or branches to fetch
999 |         :param force: whether to override local history with remote
1000 |         :return:
1001 |         """
1002 |         args = ["fetch"]
1003 | 
1004 |         if force:
1005 |             args.append("--force")
1006 |         if remote:
1007 |             args.append(remote)
1008 |         if refspecs:
1009 |             args.extend(to_list(refspecs))
1010 | 
1011 |         self.execute(args, **kwargs)
1012 | 
1013 |     @staticmethod
1014 |     def clone(
1015 |         remote_url: str,
1016 |         new_dir: Optional[str] = None,
1017 |         remote: Optional[str] = None,
1018 |         branch: Optional[str] = None,
1019 |         **kwargs,
1020 |     ) -> "Dolt":
1021 |         """
1022 |         Clones the specified DoltHub database into a new directory, or optionally an existing directory provided by the
1023 |         user.
1024 |         :param remote_url:
1025 |         :param new_dir:
1026 |         :param remote:
1027 |         :param branch:
1028 |         :return:
1029 |         """
1030 |         args = ["clone", remote_url]
1031 | 
1032 |         if remote:
1033 |             args.extend(["--remote", remote])
1034 | 
1035 |         if branch:
1036 |             args.extend(["--branch", branch])
1037 | 
1038 |         clone_dir = Dolt._get_clone_dir(new_dir, None if new_dir else remote_url)
1039 |         if not clone_dir:
1040 |             raise ValueError("Unable to infer new_dir")
1041 | 
1042 |         args.append(clone_dir)
1043 | 
1044 |         _execute(args, **kwargs)
1045 | 
1046 |         return Dolt(clone_dir)
1047 | 
1048 |     @classmethod
1049 |     def _get_clone_dir(
1050 |         cls, new_dir: Optional[str] = None, remote_url: Optional[str] = None
1051 |     ) -> str:
1052 |         """
1053 |         Takes either a new_dir to clone into, or infers a directory from the last segment of remote_url.
1054 |         """
1055 |         if not (new_dir or remote_url):
1056 |             raise ValueError("Provide either new_dir or remote_url")
1057 |         elif remote_url:
1058 |             split = remote_url.split("/")
1059 |             inferred_dir = os.path.join(os.getcwd() if not new_dir else new_dir, split[-1])
1060 |             if os.path.exists(inferred_dir):
1061 |                 raise DoltDirectoryException(
1062 |                     f"Path already exists: {inferred_dir}. Cannot create new directory"
1063 |                 )
1064 |             return inferred_dir
1065 |         elif new_dir:
1066 |             return new_dir
1067 |         else:
1068 |             raise ValueError("Provide either new_dir or remote_url")
1069 | 
1070 |     @staticmethod
1071 |     def read_tables(
1072 |         remote_url: str,
1073 |         committish: str,
1074 |         tables: Optional[Union[str, List[str]]] = None,
1075 |         new_dir: Optional[str] = None,
1076 |     ) -> "Dolt":
1077 |         """
1078 |         Reads the specified tables, or all the tables, from the DoltHub database specified into a new local database,
1079 |         at the commit or branch provided. Users can optionally provide an existing directory.
1080 |         :param remote_url:
1081 |         :param committish:
1082 |         :param tables:
1083 |         :param new_dir:
1084 |         :return:
1085 |         """
1086 |         args = ["read-tables"]
1087 | 
1088 |         clone_dir = Dolt._get_clone_dir(new_dir, None if new_dir else remote_url)
1089 |         if not clone_dir:
1090 |             raise ValueError("Unable to infer new_dir")
1091 | 
1092 |         args.extend(["--dir", clone_dir, remote_url, committish])
1093 | 
1094 |         if tables:
1095 |             args.extend(to_list(tables))
1096 | 
1097 |         _execute(args, cwd=new_dir)
1098 | 
1099 |         return Dolt(clone_dir)
1100 | 
1101 |     def creds_new(self) -> bool:
1102 |         """
1103 |         Create a new set of credentials for this Dolt repository.
1104 |         :return:
1105 |         """
1106 |         args = ["creds", "new"]
1107 | 
1108 |         output = self.execute(args, print_output=False).split("\n")
1109 | 
1110 |         if len(output) == 2:
1111 |             for out in output:
1112 |                 logger.info(out)
1113 |         else:
1114 |             output_str = "\n".join(output)
1115 |             raise ValueError(f"Unexpected output: \n{output_str}")
1116 | 
1117 |         return True
1118 | 
1119 |     def creds_rm(self, public_key: str) -> bool:
1120 |         """
1121 |         Remove the key pair identified by the specified public key ID.
1122 |         :param public_key:
1123 |         :return:
1124 |         """
1125 |         args = ["creds", "rm", public_key]
1126 | 
1127 |         output = self.execute(args, print_output=False).split("\n")
1128 | 
1129 |         if output[0].startswith("failed"):
1130 |             logger.error(output[0])
1131 |             raise DoltException("Tried to remove non-existent creds")
1132 | 
1133 |         return True
1134 | 
1135 |     def creds_ls(self) -> List[KeyPair]:
1136 |         """
1137 |         Parse the set of keys this repo has into `KeyPair` objects.
1138 |         :return:
1139 |         """
1140 |         args = ["creds", "ls", "--verbose"]
1141 | 
1142 |         output = self.execute(args, print_output=False).split("\n")
1143 | 
1144 |         creds = []
1145 |         for line in [ln for ln in output if ln]:
1146 |             if line.startswith("*"):
1147 |                 active = True
1148 |                 split = line[1:].lstrip().split(" ")
1149 |             else:
1150 |                 active = False
1151 |                 split = line.lstrip().split(" ")
1152 | 
1153 |             creds.append(KeyPair(split[0], split[1], active))
1154 | 
1155 |         return creds
1156 | 
1157 |     def creds_check(self, endpoint: Optional[str] = None, creds: Optional[str] = None) -> bool:
1158 |         """
1159 |         Check that credentials authenticate with the specified endpoint, return True if authorized, False otherwise.
1160 |         :param endpoint: the endpoint to check
1161 |         :param creds: creds identified by public key ID
1162 |         :return:
1163 |         """
1164 |         args = ["creds", "check"]
1165 | 
1166 |         if endpoint:
1167 |             args.extend(["--endpoint", endpoint])
1168 |         if creds:
1169 |             args.extend(["--creds", creds])
1170 | 
1171 |         output = _execute(args, self.repo_dir).split("\n")
1172 | 
1173 |         if output[3].startswith("error"):
1174 |             logger.error("\n".join(output[3:]))
1175 |             return False
1176 | 
1177 |         return True
1178 | 
1179 |     def creds_use(self, public_key_id: str) -> bool:
1180 |         """
1181 |         Use the credentials specified by the provided public key ID.
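 
        Sketch of a typical flow (illustrative; assumes at least one key pair
        already exists)::
 
            pair = repo.creds_ls()[0]
            repo.creds_use(pair.key_id)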
1179 |     def creds_use(self, public_key_id: str) -> bool:
1180 |         """
1181 |         Use the credentials specified by the provided public key ID.
1182 |         :param public_key_id: the ID of the public key to use
1183 |         :return:
1184 |         """
1185 |         args = ["creds", "use", public_key_id]
1186 | 
1187 |         output = _execute(args, self.repo_dir)
1188 | 
1189 |         if output and output[0].startswith("error"):
1190 |             logger.error("\n".join(output[3:]))
1191 |             raise DoltException("Bad public key")
1192 | 
1193 |         return True
1194 | 
1195 |     def creds_import(self, jwk_filename: str, no_profile: str):
1196 |         """
1197 |         Not currently supported.
1198 |         :param jwk_filename:
1199 |         :param no_profile:
1200 |         :return:
1201 |         """
1202 |         raise NotImplementedError()
1203 | 
1204 |     @classmethod
1205 |     def config_global(
1206 |         cls,
1207 |         name: Optional[str] = None,
1208 |         value: Optional[str] = None,
1209 |         add: bool = False,
1210 |         list: bool = False,
1211 |         get: bool = False,
1212 |         unset: bool = False,
1213 |     ) -> Dict[str, str]:
1214 |         """
1215 |         Class method for manipulating global configs; exactly one of add, list, get, unset must be True.
1216 |         :param name: the config option name
1217 |         :param value: the config option value
1218 |         :param add: add the name/value pair to the config
1219 |         :param list: list the current config
1220 |         :param get: get the value for name
1221 |         :param unset: remove name from the config
1222 |         :return:
1223 |         """
1224 |         return cls._config_helper(
1225 |             global_config=True,
1226 |             cwd=os.getcwd(),
1227 |             name=name,
1228 |             value=value,
1229 |             add=add,
1230 |             list=list,
1231 |             get=get,
1232 |             unset=unset,
1233 |         )
1234 | 
1235 |     def config_local(
1236 |         self,
1237 |         name: Optional[str] = None,
1238 |         value: Optional[str] = None,
1239 |         add: bool = False,
1240 |         list: bool = False,
1241 |         get: bool = False,
1242 |         unset: bool = False,
1243 |     ) -> Dict[str, str]:
1244 |         """
1245 |         Instance method for manipulating configs local to a repository; exactly one of add, list, get, unset must be True.
1246 |         :param name: the config option name
1247 |         :param value: the config option value
1248 |         :param add: add the name/value pair to the config
1249 |         :param list: list the current config
1250 |         :param get: get the value for name
1251 |         :param unset: remove name from the config
1252 |         :return:
1253 |         """
1254 |         return self._config_helper(
1255 |             local_config=True,
1256 |             cwd=self.repo_dir,
1257 |             name=name,
1258 |             value=value,
1259 |             add=add,
1260 |             list=list,
1261 |             get=get,
1262 |             unset=unset,
1263 |         )
1264 | 
1265 |     @classmethod
1266 |     def _config_helper(
1267 |         cls,
1268 |         global_config: bool = False,
1269 |         local_config: bool = False,
1270 |         cwd: Optional[str] = None,
1271 |         name: Optional[str] = None,
1272 |         value: Optional[str] = None,
1273 |         add: bool = False,
1274 |         list: bool = False,
1275 |         get: bool = False,
1276 |         unset: bool = False,
1277 |     ) -> Dict[str, str]:
1278 |         switches = [el for el in [add, list, get, unset] if el]
1279 |         if len(switches) != 1:
1280 |             raise ValueError("Exactly one of add, list, get, unset must be True")
1281 | 
1282 |         args = ["config"]
1283 | 
1284 |         if global_config:
1285 |             args.append("--global")
1286 |         elif local_config:
1287 |             args.append("--local")
1288 |         else:
1289 |             raise ValueError("Must pass either global_config or local_config")
1290 | 
1291 |         if add:
1292 |             if not (name and value):
1293 |                 raise ValueError("For add, name and value must be set")
1294 |             args.extend(["--add", name, value])
1295 |         if list:
1296 |             if name or value:
1297 |                 raise ValueError("For list, name and value must not be provided")
1298 |             args.append("--list")
1299 |         if get:
1300 |             if not name or value:
1301 |                 raise ValueError("For get, name must be provided and value must not")
1302 |             args.extend(["--get", name])
1303 |         if unset:
1304 |             if not name or value:
1305 |                 raise ValueError("For unset, name must be provided and value must not")
1306 |             args.extend(["--unset", name])
1307 | 
1308 |         output = _execute(args, cwd).split("\n")
1309 |         result = {}
1310 |         for line in [x for x in output if x is not None and "=" in x]:
1311 |             split = line.split(" = ")
1312 |             config_name, config_val = split[0], split[1]
1313 |             result[config_name] = config_val
1314 | 
1315 |         return result
1316 | 
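    # A short, hedged sketch of the config helpers above. Exactly one of
    # add/list/get/unset may be set per call; the names and values shown are
    # illustrative placeholders.
    #
    #   Dolt.config_global(add=True, name="user.name", value="Ada Lovelace")
    #   Dolt.config_global(add=True, name="user.email", value="ada@example.com")
    #   current = db.config_local(list=True)          # {"user.name": "...", ...}
    #   db.config_local(get=True, name="user.name")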
1317 |     def ls(self, system: bool = False, all: bool = False, **kwargs) -> List[TableT]:
1318 |         """
1319 |         List the tables in the working set, the system tables, or all. Parses each table and its object hash into
1320 |         an object that also provides a row count.
1321 |         :param system: list system tables only
1322 |         :param all: list both user and system tables
1323 |         :return:
1324 |         """
1325 |         args = ["ls", "--verbose"]
1326 | 
1327 |         if all:
1328 |             args.append("--all")
1329 | 
1330 |         if system:
1331 |             args.append("--system")
1332 | 
1333 |         output = self.execute(args, print_output=False, **kwargs).split("\n")
1334 |         tables: List[TableT] = []
1335 |         system_pos = None
1336 | 
1337 |         if len(output) == 3 and output[0] == "No tables in working set":
1338 |             return tables
1339 | 
1340 |         for i, line in enumerate(output):
1341 |             if line.startswith("Tables") or not line:
1342 |                 pass
1343 |             elif line.startswith("System"):
1344 |                 system_pos = i
1345 |                 break
1346 |             else:
1347 |                 # blank lines were skipped above; remaining rows look like
1348 |                 # "<name> <hash> <row_cnt> rows"
1349 |                 split = line.lstrip().split()
1350 |                 tables.append(Table(name=split[0], row_cnt=int(split[-2])))
1351 | 
1352 |         if system_pos:
1353 |             for line in output[system_pos:]:
1354 |                 if line.startswith("System"):
1355 |                     pass
1356 |                 else:
1357 |                     tables.append(Table(name=line.strip(), system=True))
1358 | 
1359 |         return tables
1360 | 
1361 |     def schema_export(self, table: str, filename: Optional[str] = None):
1362 |         """
1363 |         Export the schema of the specified table to the specified file path.
1364 |         :param table: the table whose schema to export
1365 |         :param filename: the file to export the schema to; logged instead if omitted
1366 |         :return:
1367 |         """
1368 |         args = ["schema", "export", table]
1369 | 
1370 |         if filename:
1371 |             args.append(filename)
1372 |             _execute(args, self.repo_dir)
1373 |             return True
1374 |         else:
1375 |             output = _execute(args, self.repo_dir)
1376 |             logger.info("\n".join(output))
1377 |             return True
1378 | 
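    # For reference, the `ls` parser earlier in this class consumes
    # `dolt ls --verbose` output shaped roughly like the sample below (the
    # sample is illustrative, not captured from a real run):
    #
    #   Tables in working set:
    #        my_table    a1b2c3    42 rows
    #
    # yielding Table(name="my_table", row_cnt=42); with system=True or
    # all=True, names under a "System tables:" header come back with
    # system=True and no row count.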
1379 |     def schema_import(
1380 |         self,
1381 |         table: str,
1382 |         filename: str,
1383 |         create: bool = False,
1384 |         update: bool = False,
1385 |         replace: bool = False,
1386 |         dry_run: bool = False,
1387 |         keep_types: bool = False,
1388 |         file_type: Optional[str] = None,
1389 |         pks: Optional[List[str]] = None,
1390 |         map: Optional[str] = None,
1391 |         float_threshold: Optional[float] = None,
1392 |         delim: Optional[str] = None,
1393 |     ):
1394 |         """
1395 |         This implements Dolt's schema import; it works by inferring a schema from the file provided. It operates in
1396 |         three modes: create, update, and replace. All require a table name. Create and replace require a primary
1397 |         key, as they replace any existing table with a new one built on the newly inferred schema.
1398 | 
1399 |         :param table: name of the table to create or update
1400 |         :param filename: file to infer schema from
1401 |         :param create: create a table
1402 |         :param update: update a table
1403 |         :param replace: replace a table
1404 |         :param dry_run: output the SQL to run, do not execute it
1405 |         :param keep_types: when a column already exists, use its current type
1406 |         :param file_type: type of file used for schema inference
1407 |         :param pks: the list of primary keys
1408 |         :param map: mapping file mapping column name to new value
1409 |         :param float_threshold: minimum fractional component a value must have to be inferred as a float
1410 |         :param delim: the delimiter used in the file being inferred from
1411 |         :return:
1412 |         """
1413 |         switches = [el for el in [create, update, replace] if el]
1414 |         if len(switches) != 1:
1415 |             raise ValueError("Exactly one of create, update, replace must be True")
1416 | 
1417 |         args = ["schema", "import"]
1418 | 
1419 |         if create:
1420 |             args.append("--create")
1421 |             if not pks:
1422 |                 raise ValueError("When create is set to True, pks must be provided")
1423 |         if update:
1424 |             args.append("--update")
1425 |         if replace:
1426 |             args.append("--replace")
1427 |             if not pks:
1428 |                 raise ValueError("When replace is set to True, pks must be provided")
1429 |         if dry_run:
1430 |             args.append("--dry-run")
1431 |         if keep_types:
1432 |             args.append("--keep-types")
1433 |         if file_type:
1434 |             args.extend(["--file-type", file_type])  # NOTE: the CLI flag is spelled --file-type; was "--file_type"
1435 |         if pks:
1436 |             args.extend(["--pks", ",".join(pks)])
1437 |         if map:
1438 |             args.extend(["--map", map])
1439 |         if float_threshold:
1440 |             args.extend(["--float-threshold", str(float_threshold)])
1441 |         if delim:
1442 |             args.extend(["--delim", delim])
1443 | 
1444 |         args.extend([str(table), str(filename)])
1445 | 
1446 |         self.execute(args)
1447 | 
1448 |     def schema_show(self, tables: Union[str, List[str]], commit: Optional[str] = None):
1449 |         """
1450 |         Display the schema of the specified table or tables at the (optionally) specified commit, defaulting to
1451 |         the tip of the current branch.
1452 |         :param tables: the table or tables to show
1453 |         :param commit: an optional commit to read the schema at
1454 |         :return:
1455 |         """
1456 |         args = ["schema", "show"]
1457 | 
1458 |         if commit:
1459 |             args.append(commit)
1460 | 
1461 |         args.extend(to_list(tables))
1462 | 
1463 |         self.execute(args)
1464 | 
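    # A hedged sketch of the schema helpers above; the table name and CSV path
    # are hypothetical. dry_run prints the inferred SQL without executing it.
    #
    #   db.schema_import("players", "players.csv", create=True, pks=["id"], dry_run=True)
    #   db.schema_show("players")   # print the resulting CREATE TABLE statement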
1465 |     def table_rm(self, tables: Union[str, List[str]]):
1466 |         """
1467 |         Remove the table or list of tables provided from the working set.
1468 |         :param tables: the table or tables to remove
1469 |         :return:
1470 |         """
1471 |         self.execute(["table", "rm", *to_list(tables)])  # pass tables as separate args, under the `table` subcommand
1472 | 
1473 |     def table_import(
1474 |         self,
1475 |         table: str,
1476 |         filename: str,
1477 |         create_table: bool = False,
1478 |         update_table: bool = False,
1479 |         force: bool = False,
1480 |         mapping_file: Optional[str] = None,
1481 |         pk: Optional[List[str]] = None,
1482 |         replace_table: bool = False,
1483 |         file_type: Optional[str] = None,
1484 |         continue_importing: bool = False,
1485 |         delim: Optional[str] = None,
1486 |     ):
1487 |         """
1488 |         Import a table from a filename, inferring the schema from the file. Operates in one of three modes:
1489 |         create, update, or replace. If creating, a primary key must be provided.
1490 |         :param table: the table to be created or updated
1491 |         :param filename: the data file to import
1492 |         :param create_table: create a table
1493 |         :param update_table: update a table
1494 |         :param force: force the import to overwrite existing data
1495 |         :param mapping_file: file mapping column names in file to new names
1496 |         :param pk: columns from which to build a primary key
1497 |         :param replace_table: replace existing tables
1498 |         :param file_type: the type of the file being imported
1499 |         :param continue_importing: continue the import when an input row cannot be imported
1500 |         :param delim: the delimiter used in the file being imported
1501 |         :return:
1502 |         """
1503 |         switches = [el for el in [create_table, update_table, replace_table] if el]
1504 |         if len(switches) != 1:
1505 |             raise ValueError("Exactly one of create_table, update_table, replace_table must be True")
1506 | 
1507 |         args = ["table", "import"]
1508 | 
1509 |         if create_table:
1510 |             args.append("--create-table")
1511 |             if not pk:
1512 |                 raise ValueError("When create_table is set to True, pk must be provided")
1513 |         if update_table:
1514 |             args.append("--update-table")
1515 |         if replace_table:
1516 |             args.append("--replace-table")
1517 |             if not pk:
1518 |                 raise ValueError("When replace_table is set to True, pk must be provided")
1519 |         if file_type:
1520 |             args.extend(["--file-type", file_type])
1521 |         if pk:
1522 |             args.extend(["--pk", ",".join(pk)])
1523 |         if mapping_file:
1524 |             args.extend(["--map", mapping_file])
1525 |         if delim:
1526 |             args.extend(["--delim", delim])
1527 |         if continue_importing:
1528 |             args.append("--continue")
1529 |         if force:
1530 |             args.append("--force")
1531 | 
1532 |         args.extend([table, filename])
1533 |         self.execute(args)
1534 | 
1535 |     def table_export(
1536 |         self,
1537 |         table: str,
1538 |         filename: str,
1539 |         force: bool = False,
1540 |         schema: Optional[str] = None,
1541 |         mapping_file: Optional[str] = None,
1542 |         pk: Optional[List[str]] = None,
1543 |         file_type: Optional[str] = None,
1544 |         continue_exporting: bool = False,
1545 |     ):
1546 |         """
1547 |         Export a table to the specified file.
1548 |         :param table: the table to export
1549 |         :param filename: the file to export to
1550 |         :param force: overwrite the file if it already exists
1551 |         :param schema: an optional schema file for the exported data
1552 |         :param mapping_file: file mapping column names to new names
1553 |         :param pk: columns from which to build a primary key
1554 |         :param file_type: the type of the file being exported
1555 |         :param continue_exporting: continue when an output row cannot be exported
1556 |         :return:
1557 |         """
1558 |         args = ["table", "export"]
1559 | 
1560 |         if force:
1561 |             args.append("--force")
1562 | 
1563 |         if continue_exporting:
1564 |             args.append("--continue")
1565 | 
1566 |         if schema:
1567 |             args.extend(["--schema", schema])
1568 | 
1569 |         if mapping_file:
1570 |             args.extend(["--map", mapping_file])
1571 | 
1572 |         if pk:
1573 |             args.extend(["--pk", ",".join(pk)])
1574 | 
1575 |         if file_type:
1576 |             args.extend(["--file-type", file_type])
1577 | 
1578 |         args.extend([table, filename])
1579 |         self.execute(args)
1580 | 
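    # A hedged round-trip sketch for the table import/export methods above;
    # the table name and file names are hypothetical placeholders.
    #
    #   db.table_import("players", "players.csv", create_table=True, pk=["id"])
    #   db.table_export("players", "players_out.csv", force=True)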
1581 |     def table_mv(self, old_table: str, new_table: str, force: bool = False):
1582 |         """
1583 |         Rename the table old_table to new_table.
1584 |         :param old_table: existing table
1585 |         :param new_table: new table name
1586 |         :param force: override changes in the working set
1587 |         :return:
1588 |         """
1589 |         args = ["table", "mv"]
1590 | 
1591 |         if force:
1592 |             args.append("--force")
1593 | 
1594 |         args.extend([old_table, new_table])
1595 |         self.execute(args)
1596 | 
1597 |     def table_cp(
1598 |         self,
1599 |         old_table: str,
1600 |         new_table: str,
1601 |         commit: Optional[str] = None,
1602 |         force: bool = False,
1603 |     ):
1604 |         """
1605 |         Copy an existing table to a new table, optionally at a specified commit.
1606 |         :param old_table: existing table name
1607 |         :param new_table: new table name
1608 |         :param commit: commit at which to read old_table
1609 |         :param force: override changes in the working set
1610 |         :return:
1611 |         """
1612 |         args = ["table", "cp"]
1613 | 
1614 |         if force:
1615 |             args.append("--force")
1616 | 
1617 |         if commit:
1618 |             args.append(commit)
1619 | 
1620 |         args.extend([old_table, new_table])
1621 |         self.execute(args)
1622 | 
--------------------------------------------------------------------------------
/doltcli/types.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import json
3 | from dataclasses import asdict, dataclass
4 | from typing import Any, Callable, Dict, List, Optional, Union
5 | 
6 | 
7 | class Encoder(json.JSONEncoder):
8 |     def default(self, obj):
9 |         if isinstance(obj, datetime.datetime):
10 |             return str(obj)
11 |         return super().default(obj)  # raise for unsupported types instead of silently emitting null
12 | 
13 | class BaseDataclass:
14 |     def dict(self) -> Dict:
15 |         return asdict(self)
16 | 
17 |     def json(self) -> str:
18 |         return json.dumps(self.dict(), cls=Encoder)
19 | 
20 | 
21 | @dataclass
22 | class BranchT(BaseDataclass):
23 |     name: Optional[str]
24 |     hash: Optional[str]
25 |     latest_committer: Optional[str] = None
26 |     latest_committer_email: Optional[str] = None
27 |     latest_commit_date: Optional[datetime.datetime] = None
28 |     latest_commit_message: Optional[str] = None
29 |     remote: Optional[str] = None
30 |     branch: Optional[str] = None
31 | 
32 | 
33 | @dataclass
34 | class CommitT(BaseDataclass):
35 |     ref: Optional[str]
36 |     timestamp: Optional[datetime.datetime]
37 |     author: Optional[str]
38 |     email: Optional[str]
39 |     message: Optional[str]
40 |     parents: Optional[Union[List[str], str]]
41 |     merge: bool = False
42 | 
43 |     def add_merge_parent(self, parent: str) -> None:
44 |         ...
45 | 
46 | 
47 | @dataclass
48 | class KeyPairT(BaseDataclass):
49 |     public_key: str
50 |     key_id: str
51 |     active: bool
52 | 
53 | 
54 | @dataclass
55 | class RemoteT(BaseDataclass):
56 |     name: Optional[str]
57 |     url: Optional[str]
58 | 
59 | 
60 | @dataclass
61 | class StatusT(BaseDataclass):
62 |     is_clean: bool
63 |     modified_tables: Dict[str, bool]
64 |     added_tables: Dict[str, bool]
65 | 
66 | 
67 | @dataclass
68 | class TableT(BaseDataclass):
69 |     name: str
70 |     root: Optional[str] = None
71 |     row_cnt: Optional[int] = None
72 |     system: bool = False
73 | 
74 | 
75 | @dataclass
76 | class TagT(BaseDataclass):
77 |     name: str
78 |     ref: str
79 |     message: str
80 | 
81 | 
82 | @dataclass
83 | class DoltHubContextT(BaseDataclass):
84 |     name: Optional[str] = None
85 |     url: Optional[str] = None
86 | 
87 | 
88 | @dataclass
89 | class DoltT:
90 |     repo_dir: str
91 |     print_output: bool = False
92 | 
93 |     @staticmethod
94 |     def init(repo_dir: Optional[str] = ...) -> "DoltT":
95 |         ...
96 | 
97 |     def execute(self, args: List[str], print_output: Optional[bool] = ...):
98 |         ...
99 | 
100 |     def status(self) -> "StatusT":
101 |         ...
102 | 
103 |     @staticmethod
104 |     def version() -> str:
105 |         ...
106 | 
107 |     def add(self, tables: Union[str, List[str]]) -> "StatusT":
108 |         ...
109 | 
110 |     def reset(
111 |         self,
112 |         tables: Union[str, List[str]],
113 |         hard: bool = False,
114 |         soft: bool = False,
115 |     ) -> None:
116 |         ...
117 | 
118 |     def commit(
119 |         self,
120 |         message: Optional[str] = ...,
121 |         allow_empty: bool = False,
122 |         date: Optional[datetime.datetime] = ...,
123 |     ) -> None:
124 |         ...
125 | 
126 |     def merge(
127 |         self,
128 |         branch: str,
129 |         message: Optional[str] = ...,
130 |         squash: bool = False,
131 |     ) -> None:
132 |         ...
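    # The *T dataclasses above are plain-data mirrors of the runtime objects,
    # and BaseDataclass gives each of them dict()/json() serialization. A small
    # hedged illustration (the values are made up):
    #
    #   b = BranchT(name="main", hash="abc123")
    #   b.dict()   # {"name": "main", "hash": "abc123", "latest_committer": None, ...}
    #   b.json()   # the same mapping as a JSON string, with datetimes stringified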
133 | 
134 |     def sql(
135 |         self,
136 |         query: Optional[str] = None,
137 |         result_format: Optional[str] = None,
138 |         execute: bool = False,
139 |         save: Optional[str] = None,
140 |         message: Optional[str] = None,
141 |         list_saved: bool = False,
142 |         batch: bool = False,
143 |         multi_db_dir: Optional[str] = None,
144 |         result_file: Optional[str] = None,
145 |         result_parser: Optional[Callable[[str], Any]] = None,
146 |     ) -> List:
147 |         ...
148 | 
149 |     def log(self, number: Optional[int] = ..., commit: Optional[str] = ...) -> Dict:
150 |         ...
151 | 
152 |     def diff(
153 |         self,
154 |         commit: Optional[str] = ...,
155 |         other_commit: Optional[str] = ...,
156 |         tables: Optional[Union[str, List[str]]] = ...,
157 |         data: bool = False,
158 |         schema: bool = False,  # can we even support this?
159 |         summary: bool = False,
160 |         sql: bool = False,
161 |         where: Optional[str] = None,
162 |         limit: Optional[int] = None,
163 |     ) -> None:
164 |         ...
165 | 
166 |     def blame(self, table_name: str, rev: Optional[str] = None) -> None:
167 |         ...
168 | 
169 |     def branch(
170 |         self,
171 |         branch_name: Optional[str] = ...,
172 |         start_point: Optional[str] = ...,
173 |         new_branch: Optional[str] = ...,
174 |         force: bool = False,
175 |         delete: bool = False,
176 |         copy: bool = False,
177 |         move: bool = False,
178 |     ) -> None:
179 |         ...
180 | 
181 |     def checkout(
182 |         self,
183 |         branch: Optional[str] = ...,
184 |         tables: Optional[Union[str, List[str]]] = ...,
185 |         checkout_branch: bool = False,
186 |         start_point: Optional[str] = ...,
187 |     ) -> None:
188 |         ...
189 | 
190 |     def remote(
191 |         self,
192 |         add: bool = False,
193 |         name: Optional[str] = ...,
194 |         url: Optional[str] = ...,
195 |         remove: bool = False,
196 |     ) -> None:
197 |         ...
198 | 
199 |     def pull(self, remote: str = "origin", branch: Optional[str] = None, **kwargs: Any) -> None:
200 |         ...
201 | 
202 |     def fetch(
203 |         self,
204 |         remote: str = "origin",
205 |         refspecs: Union[str, List[str]] = ...,
206 |         force: bool = False,
207 |         **kwargs: Any,
208 |     ) -> None:
209 |         ...
210 | 
211 |     @staticmethod
212 |     def clone(
213 |         remote_url: str,
214 |         new_dir: Optional[str] = ...,
215 |         remote: Optional[str] = ...,
216 |         branch: Optional[str] = ...,
217 |     ) -> "DoltT":
218 |         ...
219 | 
220 |     def ls(self, system: bool = False, all: bool = False) -> List[TableT]:
221 |         ...
222 | -------------------------------------------------------------------------------- /doltcli/utils.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import datetime 3 | import io 4 | import logging 5 | import os 6 | import tempfile 7 | from collections import defaultdict 8 | from contextlib import contextmanager 9 | from pathlib import Path 10 | from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Union 11 | 12 | from .types import DoltT 13 | 14 | logger = logging.getLogger() 15 | 16 | DOLT_PATH = "dolt" 17 | 18 | 19 | def set_dolt_path(path: str): 20 | global DOLT_PATH 21 | DOLT_PATH = path 22 | 23 | 24 | def read_columns(dolt: DoltT, table: str, as_of: Optional[str] = None) -> Dict[str, list]: 25 | return read_columns_sql(dolt, get_read_table_asof_query(table, as_of)) 26 | 27 | 28 | def read_rows(dolt: DoltT, table: str, as_of: Optional[str] = None) -> List[dict]: 29 | return read_rows_sql(dolt, get_read_table_asof_query(table, as_of)) 30 | 31 | 32 | def get_read_table_asof_query(table: str, as_of: Optional[str] = None) -> str: 33 | base_query = f"SELECT * FROM `{table}`" 34 | return f'{base_query} AS OF "{as_of}"' if as_of else base_query 35 | 36 | 37 | def read_columns_sql(dolt: DoltT, sql: str) -> Dict[str, list]: 38 | rows = read_table_sql(dolt, sql) 39 | columns = rows_to_columns(rows) 40 | return columns 41 | 42 | 43 | def read_rows_sql(dolt: DoltT, sql: str) -> List[dict]: 44 | return read_table_sql(dolt, sql) 45 | 46 | 47 | def read_table_sql( 48 | dolt: DoltT, sql: str, result_parser: Optional[Callable[[str], Any]] = None 49 | ) -> List[dict]: 50 | return dolt.sql(sql, result_format="csv", result_parser=result_parser) 51 | 52 | 53 | CREATE, FORCE_CREATE, REPLACE, UPDATE = "create", "force_create", "replace", "update" 54 | IMPORT_MODES_TO_FLAGS = { 55 | CREATE: ["-c"], 56 | FORCE_CREATE: ["-f", "-c"], 57 | REPLACE: ["-r"], 58 | UPDATE: ["-u"], 59 | } 60 | 61 | 62 | def write_file( 63 | dolt: DoltT, 64 | table: str, 65 | file_handle: Optional[io.TextIOBase] = None, 66 | file: Union[str, Path, None] = None, 67 | # TODO what to do about this? 
68 |     filetype: str = "csv",
69 |     import_mode: Optional[str] = None,
70 |     primary_key: Optional[List[str]] = None,
71 |     commit: Optional[bool] = False,
72 |     commit_message: Optional[str] = None,
73 |     commit_date: Optional[datetime.datetime] = None,
74 |     do_continue: Optional[bool] = False,
75 | ):
76 |     if file_handle is not None and file is not None:
77 |         raise ValueError("Specify only one of: file, file_handle")
78 |     elif file_handle is None and file is None:
79 |         raise ValueError("Specify one of: file, file_handle")
80 |     elif file_handle is not None:
81 | 
82 |         def writer(filepath: str):
83 |             if not isinstance(file_handle, io.TextIOBase):
84 |                 raise ValueError(
85 |                     f"file_handle expected type io.TextIOBase; found: {type(file_handle)}"
86 |                 )
87 |             with open(filepath, "w", newline="") as f:
88 |                 f.writelines(file_handle.readlines())
89 |             return filepath
90 | 
91 |     elif file is not None:
92 | 
93 |         def writer(filepath: str):
94 |             return str(file)  # the data already lives at `file`; hand that path to the importer
95 | 
96 |     _import_helper(
97 |         dolt=dolt,
98 |         table=table,
99 |         write_import_file=writer,
100 |         primary_key=primary_key,
101 |         import_mode=import_mode,
102 |         commit=commit,
103 |         commit_message=commit_message,
104 |         commit_date=commit_date,
105 |         do_continue=do_continue,
106 |     )
107 | 
108 | 
109 | def write_columns(
110 |     dolt: DoltT,
111 |     table: str,
112 |     columns: Dict[str, List[Any]],
113 |     import_mode: Optional[str] = None,
114 |     primary_key: Optional[List[str]] = None,
115 |     commit: Optional[bool] = False,
116 |     commit_message: Optional[str] = None,
117 |     commit_date: Optional[datetime.datetime] = None,
118 |     do_continue: Optional[bool] = False,
119 | ):
120 |     """
121 |     Write a mapping from column name to equal-length lists of values to the given table.
122 |     :param dolt: the Dolt database to write to
123 |     :param table: the table to write to
124 |     :param columns: a map of column name to a list of values
125 |     :param import_mode: one of create, force_create, replace, update
126 |     :param primary_key: the columns to use as a primary key
127 |     :param commit: add and commit the table after importing
128 |     :param commit_message: optional commit message
129 |     :param commit_date: optional commit date
130 |     :return:
131 |     """
132 | 
133 |     def writer(filepath: str):
134 |         if len({len(col) for col in columns.values()}) != 1:
135 |             raise ValueError("Must pass columns of identical length")
136 | 
137 |         with open(filepath, "w", newline="") as f:
138 |             csv_writer = csv.DictWriter(f, columns.keys())
139 |             rows = columns_to_rows(columns)
140 |             csv_writer.writeheader()
141 |             csv_writer.writerows(rows)
142 |         return filepath
143 | 
144 |     _import_helper(
145 |         dolt=dolt,
146 |         table=table,
147 |         write_import_file=writer,
148 |         primary_key=primary_key,
149 |         import_mode=import_mode,
150 |         commit=commit,
151 |         commit_message=commit_message,
152 |         commit_date=commit_date,
153 |         do_continue=do_continue,
154 |     )
155 | 
156 | 
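# A hedged round-trip sketch for the helpers in this module; the database
# path, table name, and data are hypothetical placeholders.
#
#   from doltcli import Dolt
#
#   db = Dolt("path/to/db")
#   write_columns(db, "players", {"id": [1, 2], "name": ["Rafael", "Novak"]},
#                 import_mode=CREATE, primary_key=["id"], commit=True)
#   read_rows(db, "players")   # [{"id": "1", "name": "Rafael"}, ...] (CSV values come back as strings)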
157 | def write_rows(
158 |     dolt: DoltT,
159 |     table: str,
160 |     rows: List[dict],
161 |     import_mode: Optional[str] = None,
162 |     primary_key: Optional[List[str]] = None,
163 |     commit: Optional[bool] = False,
164 |     commit_message: Optional[str] = None,
165 |     commit_date: Optional[datetime.datetime] = None,
166 |     do_continue: Optional[bool] = False,
167 | ):
168 |     """
169 |     Write a list of row dicts to the given table.
170 |     :param dolt: the Dolt database to write to
171 |     :param table: the table to write to
172 |     :param rows: a list of rows, each a dict of column name to value
173 |     :param import_mode: one of create, force_create, replace, update
174 |     :param primary_key: the columns to use as a primary key
175 |     :param commit: add and commit the table after importing
176 |     :param commit_message: optional commit message
177 |     :param commit_date: optional commit date
178 |     :return:
179 |     """
180 | 
181 |     def writer(filepath: str):
182 |         with open(filepath, "w", newline="") as f:
183 |             fieldnames: Set[str] = set()
184 |             for row in rows:
185 |                 fieldnames = fieldnames.union(set(row.keys()))
186 | 
187 |             csv_writer = csv.DictWriter(f, fieldnames)
188 |             csv_writer.writeheader()
189 |             csv_writer.writerows(rows)
190 |         return filepath
191 | 
192 |     _import_helper(
193 |         dolt=dolt,
194 |         table=table,
195 |         write_import_file=writer,
196 |         primary_key=primary_key,
197 |         import_mode=import_mode,
198 |         commit=commit,
199 |         commit_message=commit_message,
200 |         commit_date=commit_date,
201 |         do_continue=do_continue,
202 |     )
203 | 
204 | 
205 | def _import_helper(
206 |     dolt: DoltT,
207 |     table: str,
208 |     write_import_file: Callable[[str], str],
209 |     import_mode: Optional[str] = None,
210 |     primary_key: Optional[List[str]] = None,
211 |     do_continue: Optional[bool] = False,
212 |     commit: Optional[bool] = False,
213 |     commit_message: Optional[str] = None,
214 |     commit_date: Optional[datetime.datetime] = None,
215 | ) -> None:
216 |     import_mode = _get_import_mode_and_flags(dolt, table, import_mode)
217 |     logger.info(
218 |         f"Importing to table {table} in dolt directory {dolt.repo_dir}, import mode {import_mode}"
219 |     )
220 | 
221 |     fname = tempfile.mktemp(suffix=".csv")  # mktemp only reserves a name; write_import_file creates the file below
222 |     import_flags = IMPORT_MODES_TO_FLAGS[import_mode]
223 |     try:
224 |         import_file = write_import_file(fname)
225 |         args = ["table", "import", table] + import_flags
226 |         if primary_key:
227 |             args += ["--pk={}".format(",".join(primary_key))]
228 |         if do_continue is True:
229 |             args += ["--continue"]
230 | 
231 |         dolt.execute(args + [import_file])
232 | 
233 |         if commit:
234 |             msg = commit_message or f"Committing write to table {table} in {import_mode} mode"
235 |             dolt.add(table)
236 |             dolt.commit(msg, date=commit_date)
237 |     finally:
238 |         if os.path.exists(fname):
239 |             os.remove(fname)
240 | 
241 | 
242 | def _get_import_mode_and_flags(
243 |     dolt: DoltT, table: str, import_mode: Optional[str] = None
244 | ) -> str:
245 |     import_modes = IMPORT_MODES_TO_FLAGS.keys()
246 |     if import_mode and import_mode not in import_modes:
247 |         raise ValueError(f"import_mode must be one of: {import_modes}")
248 |     elif not import_mode:
249 |         if table in [t.name for t in dolt.ls()]:
250 |             logger.info(f'No import mode specified, table exists, using "{UPDATE}"')
251 |             import_mode = UPDATE
252 |         else:
253 |             logger.info(f'No import mode specified, table does not exist, using "{CREATE}"')
254 |             import_mode = CREATE
255 | 
256 |     return import_mode
257 | 
258 | 
259 | def columns_to_rows(columns: Dict[str, list]) -> List[dict]:
260 |     row_count = len(list(columns.values())[0])
261 |     rows: List[dict] = [{} for _ in range(row_count)]
262 |     for col_name in columns.keys():
263 |         for j, val in enumerate(columns[col_name]):
264 |             rows[j][col_name] = val
265 | 
266 |     return rows
267 | 
268 | 
269 | def rows_to_columns(rows: Iterable[dict]) -> Dict[str, list]:
270 |     columns: Dict[str, list] = defaultdict(list)
271 |     for row in rows:
272 |         for col, val in row.items():
273 |             columns[col].append(val)
274 | 
275 |     return columns
276 | 
277 | 
278 | def to_list(value: Union[Any, List[Any]]) -> Any:
279 |     return [value] if not isinstance(value, list) and value is not None else value
280 | 
281 | 
282 | @contextmanager
283 | def detach_head(db, commit):
284 |     active_branch, _ = db._get_branches()
285 |     switched = False
286 |     try:
287 |         commit_branches = db.sql(
288 |             f"select name, hash from dolt_branches where hash = '{commit}'",
289 |             result_format="csv",
290 |         )
291 |         if len(commit_branches) > 0:
292 |             tmp_branch = commit_branches[0]
293 |             if active_branch.hash != tmp_branch["hash"]:
294 |                 switched = True
295 |                 db.checkout(tmp_branch["name"])
296 |         else:
297 |             tmp_branch = f"detached_HEAD_at_{commit[:5]}"
298 |             db.checkout(start_point=commit, branch=tmp_branch, checkout_branch=True)
299 |             switched = True
300 |         yield
301 |     finally:
302 |         if
switched: 303 | db.checkout(active_branch.name) 304 | return 305 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "atomicwrites" 3 | version = "1.4.1" 4 | description = "Atomic file writes." 5 | category = "dev" 6 | optional = false 7 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 8 | 9 | [[package]] 10 | name = "attrs" 11 | version = "22.2.0" 12 | description = "Classes Without Boilerplate" 13 | category = "dev" 14 | optional = false 15 | python-versions = ">=3.6" 16 | 17 | [package.extras] 18 | cov = ["attrs", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] 19 | dev = ["attrs"] 20 | docs = ["furo", "sphinx", "myst-parser", "zope.interface", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] 21 | tests = ["attrs", "zope.interface"] 22 | tests-no-zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] 23 | tests_no_zope = ["hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist", "cloudpickle", "mypy (>=0.971,<0.990)", "pytest-mypy-plugins"] 24 | 25 | [[package]] 26 | name = "black" 27 | version = "21.12b0" 28 | description = "The uncompromising code formatter." 29 | category = "dev" 30 | optional = false 31 | python-versions = ">=3.6.2" 32 | 33 | [package.dependencies] 34 | click = ">=7.1.2" 35 | dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} 36 | mypy-extensions = ">=0.4.3" 37 | pathspec = ">=0.9.0,<1" 38 | platformdirs = ">=2" 39 | tomli = ">=0.2.6,<2.0.0" 40 | typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} 41 | typing-extensions = [ 42 | {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, 43 | {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, 44 | ] 45 | 46 | [package.extras] 47 | colorama = ["colorama (>=0.4.3)"] 48 | d = ["aiohttp (>=3.7.4)"] 49 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 50 | python2 = ["typed-ast (>=1.4.3)"] 51 | uvloop = ["uvloop (>=0.15.2)"] 52 | 53 | [[package]] 54 | name = "click" 55 | version = "8.0.4" 56 | description = "Composable command line interface toolkit" 57 | category = "dev" 58 | optional = false 59 | python-versions = ">=3.6" 60 | 61 | [package.dependencies] 62 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 63 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 64 | 65 | [[package]] 66 | name = "colorama" 67 | version = "0.4.5" 68 | description = "Cross-platform colored terminal text." 
69 | category = "dev" 70 | optional = false 71 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 72 | 73 | [[package]] 74 | name = "coverage" 75 | version = "6.2" 76 | description = "Code coverage measurement for Python" 77 | category = "dev" 78 | optional = false 79 | python-versions = ">=3.6" 80 | 81 | [package.extras] 82 | toml = ["tomli"] 83 | 84 | [[package]] 85 | name = "dataclasses" 86 | version = "0.8" 87 | description = "A backport of the dataclasses module for Python 3.6" 88 | category = "main" 89 | optional = false 90 | python-versions = ">=3.6, <3.7" 91 | 92 | [[package]] 93 | name = "flake8" 94 | version = "3.9.2" 95 | description = "the modular source code checker: pep8 pyflakes and co" 96 | category = "dev" 97 | optional = false 98 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 99 | 100 | [package.dependencies] 101 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} 102 | mccabe = ">=0.6.0,<0.7.0" 103 | pycodestyle = ">=2.7.0,<2.8.0" 104 | pyflakes = ">=2.3.0,<2.4.0" 105 | 106 | [[package]] 107 | name = "importlib-metadata" 108 | version = "4.8.3" 109 | description = "Read metadata from Python packages" 110 | category = "dev" 111 | optional = false 112 | python-versions = ">=3.6" 113 | 114 | [package.dependencies] 115 | typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} 116 | zipp = ">=0.5" 117 | 118 | [package.extras] 119 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 120 | perf = ["ipython"] 121 | testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] 122 | 123 | [[package]] 124 | name = "iniconfig" 125 | version = "1.1.1" 126 | description = "iniconfig: brain-dead simple config-ini parsing" 127 | category = "dev" 128 | optional = false 129 | python-versions = "*" 130 | 131 | [[package]] 132 | name = "isort" 133 | version = "5.10.1" 134 | description = "A Python utility / library to sort Python imports." 135 | category = "dev" 136 | optional = false 137 | python-versions = ">=3.6.1,<4.0" 138 | 139 | [package.extras] 140 | pipfile_deprecated_finder = ["pipreqs", "requirementslib"] 141 | requirements_deprecated_finder = ["pipreqs", "pip-api"] 142 | colors = ["colorama (>=0.4.3,<0.5.0)"] 143 | plugins = ["setuptools"] 144 | 145 | [[package]] 146 | name = "mccabe" 147 | version = "0.6.1" 148 | description = "McCabe checker, plugin for flake8" 149 | category = "dev" 150 | optional = false 151 | python-versions = "*" 152 | 153 | [[package]] 154 | name = "mypy" 155 | version = "0.971" 156 | description = "Optional static typing for Python" 157 | category = "dev" 158 | optional = false 159 | python-versions = ">=3.6" 160 | 161 | [package.dependencies] 162 | mypy-extensions = ">=0.4.3" 163 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 164 | typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} 165 | typing-extensions = ">=3.10" 166 | 167 | [package.extras] 168 | dmypy = ["psutil (>=4.0)"] 169 | python2 = ["typed-ast (>=1.4.0,<2)"] 170 | reports = ["lxml"] 171 | 172 | [[package]] 173 | name = "mypy-extensions" 174 | version = "1.0.0" 175 | description = "Type system extensions for programs checked with the mypy type checker." 
176 | category = "dev" 177 | optional = false 178 | python-versions = ">=3.5" 179 | 180 | [[package]] 181 | name = "packaging" 182 | version = "21.3" 183 | description = "Core utilities for Python packages" 184 | category = "dev" 185 | optional = false 186 | python-versions = ">=3.6" 187 | 188 | [package.dependencies] 189 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 190 | 191 | [[package]] 192 | name = "pathspec" 193 | version = "0.9.0" 194 | description = "Utility library for gitignore style pattern matching of file paths." 195 | category = "dev" 196 | optional = false 197 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 198 | 199 | [[package]] 200 | name = "platformdirs" 201 | version = "2.4.0" 202 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 203 | category = "dev" 204 | optional = false 205 | python-versions = ">=3.6" 206 | 207 | [package.extras] 208 | docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] 209 | test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] 210 | 211 | [[package]] 212 | name = "pluggy" 213 | version = "1.0.0" 214 | description = "plugin and hook calling mechanisms for python" 215 | category = "dev" 216 | optional = false 217 | python-versions = ">=3.6" 218 | 219 | [package.dependencies] 220 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 221 | 222 | [package.extras] 223 | dev = ["pre-commit", "tox"] 224 | testing = ["pytest", "pytest-benchmark"] 225 | 226 | [[package]] 227 | name = "py" 228 | version = "1.11.0" 229 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 230 | category = "dev" 231 | optional = false 232 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 233 | 234 | [[package]] 235 | name = "pycodestyle" 236 | version = "2.7.0" 237 | description = "Python style guide checker" 238 | category = "dev" 239 | optional = false 240 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 241 | 242 | [[package]] 243 | name = "pyflakes" 244 | version = "2.3.1" 245 | description = "passive checker of Python programs" 246 | category = "dev" 247 | optional = false 248 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 249 | 250 | [[package]] 251 | name = "pyparsing" 252 | version = "3.0.7" 253 | description = "Python parsing module" 254 | category = "dev" 255 | optional = false 256 | python-versions = ">=3.6" 257 | 258 | [package.extras] 259 | diagrams = ["jinja2", "railroad-diagrams"] 260 | 261 | [[package]] 262 | name = "pytest" 263 | version = "6.2.5" 264 | description = "pytest: simple powerful testing with Python" 265 | category = "dev" 266 | optional = false 267 | python-versions = ">=3.6" 268 | 269 | [package.dependencies] 270 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} 271 | attrs = ">=19.2.0" 272 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 273 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} 274 | iniconfig = "*" 275 | packaging = "*" 276 | pluggy = ">=0.12,<2.0" 277 | py = ">=1.8.2" 278 | toml = "*" 279 | 280 | [package.extras] 281 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] 282 | 283 | [[package]] 284 | name = "pytest-cov" 285 | version = "2.12.1" 286 | description = "Pytest plugin for measuring coverage." 
287 | category = "dev" 288 | optional = false 289 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 290 | 291 | [package.dependencies] 292 | coverage = ">=5.2.1" 293 | pytest = ">=4.6" 294 | toml = "*" 295 | 296 | [package.extras] 297 | testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] 298 | 299 | [[package]] 300 | name = "toml" 301 | version = "0.10.2" 302 | description = "Python Library for Tom's Obvious, Minimal Language" 303 | category = "dev" 304 | optional = false 305 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 306 | 307 | [[package]] 308 | name = "tomli" 309 | version = "1.2.3" 310 | description = "A lil' TOML parser" 311 | category = "dev" 312 | optional = false 313 | python-versions = ">=3.6" 314 | 315 | [[package]] 316 | name = "typed-ast" 317 | version = "1.5.4" 318 | description = "a fork of Python 2 and 3 ast modules with type comment support" 319 | category = "main" 320 | optional = false 321 | python-versions = ">=3.6" 322 | 323 | [[package]] 324 | name = "typing-extensions" 325 | version = "4.1.1" 326 | description = "Backported and Experimental Type Hints for Python 3.6+" 327 | category = "dev" 328 | optional = false 329 | python-versions = ">=3.6" 330 | 331 | [[package]] 332 | name = "zipp" 333 | version = "3.6.0" 334 | description = "Backport of pathlib-compatible object wrapper for zip files" 335 | category = "dev" 336 | optional = false 337 | python-versions = ">=3.6" 338 | 339 | [package.extras] 340 | docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 341 | testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] 342 | 343 | [metadata] 344 | lock-version = "1.1" 345 | python-versions = ">=3.6.2,<4.0" 346 | content-hash = "80a62a3f272f7c45598154314eee14fc2a81b47759c9083ce02e6c76d2ebdd7f" 347 | 348 | [metadata.files] 349 | atomicwrites = [] 350 | attrs = [] 351 | black = [ 352 | {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, 353 | {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, 354 | ] 355 | click = [ 356 | {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, 357 | {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, 358 | ] 359 | colorama = [] 360 | coverage = [ 361 | {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, 362 | {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, 363 | {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, 364 | {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, 365 | {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, 366 | {file = 
"coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, 367 | {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, 368 | {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, 369 | {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, 370 | {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, 371 | {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, 372 | {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, 373 | {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, 374 | {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, 375 | {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, 376 | {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, 377 | {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, 378 | {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, 379 | {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, 380 | {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, 381 | {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, 382 | {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, 383 | {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, 384 | {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, 385 | {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, 386 | {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, 387 | {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, 388 | {file = 
"coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, 389 | {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, 390 | {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, 391 | {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, 392 | {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, 393 | {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, 394 | {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, 395 | {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, 396 | {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, 397 | {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, 398 | {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, 399 | {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, 400 | {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, 401 | {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, 402 | {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, 403 | {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, 404 | {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, 405 | {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, 406 | {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, 407 | {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, 408 | ] 409 | dataclasses = [ 410 | {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, 411 | {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, 412 | ] 413 | flake8 = [ 414 | {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, 
415 | {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, 416 | ] 417 | importlib-metadata = [ 418 | {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, 419 | {file = "importlib_metadata-4.8.3.tar.gz", hash = "sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, 420 | ] 421 | iniconfig = [ 422 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, 423 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, 424 | ] 425 | isort = [ 426 | {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, 427 | {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, 428 | ] 429 | mccabe = [ 430 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, 431 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, 432 | ] 433 | mypy = [] 434 | mypy-extensions = [] 435 | packaging = [ 436 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 437 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 438 | ] 439 | pathspec = [ 440 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, 441 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, 442 | ] 443 | platformdirs = [ 444 | {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, 445 | {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, 446 | ] 447 | pluggy = [ 448 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, 449 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, 450 | ] 451 | py = [ 452 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 453 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 454 | ] 455 | pycodestyle = [ 456 | {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, 457 | {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, 458 | ] 459 | pyflakes = [ 460 | {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, 461 | {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, 462 | ] 463 | pyparsing = [] 464 | pytest = [ 465 | {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, 466 | {file = "pytest-6.2.5.tar.gz", hash = 
"sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, 467 | ] 468 | pytest-cov = [ 469 | {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, 470 | {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, 471 | ] 472 | toml = [ 473 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 474 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 475 | ] 476 | tomli = [ 477 | {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, 478 | {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, 479 | ] 480 | typed-ast = [] 481 | typing-extensions = [ 482 | {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, 483 | {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, 484 | ] 485 | zipp = [ 486 | {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, 487 | {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, 488 | ] 489 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "doltcli" 3 | version = "0.2.2" 4 | description = "Slim Python interface for Dolt's CLI API." 5 | authors = ["Max Hoffman ", "Oscar Batori "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = ">=3.6.2,<4.0" 10 | dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} 11 | "typed-ast" = ">1.4.3" 12 | 13 | [tool.poetry.dev-dependencies] 14 | pytest = "^6.2.2" 15 | black = "^21.10b0" 16 | mypy = ">0.800" 17 | pytest-cov = "^2.11.1" 18 | isort = "^5.9.1" 19 | flake8 = "^3.9.2" 20 | 21 | [tool.isort] 22 | profile = "black" 23 | 24 | [build-system] 25 | requires = ["poetry-core>=1.0.0a5"] 26 | build-backend = "poetry.core.masonry.api" 27 | -------------------------------------------------------------------------------- /scripts/run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -xeou pipefail 4 | 5 | DIR=$(cd $(dirname ${BASH_SOURCE[0]}) && pwd) 6 | BASE=$DIR/.. 7 | 8 | if [ -x poetry ] ; then 9 | echo "Install poetry" 10 | echo "curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 - --version=1.1.0b2" 11 | exit 1 12 | fi 13 | 14 | cd $BASE 15 | poetry run black . 
--check --exclude tests/ -t py37
16 | poetry run mypy doltcli/
17 | poetry run pytest tests
18 | 
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import datetime
3 | import os
4 | import shutil
5 | from typing import Optional, Tuple
6 | 
7 | import pytest
8 | 
9 | from doltcli import Dolt
10 | 
11 | TEST_TABLE = "characters"
12 | TEST_DATA_INITIAL = [
13 |     {
14 |         "name": "Anna",
15 |         "adjective": "tragic",
16 |         "id": 1,
17 |         "date_of_death": datetime.datetime(1877, 1, 1),
18 |     },
19 |     {"name": "Vronksy", "adjective": "honorable", "id": 2, "date_of_death": None},
20 |     {"name": "Oblonksy", "adjective": "buffoon", "id": 3, "date_of_death": None},
21 | ]
22 | 
23 | TEST_DATA_UPDATE = [
24 |     {
25 |         "name": "Vronksy",
26 |         "adjective": "honorable",
27 |         "id": 2,
28 |         "date_of_death": datetime.datetime(1879, 1, 1),
29 |     },
30 |     {"name": "Levin", "adjective": "tiresome", "id": 4, "date_of_death": None},
31 | ]
32 | 
33 | TEST_DATA_FINAL = [TEST_DATA_INITIAL[0], TEST_DATA_INITIAL[2]] + TEST_DATA_UPDATE
34 | 
35 | 
36 | def get_repo_path_tmp_path(path: str, subpath: Optional[str] = None) -> Tuple[str, str]:
37 |     if subpath:
38 |         return os.path.join(path, subpath), os.path.join(path, subpath, ".dolt")
39 |     else:
40 |         return path, os.path.join(path, ".dolt")
41 | 
42 | 
43 | @pytest.fixture()
44 | def with_test_data_initial_file(tmp_path):
45 |     return _test_data_to_file(tmp_path, "initial", TEST_DATA_INITIAL)
46 | 
47 | 
48 | @pytest.fixture()
49 | def with_test_table(init_empty_test_repo):
50 |     dolt = init_empty_test_repo
51 |     dolt.sql(
52 |         query=f"""
53 |             CREATE TABLE `{TEST_TABLE}` (
54 |                 `name` VARCHAR(32),
55 |                 `adjective` VARCHAR(32),
56 |                 `id` INT NOT NULL,
57 |                 `date_of_death` DATETIME,
58 |                 PRIMARY KEY (`id`)
59 |             );
60 |         """
61 |     )
62 |     dolt.add(TEST_TABLE)
63 |     dolt.commit("Created test table")
64 |     return dolt
65 | 
66 | 
67 | @pytest.fixture(scope="function")
68 | def doltdb():
69 |     db_path = os.path.join(os.path.dirname(__file__), "foo")
70 |     try:
71 |         db = Dolt.init(db_path)
72 |         db.sql("create table t1 (a bigint primary key, b bigint, c bigint)")
73 |         db.sql("insert into t1 values (1,1,1), (2,2,2)")
74 |         db.add("t1")
75 |         db.commit("initialize t1")
76 | 
77 |         db.sql("insert into t1 values (3,3,3)")
78 |         db.add("t1")
79 |         db.commit("initialize edit t1")
80 |         yield db_path
81 |     finally:
82 |         if os.path.exists(db_path):
83 |             shutil.rmtree(db_path)
84 | 
85 | 
86 | @pytest.fixture()
87 | def with_test_data_initial_file(tmp_path):
88 |     return _test_data_to_file(tmp_path, "initial", TEST_DATA_INITIAL)
89 | 
90 | 
91 | @pytest.fixture()
92 | def with_test_data_final_file(tmp_path):
93 |     return _test_data_to_file(tmp_path, "final", TEST_DATA_FINAL)
94 | 
95 | 
96 | def _test_data_to_file(file_path, file_name, test_data):
97 |     path = os.path.join(file_path, file_name)
98 |     with open(path, "w") as fh:
99 |         csv_writer = csv.DictWriter(fh, fieldnames=test_data[0].keys())
100 |         csv_writer.writeheader()
101 |         csv_writer.writerows(test_data)
102 | 
103 |     return path
104 | 
105 | 
106 | @pytest.fixture
107 | def init_empty_test_repo(tmpdir) -> Dolt:
108 |     return _init_helper(tmpdir)
109 | 
110 | 
111 | @pytest.fixture
112 | def init_other_empty_test_repo(tmpdir) -> Dolt:
113 |     return _init_helper(tmpdir, "other")
114 | 
115 | @pytest.fixture
116 | def tmpdir2(tmpdir):
117 |     return tmpdir.mkdir("tmpdir2")
118 | 
119 | @pytest.fixture
120 | def empty_test_repo_with_remote(tmpdir, tmpdir2) -> Dolt:
121 |     repo = 
_init_helper(tmpdir) 117 | repo.remote(add=True, name="origin", url=rf"file:///{tmpdir2}") 118 | return repo 119 | 120 | 121 | def _init_helper(path: str, ext: str = None): 122 | repo_path, repo_data_dir = get_repo_path_tmp_path(path, ext) 123 | return Dolt.init(repo_path) 124 | -------------------------------------------------------------------------------- /tests/helpers.py: -------------------------------------------------------------------------------- 1 | import csv 2 | from typing import List 3 | 4 | 5 | def write_dict_to_csv(data, file): 6 | csv_columns = list(data[0].keys()) 7 | with open(file, "w") as csvfile: 8 | writer = csv.DictWriter(csvfile, fieldnames=csv_columns) 9 | writer.writeheader() 10 | for row in data: 11 | writer.writerow(row) 12 | 13 | 14 | def read_csv_to_dict(file): 15 | with open(file, "r") as csvfile: 16 | reader = csv.DictReader(csvfile) 17 | return list(reader) 18 | 19 | 20 | def compare_rows_helper(expected: List[dict], actual: List[dict]): 21 | assert len(expected) == len( 22 | actual 23 | ), f"Unequal row counts: {len(expected)} != {len(actual)}" 24 | errors = [] 25 | for k in expected[0].keys(): 26 | if k.startswith("date"): 27 | exp = set([e[k][:10] for e in expected]) 28 | act = set([a[k][:10] for a in actual]) 29 | else: 30 | exp = set([e[k] for e in expected]) 31 | act = set([a[k] for a in actual]) 32 | if exp ^ act != set(): 33 | errors.append(f"Unequal value sets: {exp}, {act}") 34 | 35 | error_str = "\n".join(errors) 36 | assert not errors, f"Failed with the following unequal columns:\n{error_str}" 37 | -------------------------------------------------------------------------------- /tests/test_dolt.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import os 3 | import shutil 4 | import tempfile 5 | import uuid 6 | from typing import List, Tuple 7 | 8 | import pytest 9 | 10 | from doltcli import ( 11 | CREATE, 12 | UPDATE, 13 | Dolt, 14 | DoltException, 15 | _execute, 16 | detach_head, 17 | read_rows, 18 | set_dolt_path, 19 | write_rows, 20 | ) 21 | from tests.helpers import compare_rows_helper, read_csv_to_dict 22 | 23 | BASE_TEST_ROWS = [{"name": "Rafael", "id": "1"}, {"name": "Novak", "id": "2"}] 24 | 25 | 26 | def get_repo_path_tmp_path(path: str, subpath: str = None) -> Tuple[str, str]: 27 | if subpath: 28 | return os.path.join(path, subpath), os.path.join(path, subpath, ".dolt") 29 | else: 30 | return path, os.path.join(path, ".dolt") 31 | 32 | 33 | @pytest.fixture 34 | def create_test_data(tmp_path) -> str: 35 | path = os.path.join(tmp_path, str(uuid.uuid4())) 36 | with open(path, "w", newline="") as f: 37 | writer = csv.writer(f) 38 | writer.writerow(list(BASE_TEST_ROWS[0].keys())) 39 | for row in BASE_TEST_ROWS: 40 | writer.writerow(list(row.values())) 41 | yield path 42 | os.remove(path) 43 | 44 | 45 | @pytest.fixture 46 | def create_test_table(init_empty_test_repo: Dolt, create_test_data: str) -> Tuple[Dolt, str]: 47 | repo, test_data_path = init_empty_test_repo, create_test_data 48 | repo.sql( 49 | query=""" 50 | CREATE TABLE `test_players` ( 51 | `name` LONGTEXT NOT NULL COMMENT 'tag:0', 52 | `id` BIGINT NOT NULL COMMENT 'tag:1', 53 | PRIMARY KEY (`id`) 54 | ); 55 | """ 56 | ) 57 | data = BASE_TEST_ROWS 58 | write_rows(repo, "test_players", data, UPDATE, commit=False) 59 | yield repo, "test_players" 60 | 61 | if "test_players" in [table.name for table in repo.ls()]: 62 | _execute(["table", "rm", "test_players"], repo.repo_dir) 63 | 64 | 65 | @pytest.fixture 66 | def 
test_repo_with_two_remote_branches( 67 | empty_test_repo_with_remote, 68 | ) -> Tuple[Dolt, str, str, str]: 69 | repo = empty_test_repo_with_remote 70 | new_branch_name = "new_branch" 71 | commit_message_main = "Added table" 72 | commit_message_new_branch = "Added player" 73 | 74 | # add table to main branch and push it to remote 75 | table_name = "test_players" 76 | repo.sql( 77 | query=f""" 78 | CREATE TABLE `{table_name}` ( 79 | `name` LONGTEXT NOT NULL COMMENT 'tag:0', 80 | `id` BIGINT NOT NULL COMMENT 'tag:1', 81 | PRIMARY KEY (`id`) 82 | ); 83 | """ 84 | ) 85 | data = BASE_TEST_ROWS 86 | write_rows(repo, table_name, data, UPDATE, commit=False) 87 | repo.add(table_name) 88 | repo.commit(commit_message_main) 89 | repo.push("origin", "main", set_upstream=True) 90 | 91 | # checkout new branch and add a player 92 | repo.checkout(new_branch_name, checkout_branch=True) 93 | repo.sql(f'INSERT INTO `{table_name}` (`name`, `id`) VALUES ("Juan Martin", 5)') 94 | repo.add(table_name) 95 | repo.commit(commit_message_new_branch) 96 | 97 | # push new branch to remote and delete local branch 98 | repo.push("origin", new_branch_name, set_upstream=True) 99 | repo.checkout("main") 100 | repo.branch(new_branch_name, delete=True, force=True) 101 | 102 | # discard any uncommitted changes left on main 103 | repo.reset(hard=True) 104 | 105 | return repo, new_branch_name, commit_message_main, commit_message_new_branch 106 | 107 | 108 | def test_init(tmp_path): 109 | repo_path, repo_data_dir = get_repo_path_tmp_path(tmp_path) 110 | assert not os.path.exists(repo_data_dir) 111 | Dolt.init(repo_path) 112 | assert os.path.exists(repo_data_dir) 113 | shutil.rmtree(repo_data_dir) 114 | 115 | 116 | def test_home_path(): 117 | path = "~/.dolt_test" 118 | if os.path.exists(os.path.expanduser(path)): 119 | shutil.rmtree(os.path.expanduser(path)) 120 | os.mkdir(os.path.expanduser(path)) 121 | # Create empty file 122 | open(os.path.expanduser(path + "/.dolt"), "a").close() 123 | Dolt(path) 124 | assert os.path.exists(path) 125 | shutil.rmtree(path) 126 | 127 | 128 | def test_bad_repo_path(tmp_path): 129 | bad_repo_path = tmp_path 130 | with pytest.raises(ValueError): 131 | Dolt(bad_repo_path) 132 | 133 | 134 | def test_commit(create_test_table: Tuple[Dolt, str]): 135 | repo, test_table = create_test_table 136 | repo.add(test_table) 137 | before_commit_count = len(repo.log()) 138 | repo.commit("Julianna, the very serious intellectual") 139 | assert repo.status().is_clean and len(repo.log()) == before_commit_count + 1 140 | 141 | 142 | def test_head(create_test_table: Tuple[Dolt, str]): 143 | repo, test_table = create_test_table 144 | assert list(repo.log().values())[0].ref == repo.head 145 | 146 | 147 | @pytest.mark.xfail(reason="Dolt cli bug with --result-format") 148 | def test_working(doltdb): 149 | db = Dolt(doltdb) 150 | assert db.head != db.working 151 | 152 | 153 | def test_active_branch(create_test_table: Tuple[Dolt, str]): 154 | repo, test_table = create_test_table 155 | assert "main" == repo.active_branch 156 | 157 | 158 | def test_merge_fast_forward(create_test_table: Tuple[Dolt, str]): 159 | repo, test_table = create_test_table 160 | message_one = "Base branch" 161 | message_two = "Other branch" 162 | message_merge = "merge" 163 | 164 | # commit the current working set to main 165 | repo.add(test_table) 166 | repo.commit(message_one) 167 | 168 | # create another branch from the working set 169 | repo.branch("other") 170 | 171 | # create a non-trivial commit against `other` 172 | repo.checkout("other") 173 | repo.sql('INSERT INTO 
`test_players` (`name`, `id`) VALUES ("Juan Martin", 5)') 174 | repo.add(test_table) 175 | repo.commit(message_two) 176 | 177 | # merge 178 | repo.checkout("main") 179 | repo.merge("other", message_merge) 180 | 181 | commits = list(repo.log().values()) 182 | fast_forward_commit = commits[0] 183 | parent = commits[1] 184 | 185 | assert isinstance(fast_forward_commit.parents, str) 186 | assert fast_forward_commit.message == message_two 187 | assert parent.message == message_one 188 | 189 | 190 | @pytest.mark.xfail(reason="Unresolved conflicts requires change test") 191 | def test_merge_conflict(create_test_table: Tuple[Dolt, str]): 192 | repo, test_table = create_test_table 193 | message_one = "Base branch" 194 | message_two = "Base branch new data" 195 | message_three = "Other branch" 196 | message_merge = "merge" 197 | # commit the current working set to main 198 | repo.add(test_table) 199 | repo.commit(message_one) 200 | 201 | # create another branch from the working set 202 | repo.branch("other") 203 | 204 | # create a non-trivial commit against `main` 205 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') 206 | repo.add(test_table) 207 | repo.commit(message_two) 208 | 209 | # create a non-trivial commit against `other` 210 | repo.checkout("other") 211 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Marin", 4)') 212 | repo.add(test_table) 213 | repo.commit(message_three) 214 | 215 | # merge 216 | repo.checkout("main") 217 | with pytest.raises(DoltException): 218 | repo.merge("other", message_merge) 219 | 220 | # commits = list(repo.log().values()) 221 | # head_of_main = commits[0] 222 | 223 | # assert head_of_main.message == message_two 224 | 225 | 226 | def test_dolt_log(create_test_table: Tuple[Dolt, str]): 227 | repo, test_table = create_test_table 228 | message_one = "Julianna, the very serious intellectual" 229 | message_two = "Added Stan the Man" 230 | repo.add(test_table) 231 | repo.commit(message_one) 232 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') 233 | repo.add(test_table) 234 | repo.commit(message_two) 235 | commits = list(repo.log().values()) 236 | current_commit = commits[0] 237 | previous_commit = commits[1] 238 | assert current_commit.message == message_two 239 | assert previous_commit.message == message_one 240 | 241 | 242 | def test_dolt_log_scope(create_test_table: Tuple[Dolt, str]): 243 | repo, test_table = create_test_table 244 | message_one = "Julianna, the very serious intellectual" 245 | message_two = "Added Stan the Man" 246 | repo.add(test_table) 247 | repo.commit(message_one) 248 | repo.checkout("tmp_br", checkout_branch=True) 249 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') 250 | repo.add(test_table) 251 | repo.commit(message_two) 252 | repo.checkout("main") 253 | commits = list(repo.log().values()) 254 | current_commit = commits[0] 255 | previous_commit = commits[1] 256 | assert current_commit.message == message_one 257 | 258 | 259 | def test_dolt_log_number(create_test_table: Tuple[Dolt, str]): 260 | repo, test_table = create_test_table 261 | message_one = "Julianna, the very serious intellectual" 262 | message_two = "Added Stan the Man" 263 | repo.add(test_table) 264 | repo.commit(message_one) 265 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') 266 | repo.add(test_table) 267 | repo.commit(message_two) 268 | 269 | commits = list(repo.log(number=1).values()) 270 | 271 | assert len(commits) == 1 272 | current_commit = commits[0] 273 | 
assert current_commit.message == message_two 274 | 275 | 276 | def test_dolt_single_commit_log(create_test_table: Tuple[Dolt, str]): 277 | repo, test_table = create_test_table 278 | assert len(repo.log()) == 1 279 | 280 | 281 | def test_dolt_log_commit(create_test_table: Tuple[Dolt, str]): 282 | repo, test_table = create_test_table 283 | message_one = "Julianna, the very serious intellectual" 284 | message_two = "Added Stan the Man" 285 | repo.add(test_table) 286 | repo.commit(message_one) 287 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') 288 | repo.add(test_table) 289 | repo.commit(message_two) 290 | 291 | commits = list(repo.log(number=1).values()) 292 | commits = list(repo.log(commit=commits[0].ref).values()) 293 | 294 | assert len(commits) == 1 295 | current_commit = commits[0] 296 | assert current_commit.message == message_two 297 | 298 | 299 | @pytest.mark.xfail(reason="Setting up the test is not done correctly") 300 | def test_dolt_log_merge_commit(create_test_table: Tuple[Dolt, str]): 301 | repo, test_table = create_test_table 302 | message_one = "Base branch" 303 | message_two = "Base branch new data" 304 | message_three = "Other branch" 305 | message_merge = "merge" 306 | # commit the current working set to main 307 | repo.add(test_table) 308 | repo.commit(message_one) 309 | 310 | # create another branch from the working set 311 | repo.branch("other") 312 | 313 | # create a non-trivial commit against `main` 314 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') 315 | repo.add(test_table) 316 | repo.commit(message_two) 317 | 318 | # create a non-trivial commit against `other` 319 | repo.checkout("other") 320 | repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Juan Martin", 5)') 321 | repo.add(test_table) 322 | repo.commit(message_three) 323 | 324 | # merge 325 | repo.checkout("main") 326 | repo.merge("other", message_merge) 327 | 328 | commits = list(repo.log().values()) 329 | merge_commit = commits[0] 330 | first_merge_parent = commits[1] 331 | second_merge_parent = commits[2] 332 | 333 | assert merge_commit.message == message_merge 334 | assert {first_merge_parent.ref, second_merge_parent.ref} == set(merge_commit.parents) 335 | 336 | 337 | def test_get_dirty_tables(create_test_table: Tuple[Dolt, str]): 338 | repo, test_table = create_test_table 339 | message = "Committing test data" 340 | 341 | # Some test data 342 | initial = [dict(id=1, name="Bianca", role="Champion")] 343 | appended_row = [dict(id=1, name="Serena", role="Runner-up")] 344 | 345 | def _insert_row_helper(repo, table, row): 346 | write_rows(repo, table, row, UPDATE, commit=False) 347 | 348 | # existing, not modified 349 | repo.add(test_table) 350 | repo.commit(message) 351 | 352 | # existing, modified, staged 353 | modified_staged = "modified_staged" 354 | write_rows(repo, modified_staged, initial, commit=False) 355 | repo.add(modified_staged) 356 | 357 | # existing, modified, unstaged 358 | modified_unstaged = "modified_unstaged" 359 | write_rows(repo, modified_unstaged, initial, commit=False) 360 | repo.add(modified_unstaged) 361 | 362 | # Commit and modify data 363 | repo.commit(message) 364 | _insert_row_helper(repo, modified_staged, appended_row) 365 | write_rows(repo, modified_staged, appended_row, UPDATE, commit=False) 366 | repo.add(modified_staged) 367 | write_rows(repo, modified_unstaged, appended_row, UPDATE, commit=False) 368 | 369 | # created, staged 370 | created_staged = "created_staged" 371 | write_rows( 372 | repo, 373 | 
created_staged, 374 | initial, 375 | import_mode=CREATE, 376 | primary_key=["id"], 377 | commit=False, 378 | ) 379 | repo.add(created_staged) 380 | 381 | # created, unstaged 382 | created_unstaged = "created_unstaged" 383 | write_rows( 384 | repo, 385 | created_unstaged, 386 | initial, 387 | import_mode=CREATE, 388 | primary_key=["id"], 389 | commit=False, 390 | ) 391 | 392 | status = repo.status() 393 | 394 | expected_new_tables = {"created_staged": True, "created_unstaged": False} 395 | expected_changes = {"modified_staged": True, "modified_unstaged": False} 396 | 397 | assert status.added_tables == expected_new_tables 398 | assert status.modified_tables == expected_changes 399 | 400 | 401 | def test_checkout_with_tables(create_test_table: Tuple[Dolt, str]): 402 | repo, test_table = create_test_table 403 | repo.checkout(tables=test_table) 404 | assert repo.status().is_clean 405 | 406 | 407 | def test_branch(create_test_table: Tuple[Dolt, str]): 408 | repo, _ = create_test_table 409 | active_branch, branches = repo.branch() 410 | assert [active_branch.name] == [branch.name for branch in branches] == ["main"] 411 | 412 | repo.checkout("dosac", checkout_branch=True) 413 | repo.checkout("main") 414 | next_active_branch, next_branches = repo.branch() 415 | assert set(branch.name for branch in next_branches) == {"main", "dosac"} and next_active_branch.name == "main" 416 | 417 | repo.checkout("dosac") 418 | different_active_branch, _ = repo.branch() 419 | assert different_active_branch.name == "dosac" 420 | 421 | 422 | # we want to make sure that we can delete a branch atomically 423 | def test_branch_delete(create_test_table: Tuple[Dolt, str]): 424 | repo, _ = create_test_table 425 | 426 | _verify_branches(repo, ["main"]) 427 | 428 | repo.checkout("dosac", checkout_branch=True) 429 | repo.checkout("main") 430 | _verify_branches(repo, ["main", "dosac"]) 431 | 432 | repo.branch("dosac", delete=True) 433 | _verify_branches(repo, ["main"]) 434 | 435 | 436 | def test_branch_move(create_test_table: Tuple[Dolt, str]): 437 | repo, _ = create_test_table 438 | 439 | _verify_branches(repo, ["main"]) 440 | 441 | repo.branch("main", move=True, new_branch="dosac") 442 | _verify_branches(repo, ["dosac"]) 443 | 444 | 445 | def _verify_branches(repo: Dolt, branch_list: List[str]): 446 | _, branches = repo.branch() 447 | assert set(branch.name for branch in branches) == set(branch_list) 448 | 449 | 450 | def test_remote_list(create_test_table: Tuple[Dolt, str]): 451 | repo, _ = create_test_table 452 | repo.remote(add=True, name="origin", url="blah-blah") 453 | assert repo.remote()[0].name == "origin" 454 | repo.remote(add=True, name="another-origin", url="blah-blah") 455 | assert set([remote.name for remote in repo.remote()]) == { 456 | "origin", 457 | "another-origin", 458 | } 459 | 460 | 461 | def test_pull_from_main(test_repo_with_two_remote_branches): 462 | repo, __, commit_message_main, __ = test_repo_with_two_remote_branches 463 | 464 | # pull remote 465 | repo.pull("origin") 466 | commit_message_to_check = list(repo.log().values())[0].message 467 | 468 | # verify that the commit message is the same as the one in main 469 | assert commit_message_to_check == commit_message_main 470 | 471 | 472 | def test_pull_from_branch(test_repo_with_two_remote_branches): 473 | ( 474 | repo, 475 | new_branch_name, 476 | __, 477 | commit_message_new_branch, 478 | ) = test_repo_with_two_remote_branches 479 | 480 | # pull remote new_branch into current branch 481 | repo.pull("origin", new_branch_name) 
482 | commit_message_to_check = list(repo.log().values())[0].message 483 | 484 | # verify that the commit message is the same as the one we pushed to new_branch 485 | assert commit_message_to_check == commit_message_new_branch 486 | 487 | 488 | def test_get_branches_local(test_repo_with_two_remote_branches): 489 | ( 490 | repo, 491 | __, 492 | __, 493 | __, 494 | ) = test_repo_with_two_remote_branches 495 | 496 | _, local = repo._get_branches() 497 | 498 | assert len(local) == 1 499 | assert local[0].name == "main" 500 | 501 | 502 | def test_get_branches_remote(test_repo_with_two_remote_branches): 503 | ( 504 | repo, 505 | new_branch_name, 506 | __, 507 | __, 508 | ) = test_repo_with_two_remote_branches 509 | 510 | _, remote = repo._get_branches(remote=True) 511 | 512 | assert len(remote) == 2 513 | assert remote[0].name == "remotes/origin/main" 514 | assert remote[1].name == f"remotes/origin/{new_branch_name}" 515 | 516 | 517 | def test_get_branches_all(test_repo_with_two_remote_branches): 518 | ( 519 | repo, 520 | new_branch_name, 521 | __, 522 | __, 523 | ) = test_repo_with_two_remote_branches 524 | 525 | _, all = repo._get_branches(all=True) 526 | 527 | assert len(all) == 3 528 | assert all[0].name == "main" 529 | assert all[1].name == "remotes/origin/main" 530 | assert all[2].name == f"remotes/origin/{new_branch_name}" 531 | 532 | 533 | def test_checkout_non_existent_branch(doltdb): 534 | repo = Dolt(doltdb) 535 | repo.checkout("main") 536 | 537 | 538 | def test_ls(create_test_table: Tuple[Dolt, str]): 539 | repo, test_table = create_test_table 540 | assert [table.name for table in repo.ls()] == [test_table] 541 | 542 | 543 | def test_ls_empty(init_empty_test_repo: Dolt): 544 | repo = init_empty_test_repo 545 | assert len(repo.ls()) == 0 546 | 547 | 548 | def test_sql(create_test_table: Tuple[Dolt, str]): 549 | repo, test_table = create_test_table 550 | sql = """ 551 | INSERT INTO {table} (name, id) 552 | VALUES ('Roger', 3) 553 | """.format( 554 | table=test_table 555 | ) 556 | repo.sql(query=sql) 557 | 558 | test_data = read_rows(repo, test_table) 559 | assert "Roger" in [x["name"] for x in test_data] 560 | 561 | 562 | def test_sql_json(create_test_table: Tuple[Dolt, str]): 563 | repo, test_table = create_test_table 564 | result = repo.sql(query="SELECT * FROM `{table}`".format(table=test_table), result_format="json")["rows"] 565 | _verify_against_base_rows(result) 566 | 567 | 568 | def test_sql_csv(create_test_table: Tuple[Dolt, str]): 569 | repo, test_table = create_test_table 570 | result = repo.sql(query="SELECT * FROM `{table}`".format(table=test_table), result_format="csv") 571 | _verify_against_base_rows(result) 572 | 573 | 574 | def _verify_against_base_rows(result: List[dict]): 575 | assert len(result) == len(BASE_TEST_ROWS) 576 | 577 | result_sorted = sorted(result, key=lambda el: el["id"]) 578 | for left, right in zip(BASE_TEST_ROWS, result_sorted): 579 | assert set(left.keys()) == set(right.keys()) 580 | for k in left.keys(): 581 | # Unfortunately csv.DictReader is a stream reader and thus does not look at all values for a given column 582 | # and make type inference, so we have to cast everything to a string. JSON round-trips, but would not 583 | # preserve datetime objects for example. 
584 | assert str(left[k]) == str(right[k]) 585 | 586 | 587 | TEST_IMPORT_FILE_DATA = """ 588 | name,id 589 | roger,1 590 | rafa,2 591 | """.lstrip() 592 | 593 | 594 | def test_schema_import_create(init_empty_test_repo: Dolt, tmp_path): 595 | repo = init_empty_test_repo 596 | table = "test_table" 597 | test_file = tmp_path / "test_data.csv" 598 | with open(test_file, "w") as f: 599 | f.writelines(TEST_IMPORT_FILE_DATA) 600 | repo.schema_import(table=table, create=True, pks=["id"], filename=test_file) 601 | 602 | assert repo.status().added_tables == {table: False} 603 | 604 | 605 | def test_config_global(init_empty_test_repo: Dolt): 606 | _ = init_empty_test_repo 607 | current_global_config = Dolt.config_global(list=True) 608 | test_username, test_email = "test_user", "test_email" 609 | Dolt.config_global(add=True, name="user.name", value=test_username) 610 | Dolt.config_global(add=True, name="user.email", value=test_email) 611 | updated_config = Dolt.config_global(list=True) 612 | assert updated_config["user.name"] == test_username and updated_config["user.email"] == test_email 613 | Dolt.config_global(add=True, name="user.name", value=current_global_config["user.name"]) 614 | Dolt.config_global(add=True, name="user.email", value=current_global_config["user.email"]) 615 | reset_config = Dolt.config_global(list=True) 616 | assert reset_config["user.name"] == current_global_config["user.name"] 617 | assert reset_config["user.email"] == current_global_config["user.email"] 618 | 619 | 620 | def test_config_local(init_empty_test_repo: Dolt): 621 | repo = init_empty_test_repo 622 | current_global_config = Dolt.config_global(list=True) 623 | test_username, test_email = "test_user", "test_email" 624 | repo.config_local(add=True, name="user.name", value=test_username) 625 | repo.config_local(add=True, name="user.email", value=test_email) 626 | local_config = repo.config_local(list=True) 627 | global_config = Dolt.config_global(list=True) 628 | assert local_config["user.name"] == test_username and local_config["user.email"] == test_email 629 | assert global_config["user.name"] == current_global_config["user.name"] 630 | assert global_config["user.email"] == current_global_config["user.email"] 631 | 632 | 633 | def test_detached_head_cm(doltdb): 634 | db = Dolt(doltdb) 635 | commits = list(db.log().keys()) 636 | 637 | with detach_head(db, commits[1]): 638 | sum1 = db.sql("select sum(a) as sum from t1", result_format="csv")[0] 639 | 640 | with detach_head(db, commits[0]): 641 | sum2 = db.sql("select sum(a) as sum from t1", result_format="csv")[0] 642 | 643 | assert sum1["sum"] == "3" 644 | assert sum2["sum"] == "6" 645 | 646 | 647 | def test_get_clone_dir_no_remote(tmp_path): 648 | new_dir = os.path.join(tmp_path, "new_dir") 649 | res = Dolt._get_clone_dir(new_dir) 650 | assert new_dir == res 651 | 652 | 653 | def test_get_clone_dir_remote_only(tmp_path): 654 | new_dir = os.path.join(os.getcwd(), "remote") 655 | res = Dolt._get_clone_dir(remote_url="some/remote") 656 | assert new_dir == res 657 | 658 | 659 | def test_get_clone_dir_new_dir_only(tmp_path): 660 | res = Dolt._get_clone_dir("new_dir") 661 | assert "new_dir" == res 662 | 663 | 664 | def test_get_clone_dir_new_dir_and_remote(tmp_path): 665 | new_dir = os.path.join("foo/bar", "remote") 666 | res = Dolt._get_clone_dir(new_dir="foo/bar", remote_url="some/remote") 667 | assert new_dir == res 668 | 669 | 670 | def test_clone_new_dir(tmp_path): 671 | target = os.path.join(tmp_path, "state_age") 672 | Dolt.clone("max-hoffman/state-age", 
new_dir=target) 673 | db = Dolt(target) 674 | assert db.head is not None 675 | 676 | 677 | def test_dolt_sql_csv(init_empty_test_repo: Dolt): 678 | dolt = init_empty_test_repo 679 | write_rows(dolt, "test_table", BASE_TEST_ROWS, commit=True) 680 | result = dolt.sql("SELECT `name` as name, `id` as id FROM test_table ORDER BY id", result_format="csv") 681 | compare_rows_helper(BASE_TEST_ROWS, result) 682 | 683 | 684 | def test_dolt_sql_json(init_empty_test_repo: Dolt): 685 | dolt = init_empty_test_repo 686 | write_rows(dolt, "test_table", BASE_TEST_ROWS, commit=True) 687 | result = dolt.sql("SELECT `name` as name, `id` as id FROM test_table ", result_format="json") 688 | # JSON return value preserves some type information, we cast back to a string 689 | for row in result["rows"]: 690 | row["id"] = str(row["id"]) 691 | compare_rows_helper(BASE_TEST_ROWS, result["rows"]) 692 | 693 | 694 | def test_dolt_sql_file(init_empty_test_repo: Dolt): 695 | dolt = init_empty_test_repo 696 | 697 | with tempfile.NamedTemporaryFile() as f: 698 | write_rows(dolt, "test_table", BASE_TEST_ROWS, commit=True) 699 | result = dolt.sql("SELECT `name` as name, `id` as id FROM test_table ", result_file=f.name) 700 | res = read_csv_to_dict(f.name) 701 | compare_rows_helper(BASE_TEST_ROWS, res) 702 | 703 | 704 | def test_dolt_sql_errors(doltdb): 705 | db = Dolt(doltdb) 706 | 707 | with pytest.raises(ValueError): 708 | db.sql(result_parser=lambda x: x, query=None) 709 | with pytest.raises(ValueError): 710 | db.sql(result_parser=2, query="select active_branch()") 711 | with pytest.raises(ValueError): 712 | db.sql(result_file="file.csv", query=None) 713 | with pytest.raises(ValueError): 714 | db.sql(result_format="csv", query=None) 715 | 716 | 717 | def test_no_init_error(init_empty_test_repo: Dolt): 718 | dolt = init_empty_test_repo 719 | 720 | dolt.init(dolt.repo_dir, error=False) 721 | 722 | 723 | def test_set_dolt_path_error(doltdb): 724 | db = Dolt(doltdb) 725 | set_dolt_path("dolt") 726 | test_cmd = "show tables" 727 | db.sql(test_cmd, result_format="csv") 728 | try: 729 | with pytest.raises(FileNotFoundError): 730 | set_dolt_path("notdolt") 731 | from doltcli.utils import DOLT_PATH 732 | 733 | assert DOLT_PATH == "notdolt" 734 | db.sql(test_cmd, result_format="csv") 735 | finally: 736 | set_dolt_path("dolt") 737 | 738 | 739 | def test_no_checkout_error(init_empty_test_repo: Dolt): 740 | dolt = init_empty_test_repo 741 | 742 | dolt.checkout(branch="main", error=False) 743 | 744 | 745 | def test_reset(doltdb): 746 | db = Dolt(doltdb) 747 | db.reset() 748 | db.reset(hard=True) 749 | db.reset(soft=True) 750 | db.reset(tables="t1") 751 | db.reset(tables=["t1"]) 752 | db.reset(revision="head~1", soft=True) 753 | with pytest.raises(ValueError): 754 | db.reset(tables=["t1"], revision="head~1") 755 | 756 | 757 | def test_reset_errors(doltdb): 758 | db = Dolt(doltdb) 759 | with pytest.raises(ValueError): 760 | db.reset(hard=True, soft=True) 761 | with pytest.raises(ValueError): 762 | db.reset(tables="t1", hard=True) 763 | with pytest.raises(ValueError): 764 | db.reset(tables="t1", soft=True) 765 | with pytest.raises(ValueError): 766 | db.reset(tables={"t1": True}) 767 | 768 | 769 | def test_repo_name_trailing_slash(tmp_path): 770 | repo_path, repo_data_dir = get_repo_path_tmp_path(tmp_path) 771 | assert Dolt.init(str(repo_path) + "/").repo_name == "test_repo_name_trailing_slash0" 772 | shutil.rmtree(repo_data_dir) 773 | -------------------------------------------------------------------------------- /tests/test_read.py: 
-------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import pytest 4 | 5 | from doltcli import ( 6 | CREATE, 7 | UPDATE, 8 | Dolt, 9 | columns_to_rows, 10 | read_columns, 11 | read_rows, 12 | write_rows, 13 | ) 14 | from tests.helpers import compare_rows_helper 15 | 16 | TEST_TABLE = "characters" 17 | TEST_DATA_INITIAL = [ 18 | {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, 19 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 20 | {"name": "Oblonksy", "adjective": "buffoon", "id": "3", "date_of_death": ""}, 21 | ] 22 | 23 | TEST_DATA_UPDATE = [{"name": "Levin", "adjective": "tiresome", "id": "4", "date_of_death": ""}] 24 | 25 | TEST_DATA_COMBINED = TEST_DATA_INITIAL + TEST_DATA_UPDATE 26 | 27 | 28 | @pytest.fixture() 29 | def with_initial_test_data(init_empty_test_repo): 30 | dolt = init_empty_test_repo 31 | return _write_helper(dolt, TEST_DATA_INITIAL, CREATE) 32 | 33 | 34 | def update_test_data(dolt: Dolt): 35 | _, commit = _write_helper(dolt, TEST_DATA_UPDATE, UPDATE) 36 | return commit 37 | 38 | 39 | def _write_helper(dolt: Dolt, data: List[dict], update_type: str): 40 | write_rows(dolt, TEST_TABLE, data, update_type, ["id"], commit=True) 41 | commit_hash, _ = dolt.log().popitem(last=False)  # log is ordered newest-first, so this is the commit just written 42 | return dolt, commit_hash 43 | 44 | 45 | def test_read_rows(with_initial_test_data): 46 | dolt, first_commit = with_initial_test_data 47 | second_commit = update_test_data(dolt) 48 | first_write = read_rows(dolt, TEST_TABLE, first_commit) 49 | compare_rows_helper(first_write, TEST_DATA_INITIAL) 50 | second_write = read_rows(dolt, TEST_TABLE, second_commit) 51 | compare_rows_helper(second_write, TEST_DATA_COMBINED) 52 | 53 | 54 | def test_read_columns(with_initial_test_data): 55 | dolt, first_commit = with_initial_test_data 56 | second_commit = update_test_data(dolt) 57 | first_write = read_columns(dolt, TEST_TABLE, first_commit) 58 | compare_rows_helper(columns_to_rows(first_write), TEST_DATA_INITIAL) 59 | second_write = columns_to_rows(read_columns(dolt, TEST_TABLE, second_commit)) 60 | second_write.sort(key=lambda x: int(x["id"])) 61 | compare_rows_helper(second_write, TEST_DATA_COMBINED) 62 | -------------------------------------------------------------------------------- /tests/test_types.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from doltcli import Branch 4 | 5 | dt = datetime.datetime.strptime("2018-06-29", "%Y-%m-%d") 6 | 7 | 8 | def test_datetime_serialize(): 9 | cmp = dict( 10 | name="test", 11 | hash="23", 12 | latest_committer=None, 13 | latest_commit_date=dt, 14 | latest_committer_email=None, 15 | latest_commit_message=None, 16 | remote=None, 17 | branch=None, 18 | ) 19 | br = Branch(**cmp) 20 | assert br.dict() == cmp 21 | assert ( 22 | br.json() 23 | == """ 24 | {"name": "test", "hash": "23", "latest_committer": null, "latest_committer_email": null, "latest_commit_date": "2018-06-29 00:00:00", "latest_commit_message": null, "remote": null, "branch": null} 25 | """.strip() 26 | ) 27 | -------------------------------------------------------------------------------- /tests/test_write.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from doltcli import ( 6 | CREATE, 7 | DoltException, 8 | read_rows, 9 | write_columns, 10 | write_file, 11 | write_rows, 12 | ) 13 | from tests.helpers import 
compare_rows_helper, write_dict_to_csv 14 | 15 | # Note that we use string values here as serializing via CSV does not preserve type information in any meaningful way 16 | TEST_ROWS = [ 17 | {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, 18 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 19 | {"name": "Oblonksy", "adjective": "buffoon", "id": "3", "date_of_death": ""}, 20 | ] 21 | 22 | TEST_COLUMNS = { 23 | "name": ["Anna", "Vronksy", "Oblonksy"], 24 | "adjective": ["tragic", "honorable", "buffoon"], 25 | "id": ["1", "2", "3"], 26 | "date_of_birth": ["1840-01-01", "1840-01-01", "1840-01-01"], 27 | "date_of_death": ["1877-01-01", "", ""], 28 | } 29 | 30 | 31 | def test_write_rows(init_empty_test_repo): 32 | dolt = init_empty_test_repo 33 | write_rows(dolt, "characters", TEST_ROWS, CREATE, ["id"]) 34 | actual = read_rows(dolt, "characters") 35 | compare_rows_helper(TEST_ROWS, actual) 36 | 37 | 38 | def test_update_rows(init_empty_test_repo): 39 | dolt = init_empty_test_repo 40 | write_rows(dolt, "characters", TEST_ROWS, CREATE, ["id"]) 41 | 42 | new_row = {"name": "dick butkus", "adjective": "buffoon", "id": "3", "date_of_death": ""} 43 | 44 | write_rows(dolt, "characters", [new_row], "update", ["id"]) 45 | actual = read_rows(dolt, "characters") 46 | exp = [ 47 | {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, 48 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 49 | {"name": "dick butkus", "adjective": "buffoon", "id": "3", "date_of_death": ""}, 50 | ] 51 | compare_rows_helper(exp, actual) 52 | 53 | 54 | def test_replace_rows(init_empty_test_repo): 55 | dolt = init_empty_test_repo 56 | write_rows(dolt, "characters", TEST_ROWS, CREATE, ["id"]) 57 | 58 | new_row = {"name": "dick butkus", "adjective": "buffoon", "id": "3", "date_of_death": ""} 59 | 60 | write_rows(dolt, "characters", [new_row], "replace", ["id"]) 61 | actual = read_rows(dolt, "characters") 62 | exp = [ 63 | {"name": "dick butkus", "adjective": "buffoon", "id": "3", "date_of_death": ""}, 64 | ] 65 | compare_rows_helper(exp, actual) 66 | 67 | 68 | def test_write_columns(init_empty_test_repo): 69 | dolt = init_empty_test_repo 70 | write_columns(dolt, "characters", TEST_COLUMNS, CREATE, ["id"]) 71 | actual = read_rows(dolt, "characters") 72 | expected = [{} for _ in range(len(list(TEST_COLUMNS.values())[0]))] 73 | for col_name in TEST_COLUMNS.keys(): 74 | for j, val in enumerate(TEST_COLUMNS[col_name]): 75 | expected[j][col_name] = val 76 | 77 | compare_rows_helper(expected, actual) 78 | 79 | 80 | DICT_OF_LISTS_UNEVEN_LENGTHS = {"name": ["Roger", "Rafael", "Novak"], "rank": [1, 2]} 81 | 82 | 83 | def test_write_columns_uneven(init_empty_test_repo): 84 | repo = init_empty_test_repo 85 | with pytest.raises(ValueError): 86 | write_columns(repo, "players", DICT_OF_LISTS_UNEVEN_LENGTHS, CREATE, ["name"]) 87 | 88 | 89 | def test_write_file_handle(init_empty_test_repo, tmp_path): 90 | tempfile = tmp_path / "test.csv" 91 | TEST_ROWS = [ 92 | {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, 93 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 94 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""},  # duplicate primary key: the import below fails unless do_continue=True 95 | ] 96 | write_dict_to_csv(TEST_ROWS, tempfile) 97 | dolt = init_empty_test_repo 98 | with pytest.raises(DoltException): 99 | write_file( 100 | dolt=dolt, 101 | table="characters", 102 | file_handle=open(tempfile), 103 | 
import_mode=CREATE, 104 | primary_key=["id"], 105 | ) 106 | write_file( 107 | dolt=dolt, 108 | table="characters", 109 | file_handle=open(tempfile), 110 | import_mode=CREATE, 111 | primary_key=["id"], 112 | do_continue=True, 113 | ) 114 | actual = read_rows(dolt, "characters") 115 | compare_rows_helper(TEST_ROWS[:2], actual) 116 | 117 | 118 | def test_write_file(init_empty_test_repo, tmp_path): 119 | tempfile = tmp_path / "test.csv" 120 | TEST_ROWS = [ 121 | {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, 122 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 123 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 124 | ] 125 | write_dict_to_csv(TEST_ROWS, tempfile) 126 | dolt = init_empty_test_repo 127 | write_file( 128 | dolt=dolt, 129 | table="characters", 130 | file=tempfile, 131 | import_mode=CREATE, 132 | primary_key=["id"], 133 | do_continue=True, 134 | ) 135 | assert os.path.exists(tmp_path) 136 | actual = read_rows(dolt, "characters") 137 | compare_rows_helper(TEST_ROWS[:2], actual) 138 | 139 | 140 | def test_write_file_errors(init_empty_test_repo, tmp_path): 141 | tempfile = tmp_path / "test.csv" 142 | TEST_ROWS = [ 143 | {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, 144 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 145 | {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, 146 | ] 147 | write_dict_to_csv(TEST_ROWS, tempfile) 148 | dolt = init_empty_test_repo 149 | with pytest.raises(DoltException): 150 | write_file( 151 | dolt=dolt, 152 | table="characters", 153 | file_handle=open(tempfile), 154 | import_mode=CREATE, 155 | primary_key=["id"], 156 | ) 157 | with pytest.raises(ValueError): 158 | write_file( 159 | dolt=dolt, 160 | table="characters", 161 | file_handle=open(tempfile), 162 | file=tempfile, 163 | import_mode=CREATE, 164 | primary_key=["id"], 165 | ) 166 | with pytest.raises(ValueError): 167 | write_file( 168 | dolt=dolt, 169 | table="characters", 170 | import_mode=CREATE, 171 | primary_key=["id"], 172 | ) 173 | with pytest.raises(ValueError): 174 | write_file( 175 | dolt=dolt, 176 | file_handle=tempfile, 177 | table="characters", 178 | import_mode=CREATE, 179 | primary_key=["id"], 180 | ) 181 | --------------------------------------------------------------------------------