├── .github └── workflows │ ├── pull_request.yml │ ├── tag_release.yml │ └── test_release.yml ├── .gitignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── README.md ├── deta ├── __init__.py ├── _async │ ├── __init__.py │ └── client.py ├── base.py ├── drive.py ├── service.py └── utils.py ├── env.sample ├── pytest.ini ├── requirements.txt ├── scripts ├── build ├── install ├── publish ├── release ├── tag └── test_publish ├── setup.py └── tests ├── __init__.py ├── test_async.py └── test_sync.py /.github/workflows/pull_request.yml: -------------------------------------------------------------------------------- 1 | name: Test on Pull Request 2 | on: 3 | workflow_dispatch: 4 | pull_request: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | # tests can't run in parallel as they write and read data with same keys 13 | max-parallel: 1 14 | matrix: 15 | python-version: ["3.8", "3.9", "3.10", "3.11"] 16 | steps: 17 | # Get the code into the container 18 | - name: Checkout 19 | uses: actions/checkout@v3 20 | # Setup Python 21 | - name: Setup Python 22 | uses: actions/setup-python@v4 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | # Test the code 26 | - name: Test code 27 | env: 28 | DETA_SDK_TEST_PROJECT_KEY: ${{secrets.DETA_SDK_TEST_PROJECT_KEY}} 29 | DETA_SDK_TEST_BASE_NAME: ${{secrets.DETA_SDK_TEST_BASE_NAME}} 30 | DETA_SDK_TEST_DRIVE_NAME: ${{secrets.DETA_SDK_TEST_DRIVE_NAME}} 31 | DETA_SDK_TEST_DRIVE_HOST: ${{secrets.DETA_SDK_TEST_DRIVE_HOST}} 32 | DETA_SDK_TEST_TTL_ATTRIBUTE: __expires 33 | run: | 34 | python -m pip install --upgrade pip 35 | python -m pip install pytest pytest-asyncio aiohttp 36 | pytest 37 | -------------------------------------------------------------------------------- /.github/workflows/tag_release.yml: -------------------------------------------------------------------------------- 1 | name: Lint and tag before release 2 | 3 | on: 4 | push: 5 | branches: 6 | - release 7 | 8 | jobs: 9 | lint-and-tag: 10 | runs-on: ubuntu-18.04 11 | steps: 12 | # Get the code into our container 13 | - name: Checkout 14 | uses: actions/checkout@v3 15 | # Setup Python 16 | - name: Setup Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: ^3.5 20 | # Install dependencies 21 | - name: Install dependencies 22 | run: "scripts/install" 23 | # Make tag 24 | - name: Make git tag 25 | run: "scripts/tag" -------------------------------------------------------------------------------- /.github/workflows/test_release.yml: -------------------------------------------------------------------------------- 1 | name: Test release 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | release: 8 | runs-on: ubuntu-18.04 9 | steps: 10 | # Get the code into our container 11 | - name: Checkout 12 | uses: actions/checkout@v3 13 | # Setup Python 14 | - name: Setup Python 15 | uses: actions/setup-python@v4 16 | with: 17 | python-version: ^3.5 18 | # Install dependencies 19 | - name: Install dependencies 20 | run: "scripts/install" 21 | # Build package 22 | - name: Build package 23 | run: "scripts/build" 24 | # Publish with twine 25 | - name: Publish with twine 26 | run: "scripts/test_publish" 27 | env: 28 | TWINE_USERNAME: __token__ 29 | TWINE_PASSWORD: ${{ secrets.PYPI_TEST_TOKEN }} 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .* 2 | !.gitignore 3 | !.github/ 4 | __pycache__ 
5 | test.py 6 | build/ 7 | *.egg-info 8 | dist 9 | testEnv 10 | .env 11 | venv/ -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Latest 2 | * Added changelog -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## Setting up your environment 2 | 3 | ### Create a virtual environment 4 | 5 | ```sh 6 | python3 -m venv .venv 7 | ``` 8 | 9 | ### Activate the virtualenv 10 | 11 | **Mac/Linux** 12 | 13 | ```sh 14 | source .venv/bin/activate 15 | ``` 16 | 17 | **Windows** 18 | 19 | ```powershell 20 | .venv\Scripts\activate 21 | ``` 22 | 23 | ### Install the dependencies 24 | 25 | ```sh 26 | pip install -r requirements.txt 27 | ``` 28 | 29 | ### Configure the environment variables 30 | 31 | Make a copy of `env.sample` (provided in the root of the project) called `.env`. 32 | 33 | ```sh 34 | cp env.sample .env 35 | ``` 36 | 37 | Then provide the values as follows: 38 | 39 | - `DETA_SDK_TEST_PROJECT_KEY` – Test project key (create a new Deta project for testing and grab the generated key). 40 | - `DETA_SDK_TEST_BASE_NAME` – Name of your Base. 41 | - `DETA_SDK_TEST_DRIVE_NAME` – Name of your Drive, default is fine. 42 | - `DETA_SDK_TEST_DRIVE_HOST` – Host URL, default is fine. 43 | - `DETA_SDK_TEST_TTL_ATTRIBUTE` – TTL attribute, default is fine. 44 | 45 | ### Run the tests 46 | 47 | ```sh 48 | pytest tests 49 | ``` 50 | 51 | 🎉 Now you are ready to contribute! 52 | 53 | ### How to contribute 54 | 1. Clone the repository and create a feature branch 55 | 2. Make a draft PR 56 | 3. Make your changes to the feature branch 57 | 4. Mark the draft PR as ready for review 58 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Abstract Computing UG (haftungsbeschränkt) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: test build publish clean 2 | .DEFAULT_GOAL := help 3 | 4 | test: # Run Unit Test 5 | pytest tests 6 | 7 | test_email: # Test Send Email 8 | pytest tests -k "TestSendEmail" 9 | 10 | build: # Build distribution for SDK 11 | python setup.py sdist bdist_wheel 12 | 13 | publish: # Publish the package to PyPI 14 | python -m twine upload dist/* 15 | 16 | clean: # Remove distribution packages 17 | rm -rf dist build deta.egg-info 18 | 19 | format: # Format using black 20 | @black -l 100 . 21 | 22 | check: # Check for files to format using black 23 | @black -l 100 --check --diff . 24 | 25 | help: # Show this help 26 | @echo "Deta Python SDK" 27 | @egrep -h '\s#\s' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?# "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Deta Python Library (SDK) 2 | 3 | Please use a [supported version of Python](https://devguide.python.org/versions/). This package requires a minimum of Python 3.6. [Read the docs.](https://deta.space/docs/en/build/reference/sdk) 4 | 5 | Install from PyPI 6 | 7 | ```sh 8 | pip install deta 9 | ``` 10 | 11 | If you are interested in contributing, please look at [**CONTRIBUTING.md**](CONTRIBUTING.md). 12 | 13 | ## How to release (for maintainers) 14 | 1. Add changes to `CHANGELOG.md` 15 | 2. Merge the `master` branch with the `release` branch. 16 | 3. After the scripts finish, update the release and tag with relevant info 17 | -------------------------------------------------------------------------------- /deta/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import urllib.error 3 | import urllib.request 4 | import json 5 | from typing import Union 6 | 7 | from .base import _Base 8 | from .drive import _Drive 9 | from .utils import _get_project_key_id 10 | 11 | 12 | try: 13 | from ._async.client import AsyncBase # pyright: ignore 14 | except ImportError: 15 | pass 16 | 17 | __version__ = "1.2.0" 18 | 19 | 20 | def Base(name: str): 21 | project_key, project_id = _get_project_key_id() 22 | return _Base(name, project_key, project_id) 23 | 24 | 25 | def Drive(name: str): 26 | project_key, project_id = _get_project_key_id() 27 | return _Drive(name, project_key, project_id) 28 | 29 | 30 | class Deta: 31 | def __init__(self, project_key: Union[str, None] = None, *, project_id: Union[str, None] = None): 32 | project_key, project_id = _get_project_key_id(project_key, project_id) 33 | self.project_key = project_key 34 | self.project_id = project_id 35 | 36 | def Base(self, name: str, host: Union[str, None] = None): 37 | return _Base(name, self.project_key, self.project_id, host) 38 | 39 | def AsyncBase(self, name: str, host: Union[str, None] = None): 40 | from ._async.client import _AsyncBase 41 | 42 | return _AsyncBase(name, self.project_key, self.project_id, host) 43 | 44 | def Drive(self, name: str, host: Union[str, None] = None): 45 | return _Drive( 46 | name=name, 47 | project_key=self.project_key, 48 | project_id=self.project_id, 49 | host=host, 50 | ) 51 | 52 | def send_email(self, to, subject, message, charset="UTF-8"): 53 | return send_email(to, subject, message, charset) 54 | 55 | 56 | def send_email(to, subject, message,
charset="UTF-8"): 57 | pid = os.getenv("AWS_LAMBDA_FUNCTION_NAME") 58 | url = os.getenv("DETA_MAILER_URL") 59 | api_key = os.getenv("DETA_PROJECT_KEY") 60 | endpoint = f"{url}/mail/{pid}" 61 | 62 | to = to if type(to) == list else [to] 63 | data = { 64 | "to": to, 65 | "subject": subject, 66 | "message": message, 67 | "charset": charset, 68 | } 69 | 70 | assert api_key 71 | 72 | headers = {"X-API-Key": api_key} 73 | 74 | req = urllib.request.Request( 75 | endpoint, json.dumps(data).encode("utf-8"), headers) 76 | 77 | try: 78 | resp = urllib.request.urlopen(req) 79 | if resp.getcode() != 200: 80 | raise Exception(resp.read().decode("utf-8")) 81 | except urllib.error.URLError as e: 82 | raise Exception(e.reason) 83 | -------------------------------------------------------------------------------- /deta/_async/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deta/deta-python/930b94358a1aaf2355f27522b326792f26f38d05/deta/_async/__init__.py -------------------------------------------------------------------------------- /deta/_async/client.py: -------------------------------------------------------------------------------- 1 | from typing import Union, List 2 | import datetime 3 | import os 4 | from urllib.parse import quote 5 | 6 | import aiohttp 7 | 8 | from deta.utils import _get_project_key_id 9 | from deta.base import FetchResponse, Util, insert_ttl, BASE_TTL_ATTTRIBUTE 10 | 11 | 12 | def AsyncBase(name: str): 13 | project_key, project_id = _get_project_key_id() 14 | return _AsyncBase(name, project_key, project_id) 15 | 16 | 17 | class _AsyncBase: 18 | def __init__(self, name: str, project_key: str, project_id: str, host: Union[str, None] = None): 19 | if not project_key: 20 | raise AssertionError("No Base name provided") 21 | 22 | host = host or os.getenv("DETA_BASE_HOST") or "database.deta.sh" 23 | self._base_url = f"https://{host}/v1/{project_id}/{name}" 24 | 25 | self.util = Util() 26 | self.__ttl_attribute = BASE_TTL_ATTTRIBUTE 27 | 28 | self._session = aiohttp.ClientSession( 29 | headers={ 30 | "Content-type": "application/json", 31 | "X-API-Key": project_key, 32 | }, 33 | raise_for_status=True, 34 | ) 35 | 36 | async def close(self) -> None: 37 | await self._session.close() 38 | 39 | async def get(self, key: str): 40 | key = quote(key, safe="") 41 | 42 | try: 43 | async with self._session.get(f"{self._base_url}/items/{key}") as resp: 44 | return await resp.json() 45 | except aiohttp.ClientResponseError as e: 46 | if e.status == 404: 47 | return 48 | else: 49 | raise e 50 | 51 | async def delete(self, key: str): 52 | key = quote(key, safe="") 53 | 54 | async with self._session.delete(f"{self._base_url}/items/{key}"): 55 | return 56 | 57 | async def insert( 58 | self, 59 | data: Union[dict, list, str, int, bool], 60 | key: Union[str, None] = None, 61 | *, 62 | expire_in: Union[int, None] = None, 63 | expire_at: Union[int, float, datetime.datetime, None] = None, 64 | ): 65 | if not isinstance(data, dict): 66 | data = {"value": data} 67 | else: 68 | data = data.copy() 69 | 70 | if key: 71 | data["key"] = key 72 | 73 | insert_ttl(data, self.__ttl_attribute, 74 | expire_in=expire_in, expire_at=expire_at) 75 | async with self._session.post( 76 | f"{self._base_url}/items", json={"item": data} 77 | ) as resp: 78 | return await resp.json() 79 | 80 | async def put( 81 | self, 82 | data: Union[dict, list, str, int, bool], 83 | key: Union[str, None] = None, 84 | *, 85 | expire_in: Union[int, None] = None, 86 | 
expire_at: Union[int, float, datetime.datetime, None] = None, 87 | ): 88 | if not isinstance(data, dict): 89 | data = {"value": data} 90 | else: 91 | data = data.copy() 92 | 93 | if key: 94 | data["key"] = key 95 | 96 | 97 | insert_ttl(data, self.__ttl_attribute, 98 | expire_in=expire_in, expire_at=expire_at) 99 | async with self._session.put( 100 | f"{self._base_url}/items", json={"items": [data]} 101 | ) as resp: 102 | if resp.status == 207: 103 | resp_json = await resp.json() 104 | if "processed" in resp_json: 105 | return resp_json["processed"]["items"][0] 106 | return None 107 | 108 | async def put_many( 109 | self, 110 | items: List[Union[dict, list, str, int, bool]], 111 | *, 112 | expire_in: Union[int, None] = None, 113 | expire_at: Union[int, float, datetime.datetime, None] = None, 114 | ): 115 | if len(items) > 25: 116 | raise AssertionError("We can't put more than 25 items at a time.") 117 | _items = [] 118 | for i in items: 119 | data = i 120 | if not isinstance(i, dict): 121 | data = {"value": i} 122 | insert_ttl( 123 | data, self.__ttl_attribute, expire_in=expire_in, expire_at=expire_at 124 | ) 125 | _items.append(data) 126 | 127 | async with self._session.put( 128 | f"{self._base_url}/items", json={"items": _items} 129 | ) as resp: 130 | return await resp.json() 131 | 132 | async def fetch( 133 | self, 134 | query: Union[dict, list, None] = None, 135 | *, 136 | limit: int = 1000, 137 | last: Union[str, None] = None, 138 | desc: bool = False, 139 | ): 140 | payload = {} 141 | if query: 142 | payload["query"] = query if isinstance(query, list) else [query] 143 | if limit: 144 | payload["limit"] = limit 145 | if last: 146 | payload["last"] = last 147 | if desc: 148 | payload["sort"] = "desc" 149 | 150 | async with self._session.post(f"{self._base_url}/query", json=payload) as resp: 151 | resp_json = await resp.json() 152 | paging = resp_json.get("paging") 153 | return FetchResponse( 154 | paging.get("size"), paging.get("last"), resp_json.get("items") 155 | ) 156 | 157 | async def update( 158 | self, 159 | updates: dict, 160 | key: str, 161 | *, 162 | expire_in: Union[int, None] = None, 163 | expire_at: Union[int, float, datetime.datetime, None] = None, 164 | ): 165 | if key == "": 166 | raise ValueError("Key is empty") 167 | 168 | payload = { 169 | "set": {}, 170 | "increment": {}, 171 | "append": {}, 172 | "prepend": {}, 173 | "delete": [], 174 | } 175 | if updates: 176 | for attr, value in updates.items(): 177 | if isinstance(value, Util.Trim): 178 | payload["delete"].append(attr) 179 | elif isinstance(value, Util.Increment): 180 | payload["increment"][attr] = value.val 181 | elif isinstance(value, Util.Append): 182 | payload["append"][attr] = value.val 183 | elif isinstance(value, Util.Prepend): 184 | payload["prepend"][attr] = value.val 185 | else: 186 | payload["set"][attr] = value 187 | 188 | if not payload: 189 | raise ValueError("Provide at least one update action.") 190 | 191 | insert_ttl( 192 | payload["set"], 193 | self.__ttl_attribute, 194 | expire_in=expire_in, 195 | expire_at=expire_at, 196 | ) 197 | 198 | key = quote(key, safe="") 199 | 200 | await self._session.patch(f"{self._base_url}/items/{key}", json=payload) 201 | -------------------------------------------------------------------------------- /deta/base.py: -------------------------------------------------------------------------------- 1 | import os 2 | import datetime 3 | from typing import Union, List, Tuple, Optional 4 | from urllib.parse import quote 5 | 6 | from .service import _Service, JSON_MIME 7 
| 8 | # timeout for Base service in seconds 9 | BASE_SERVICE_TIMEOUT = 300 10 | BASE_TTL_ATTTRIBUTE = "__expires" 11 | 12 | 13 | class FetchResponse: 14 | def __init__(self, count=0, last=None, items=[]): 15 | self._count = count 16 | self._last = last 17 | self._items = items 18 | 19 | @property 20 | def count(self): 21 | return self._count 22 | 23 | @property 24 | def last(self): 25 | return self._last 26 | 27 | @property 28 | def items(self): 29 | return self._items 30 | 31 | def __eq__(self, other): 32 | return ( 33 | self.count == other.count 34 | and self.last == other.last 35 | and self.items == other.items 36 | ) 37 | 38 | 39 | class Util: 40 | class Trim: 41 | pass 42 | 43 | class Increment: 44 | def __init__(self, value=None): 45 | self.val = value 46 | if not value: 47 | self.val = 1 48 | 49 | class Append: 50 | def __init__(self, value): 51 | self.val = value 52 | if not isinstance(value, list): 53 | self.val = [value] 54 | 55 | class Prepend: 56 | def __init__(self, value): 57 | self.val = value 58 | if not isinstance(value, list): 59 | self.val = [value] 60 | 61 | def trim(self): 62 | return self.Trim() 63 | 64 | def increment(self, value: Union[int, float, None] = None): 65 | return self.Increment(value) 66 | 67 | def append(self, value: Union[dict, list, str, int, float, bool]): 68 | return self.Append(value) 69 | 70 | def prepend(self, value: Union[dict, list, str, int, float, bool]): 71 | return self.Prepend(value) 72 | 73 | 74 | class _Base(_Service): 75 | def __init__(self, name: str, project_key: str, project_id: str, host: Union[str, None] = None): 76 | assert name, "No Base name provided" 77 | 78 | host = host or os.getenv("DETA_BASE_HOST") or "database.deta.sh" 79 | super().__init__( 80 | project_key=project_key, 81 | project_id=project_id, 82 | host=host, 83 | name=name, 84 | timeout=BASE_SERVICE_TIMEOUT, 85 | ) 86 | self.__ttl_attribute = "__expires" 87 | self.util = Util() 88 | 89 | def get(self, key: str): 90 | if key == "": 91 | raise ValueError("Key is empty") 92 | 93 | # encode key 94 | key = quote(key, safe="") 95 | _, res = self._request("/items/{}".format(key), "GET") 96 | return res or None 97 | 98 | def delete(self, key: str): 99 | """Delete an item from the database 100 | key: the key of item to be deleted 101 | """ 102 | if key == "": 103 | raise ValueError("Key is empty") 104 | 105 | # encode key 106 | key = quote(key, safe="") 107 | self._request("/items/{}".format(key), "DELETE") 108 | return None 109 | 110 | def insert( 111 | self, 112 | data: Union[dict, list, str, int, bool], 113 | key: Union[str, None] = None, 114 | *, 115 | expire_in: Union[int, None] = None, 116 | expire_at: Union[int, float, datetime.datetime, None] = None, 117 | ): 118 | if not isinstance(data, dict): 119 | data = {"value": data} 120 | else: 121 | data = data.copy() 122 | 123 | if key: 124 | data["key"] = key 125 | 126 | insert_ttl(data, self.__ttl_attribute, 127 | expire_in=expire_in, expire_at=expire_at) 128 | code, res = self._request( 129 | "/items", "POST", {"item": data}, content_type=JSON_MIME 130 | ) 131 | if code == 201: 132 | return res 133 | elif code == 409: 134 | raise Exception("Item with key '{}' already exists".format(key)) 135 | 136 | def put( 137 | self, 138 | data: Union[dict, list, str, int, bool], 139 | key: Union[str, None] = None, 140 | *, 141 | expire_in: Union[int, None] = None, 142 | expire_at: Union[int, float, datetime.datetime, None] = None, 143 | ): 144 | """store (put) an item in the database. Overrides an item if key already exists.
145 | `key` could be provided as function argument or a field in the data dict. 146 | If `key` is not provided, the server will generate a random 12 chars key. 147 | """ 148 | 149 | if not isinstance(data, dict): 150 | data = {"value": data} 151 | else: 152 | data = data.copy() 153 | 154 | if key: 155 | data["key"] = key 156 | 157 | insert_ttl(data, self.__ttl_attribute, 158 | expire_in=expire_in, expire_at=expire_at) 159 | code, res = self._request( 160 | "/items", "PUT", {"items": [data]}, content_type=JSON_MIME 161 | ) 162 | 163 | if code == 207 and "processed" in res: 164 | return res["processed"]["items"][0] 165 | else: 166 | return None 167 | 168 | def put_many( 169 | self, 170 | items: List[Union[dict, list, str, int, bool]], 171 | *, 172 | expire_in: Union[int, None] = None, 173 | expire_at: Union[int, float, datetime.datetime, None] = None, 174 | ): 175 | assert len(items) <= 25, "We can't put more than 25 items at a time." 176 | _items = [] 177 | for i in items: 178 | data = i 179 | if not isinstance(i, dict): 180 | data = {"value": i} 181 | insert_ttl( 182 | data, self.__ttl_attribute, expire_in=expire_in, expire_at=expire_at 183 | ) 184 | _items.append(data) 185 | 186 | _, res = self._request( 187 | "/items", "PUT", {"items": _items}, content_type=JSON_MIME 188 | ) 189 | return res 190 | 191 | def _fetch( 192 | self, 193 | query: Union[dict, list, None] = None, 194 | buffer: Union[int, None] = None, 195 | last: Union[str, None] = None, 196 | desc: bool = False, 197 | ): 198 | """This is where actual fetch happens.""" 199 | payload = { 200 | "limit": buffer, 201 | "last": last if not isinstance(last, bool) else None, 202 | "sort": "desc" if desc else "", 203 | } 204 | 205 | if query: 206 | payload["query"] = query if isinstance(query, list) else [query] 207 | 208 | _, res = self._request( 209 | "/query", "POST", payload, content_type=JSON_MIME) 210 | 211 | return res 212 | 213 | def fetch( 214 | self, 215 | query: Union[dict, list, None] = None, 216 | *, 217 | limit: int = 1000, 218 | last: Union[str, None] = None, 219 | desc: bool = False, 220 | 221 | ): 222 | """ 223 | fetch items from the database. 224 | `query` is an optional filter or list of filters. Without filter, it will return the whole db. 
225 | """ 226 | 227 | res = self._fetch(query, limit, last, desc) 228 | 229 | paging = res.get("paging") # pyright: ignore 230 | 231 | return FetchResponse(paging.get("size"), 232 | paging.get("last"), 233 | res.get("items")) # pyright: ignore 234 | 235 | def update( 236 | self, 237 | updates: dict, 238 | key: str, 239 | *, 240 | expire_in: Union[int, None] = None, 241 | expire_at: Union[int, float, datetime.datetime, None] = None, 242 | ): 243 | """ 244 | update an item in the database 245 | `updates` specifies the attribute names and values to update,add or remove 246 | `key` is the key of the item to be updated 247 | """ 248 | 249 | if key == "": 250 | raise ValueError("Key is empty") 251 | 252 | payload = { 253 | "set": {}, 254 | "increment": {}, 255 | "append": {}, 256 | "prepend": {}, 257 | "delete": [], 258 | } 259 | if updates: 260 | for attr, value in updates.items(): 261 | if isinstance(value, Util.Trim): 262 | payload["delete"].append(attr) 263 | elif isinstance(value, Util.Increment): 264 | payload["increment"][attr] = value.val 265 | elif isinstance(value, Util.Append): 266 | payload["append"][attr] = value.val 267 | elif isinstance(value, Util.Prepend): 268 | payload["prepend"][attr] = value.val 269 | else: 270 | payload["set"][attr] = value 271 | 272 | insert_ttl( 273 | payload["set"], 274 | self.__ttl_attribute, 275 | expire_in=expire_in, 276 | expire_at=expire_at, 277 | ) 278 | 279 | encoded_key = quote(key, safe="") 280 | code, _ = self._request( 281 | "/items/{}".format(encoded_key), "PATCH", payload, content_type=JSON_MIME 282 | ) 283 | if code == 200: 284 | return None 285 | elif code == 404: 286 | raise Exception("Key '{}' not found".format(key)) 287 | 288 | 289 | def insert_ttl(item, ttl_attribute, expire_in=None, expire_at=None): 290 | if expire_in and expire_at: 291 | raise ValueError("both expire_in and expire_at provided") 292 | if not expire_in and not expire_at: 293 | return 294 | 295 | if expire_in: 296 | expire_at = datetime.datetime.now() + datetime.timedelta(seconds=expire_in) 297 | 298 | if isinstance(expire_at, datetime.datetime): 299 | expire_at = expire_at.replace(microsecond=0).timestamp() 300 | 301 | if not isinstance(expire_at, (int, float)): 302 | raise TypeError("expire_at should one one of int, float or datetime") 303 | 304 | item[ttl_attribute] = int(expire_at) 305 | -------------------------------------------------------------------------------- /deta/drive.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Union, List 3 | from io import BufferedIOBase, TextIOBase, RawIOBase, StringIO, BytesIO 4 | from urllib.parse import quote_plus 5 | 6 | from .service import JSON_MIME, _Service 7 | 8 | # 10 MB upload chunk size 9 | UPLOAD_CHUNK_SIZE = 1024 * 1024 * 10 10 | 11 | # timeout for Drive service in seconds 12 | DRIVE_SERVICE_TIMEOUT = 300 13 | 14 | 15 | class DriveStreamingBody: 16 | def __init__(self, res: BufferedIOBase): 17 | self.__stream = res 18 | 19 | @property 20 | def closed(self): 21 | return self.__stream.closed 22 | 23 | def read(self, size: Union[int, None] = None): 24 | return self.__stream.read(size) 25 | 26 | def iter_chunks(self, chunk_size: int = 1024): 27 | while True: 28 | chunk = self.__stream.read(chunk_size) 29 | if not chunk: 30 | break 31 | yield chunk 32 | 33 | def iter_lines(self, chunk_size: int = 1024): 34 | while True: 35 | chunk = self.__stream.readline(chunk_size) 36 | if not chunk: 37 | break 38 | yield chunk 39 | 40 | def close(self): 41 | # 
close stream 42 | try: 43 | self.__stream.close() 44 | except: 45 | pass 46 | 47 | 48 | class _Drive(_Service): 49 | def __init__( 50 | self, 51 | name: Union[str, None] = None, 52 | project_key: Union[str, None] = None, 53 | project_id: Union[str, None] = None, 54 | host: Union[str, None] = None, 55 | ): 56 | assert name, "No Drive name provided" 57 | host = host or os.getenv("DETA_DRIVE_HOST") or "drive.deta.sh" 58 | 59 | assert project_key, "Project key must be provided" 60 | assert project_id, "Project id must be provided" 61 | 62 | super().__init__( 63 | project_key=project_key, 64 | project_id=project_id, 65 | host=host, 66 | name=name, 67 | timeout=DRIVE_SERVICE_TIMEOUT, 68 | keep_alive=False, 69 | ) 70 | 71 | def _quote(self, param: str): 72 | return quote_plus(param) 73 | 74 | def get(self, name: str): 75 | """Get/Download a file from drive. 76 | `name` is the name of the file. 77 | Returns a DriveStreamingBody. 78 | """ 79 | assert name, "No name provided" 80 | _, res = self._request( 81 | f"/files/download?name={self._quote(name)}", "GET", stream=True 82 | ) 83 | if res: 84 | return DriveStreamingBody(res) # pyright: ignore 85 | return None 86 | 87 | def delete_many(self, names: List[str]): 88 | """Delete many files from drive in single request. 89 | `names` are the names of the files to be deleted. 90 | Returns a dict with 'deleted' and 'failed' files. 91 | """ 92 | assert names, "Names is empty" 93 | assert len(names) <= 1000, "More than 1000 names to delete" 94 | _, res = self._request( 95 | "/files", "DELETE", {"names": names}, content_type=JSON_MIME 96 | ) 97 | return res 98 | 99 | def delete(self, name: str): 100 | """Delete a file from drive. 101 | `name` is the name of the file. 102 | Returns the name of the file deleted. 103 | """ 104 | assert name, "Name not provided or empty" 105 | 106 | payload = self.delete_many([name]) 107 | 108 | failed = payload.get("failed") # pyright: ignore 109 | 110 | if failed: 111 | raise Exception(f"Failed to delete '{name}':{failed[name]}") 112 | 113 | return name 114 | 115 | def list(self, limit: int = 1000, prefix: Union[str, None] = None, 116 | last: Union[str, None] = None): 117 | """List file names from drive. 118 | `limit` is the limit of number of file names to get, defaults to 1000. 119 | `prefix` is the prefix of file names. 120 | `last` is the last name seen in the a previous paginated response. 121 | Returns a dict with 'paging' and 'names'. 
122 | """ 123 | url = f"/files?limit={limit}" 124 | if prefix: 125 | url += f"&prefix={prefix}" 126 | if last: 127 | url += f"&last={last}" 128 | _, res = self._request(url, "GET") 129 | return res 130 | 131 | def _start_upload(self, name: str): 132 | _, res = self._request(f"/uploads?name={self._quote(name)}", "POST") 133 | return res["upload_id"] # pyright: ignore 134 | 135 | def _finish_upload(self, name: str, upload_id: str): 136 | self._request(f"/uploads/{upload_id}?name={self._quote(name)}", "PATCH") 137 | 138 | def _abort_upload(self, name: str, upload_id: str): 139 | self._request( 140 | f"/uploads/{upload_id}?name={self._quote(name)}", "DELETE") 141 | 142 | def _upload_part( 143 | self, 144 | name: str, 145 | chunk: Union[bytes, str], 146 | upload_id: str, 147 | part: int, 148 | content_type: Union[str, None] = None, 149 | ): 150 | self._request( 151 | f"/uploads/{upload_id}/parts?name={self._quote(name)}&part={part}", 152 | "POST", 153 | data=chunk, 154 | content_type=content_type, 155 | ) 156 | 157 | def _get_content_stream( 158 | self, data: Union[str, bytes, TextIOBase, BufferedIOBase, RawIOBase] 159 | ): 160 | if isinstance(data, str): 161 | return StringIO(data) 162 | elif isinstance(data, bytes): 163 | return BytesIO(data) 164 | return data 165 | 166 | def put( 167 | self, 168 | name: str, 169 | data: Union[str, bytes, TextIOBase, 170 | BufferedIOBase, RawIOBase, None] = None, 171 | *, 172 | path: Union[str, None] = None, 173 | content_type: Union[str, None] = None, 174 | ) -> str: 175 | """Put a file in drive. 176 | `name` is the name of the file. 177 | `data` is the data to be put. 178 | `content_type` is the mime type of the file. 179 | Returns the name of the file. 180 | """ 181 | assert name, "No name provided" 182 | assert path or data, "No data or path provided" 183 | assert not (path and data), "Both path and data provided" 184 | 185 | # start upload 186 | upload_id = self._start_upload(name) 187 | 188 | if path: 189 | content_stream = open(path, "rb") 190 | else: 191 | assert data 192 | content_stream = self._get_content_stream(data) 193 | 194 | part = 1 195 | 196 | # upload chunks 197 | while True: 198 | chunk = content_stream.read(UPLOAD_CHUNK_SIZE) 199 | # eof stop the loop 200 | if not chunk: 201 | self._finish_upload(name, upload_id) 202 | content_stream.close() 203 | return name 204 | 205 | # upload part 206 | try: 207 | self._upload_part(name, chunk, upload_id, part, content_type) 208 | part += 1 209 | # clean up on exception 210 | # and raise exception again 211 | except Exception as e: 212 | self._abort_upload(name, upload_id) 213 | content_stream.close() 214 | raise e 215 | -------------------------------------------------------------------------------- /deta/service.py: -------------------------------------------------------------------------------- 1 | import http.client 2 | import os 3 | import json 4 | import socket 5 | import struct 6 | from typing import Union, Any 7 | import urllib.error 8 | from pathlib import Path 9 | 10 | JSON_MIME = "application/json" 11 | 12 | 13 | class CustomJSONEncoder(json.JSONEncoder): 14 | 15 | def default(self, o: Any) -> Any: 16 | if isinstance(o, Path): 17 | return o.resolve().as_posix() 18 | return super().default(o) 19 | 20 | 21 | class _Service: 22 | def __init__( 23 | self, 24 | project_key: str, 25 | project_id: str, 26 | host: str, 27 | name: str, 28 | timeout: int, 29 | keep_alive: bool = True, 30 | ): 31 | self.project_key = project_key 32 | self.base_path = "/v1/{0}/{1}".format(project_id, name) 33 | 
self.host = host 34 | self.timeout = timeout 35 | self.keep_alive = keep_alive 36 | self.client = (http.client.HTTPSConnection( 37 | host, timeout=timeout) if keep_alive else None) 38 | 39 | def _is_socket_closed(self): 40 | if not self.client or not self.client.sock: 41 | return True 42 | 43 | fmt = "B" * 7 + "I" * 21 44 | tcp_info = struct.unpack( 45 | fmt, self.client.sock.getsockopt( 46 | socket.IPPROTO_TCP, socket.TCP_INFO, 92) 47 | ) 48 | # 8 = CLOSE_WAIT 49 | if len(tcp_info) > 0 and tcp_info[0] == 8: 50 | return True 51 | return False 52 | 53 | def _request( 54 | self, 55 | path: str, 56 | method: str, 57 | data: Union[str, bytes, dict, None] = None, 58 | headers: Union[dict, None] = None, 59 | content_type: Union[str, None] = None, 60 | stream: bool = False, 61 | ): 62 | 63 | url = self.base_path + path 64 | 65 | headers = headers or {} 66 | headers["X-Api-Key"] = self.project_key 67 | 68 | if content_type: 69 | headers["Content-Type"] = content_type 70 | 71 | if not self.keep_alive: 72 | headers["Connection"] = "close" 73 | 74 | # close connection if socket is closed 75 | # fix for a bug in lambda 76 | try: 77 | if ( 78 | self.client 79 | and os.environ.get("DETA_RUNTIME") == "true" 80 | and self._is_socket_closed() 81 | ): 82 | self.client.close() 83 | except: 84 | pass 85 | 86 | # send request 87 | body = json.dumps( 88 | data, cls=CustomJSONEncoder 89 | ) if content_type == JSON_MIME else data 90 | 91 | # response 92 | res = self._send_request_with_retry(method, url, headers, body) 93 | 94 | assert res 95 | 96 | status = res.status 97 | 98 | if status not in [200, 201, 202, 207]: 99 | # need to read the response so subsequent requests can be sent on the client 100 | res.read() 101 | if not self.keep_alive and self.client: 102 | self.client.close() 103 | # return None if not found 104 | if status == 404: 105 | return status, None 106 | # FIXME: workaround to fix traceback printing for HTTPError 107 | fp = res.fp if res.fp is not None else '' 108 | raise urllib.error.HTTPError( 109 | url, status, res.reason, res.headers, fp) 110 | 111 | # if stream return the response and client without reading and closing the client 112 | if stream: 113 | return status, res 114 | 115 | # return json if application/json 116 | res_content_type = res.getheader("content-type") 117 | if res_content_type and JSON_MIME in res_content_type: 118 | payload = json.loads(res.read()) 119 | else: 120 | payload = res.read() 121 | 122 | if not self.keep_alive and self.client: 123 | self.client.close() 124 | 125 | return status, payload 126 | 127 | def _send_request_with_retry( 128 | self, 129 | method: str, 130 | url: str, 131 | headers: Union[dict, None] = None, 132 | body: Union[str, bytes, dict, None] = None, 133 | retry=2, # try at least twice to regain a new connection 134 | ): 135 | reinitializeConnection = False 136 | while retry > 0: 137 | try: 138 | if not self.keep_alive or reinitializeConnection: 139 | self.client = http.client.HTTPSConnection( 140 | host=self.host, timeout=self.timeout 141 | ) 142 | 143 | if headers is None: 144 | headers = {} 145 | 146 | assert self.client 147 | 148 | self.client.request( 149 | method, 150 | url, 151 | headers=headers, 152 | body=body, 153 | ) 154 | res = self.client.getresponse() 155 | return res 156 | 157 | except http.client.RemoteDisconnected: 158 | reinitializeConnection = True 159 | retry -= 1 160 | -------------------------------------------------------------------------------- /deta/utils.py: 
-------------------------------------------------------------------------------- 1 | import os 2 | from typing import Union 3 | 4 | 5 | def _get_project_key_id(project_key: Union[str, None] = None, 6 | project_id: Union[str, None] = None): 7 | project_key = project_key or os.getenv("DETA_PROJECT_KEY", "") 8 | 9 | if not project_key: 10 | raise AssertionError("No project key defined") 11 | 12 | if not project_id: 13 | project_id = project_key.split("_")[0] 14 | 15 | if project_id == project_key: 16 | raise AssertionError("Bad project key provided") 17 | 18 | return project_key, project_id 19 | -------------------------------------------------------------------------------- /env.sample: -------------------------------------------------------------------------------- 1 | DETA_SDK_TEST_PROJECT_KEY= 2 | DETA_SDK_TEST_DRIVE_NAME=testdrive 3 | DETA_SDK_TEST_DRIVE_HOST=drive.deta.sh 4 | DETA_SDK_TEST_BASE_NAME=testbase -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = --color=yes 3 | asyncio_mode = auto 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | black 2 | python-dotenv 3 | setuptools 4 | wheel 5 | twine 6 | 7 | # Requirements for testing [async] extra 8 | aiohttp 9 | pytest-asyncio 10 | pytest -------------------------------------------------------------------------------- /scripts/build: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | if [ -d 'venv' ] ; then 4 | PREFIX="venv/bin/" 5 | else 6 | PREFIX="" 7 | fi 8 | 9 | #set -x 10 | 11 | ${PREFIX}python setup.py sdist bdist_wheel 12 | ${PREFIX}twine check dist/* -------------------------------------------------------------------------------- /scripts/install: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | # Use the Python executable provided from the `-p` option, or a default. 4 | [ "$1" = "-p" ] && PYTHON=$2 || PYTHON="python3" 5 | 6 | REQUIREMENTS="requirements.txt" 7 | VENV="venv" 8 | 9 | #set -x 10 | 11 | if [ -z "$GITHUB_ACTIONS" ]; then 12 | "$PYTHON" -m venv "$VENV" 13 | PIP="$VENV/bin/pip" 14 | else 15 | PIP="pip" 16 | fi 17 | 18 | "$PIP" install -r "$REQUIREMENTS" 19 | 20 | sudo apt-get install gh -------------------------------------------------------------------------------- /scripts/publish: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | VERSION_FILE="deta/__init__.py" 4 | SETUP_FILE="setup.py" 5 | 6 | if [ -d 'venv' ] ; then 7 | PREFIX="venv/bin/" 8 | else 9 | PREFIX="" 10 | fi 11 | 12 | if [ ! 
-z "$GITHUB_ACTIONS" ]; then 13 | git config --local user.email "action@github.com" 14 | git config --local user.name "GitHub Action" 15 | 16 | VERSION=`grep __version__ ${VERSION_FILE} | grep -o '[0-9][^"]*'` 17 | VERSION_SETUP=`grep version ${SETUP_FILE}| grep -o '[0-9][^"]*'` 18 | 19 | if [ "${VERSION}" != "${VERSION_SETUP}" ] ; then 20 | echo "__init__.py version '${VERSION}' did not match setup version '${VERSION_SETUP}'" 21 | exit 1 22 | fi 23 | fi 24 | 25 | #set -x 26 | 27 | ${PREFIX}twine upload dist/* -------------------------------------------------------------------------------- /scripts/release: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | VERSION_FILE="deta/__init__.py" 4 | SETUP_FILE="setup.py" 5 | 6 | if [ -d 'venv' ] ; then 7 | PREFIX="venv/bin/" 8 | else 9 | PREFIX="" 10 | fi 11 | 12 | if [ ! -z "$GITHUB_ACTIONS" ]; then 13 | git config --local user.email "action@github.com" 14 | git config --local user.name "GitHub Action" 15 | 16 | VERSION=`grep __version__ ${VERSION_FILE} | grep -o '[0-9][^"]*'` 17 | VERSION_SETUP=`grep version ${SETUP_FILE}| grep -o '[0-9][^"]*'` 18 | 19 | if [ "${VERSION}" != "${VERSION_SETUP}" ] ; then 20 | echo "__init__.py version '${VERSION}' did not match setup version '${VERSION_SETUP}'" 21 | exit 1 22 | fi 23 | fi 24 | 25 | #set -x 26 | 27 | ${PREFIX}gh release create "v${VERSION}" -F CHANGELOG.md -------------------------------------------------------------------------------- /scripts/tag: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | VERSION_FILE="deta/__init__.py" 4 | SETUP_FILE="setup.py" 5 | 6 | if [ ! -z "$GITHUB_ACTIONS" ]; then 7 | 8 | VERSION=`grep __version__ ${VERSION_FILE} | grep -o '[0-9][^"]*'` 9 | VERSION_SETUP=`grep version ${SETUP_FILE} | grep -o '[0-9][^"]*'` 10 | 11 | if [ "${VERSION}" != "${VERSION_SETUP}" ] ; then 12 | echo "__init__.py version '${VERSION}' did not match setup version '${VERSION_SETUP}'" 13 | exit 15 14 | fi 15 | fi 16 | 17 | git tag "v${VERSION}" 18 | git push origin "v${VERSION}" -------------------------------------------------------------------------------- /scripts/test_publish: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | VERSION_FILE="deta/__init__.py" 4 | 5 | if [ -d 'venv' ] ; then 6 | PREFIX="venv/bin/" 7 | else 8 | PREFIX="" 9 | fi 10 | 11 | if [ ! 
-z "$GITHUB_ACTIONS" ]; then 12 | git config --local user.email "action@github.com" 13 | git config --local user.name "GitHub Action" 14 | 15 | VERSION=`grep __version__ ${VERSION_FILE} | grep -o '[0-9][^"]*'` 16 | 17 | if [ "refs/tags/${VERSION}" != "${GITHUB_REF}" ] ; then 18 | echo "GitHub Ref '${GITHUB_REF}' did not match package version '${VERSION}'" 19 | exit 1 20 | fi 21 | fi 22 | 23 | #set -x 24 | 25 | ${PREFIX}twine upload --repository testpypi dist/* -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup( 4 | name="deta", 5 | version="1.2.0", 6 | description="Python SDK for Deta Base & Deta Drive.", 7 | url="http://github.com/deta/deta-python", 8 | author="Deta", 9 | author_email="hello@deta.sh", 10 | license="MIT", 11 | classifiers=[ 12 | "Programming Language :: Python :: 3", 13 | "License :: OSI Approved :: MIT License", 14 | "Operating System :: OS Independent", 15 | ], 16 | packages=["deta", "deta._async"], 17 | extras_require={ 18 | "async": ["aiohttp>=3,<4"], 19 | }, 20 | ) 21 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/deta/deta-python/930b94358a1aaf2355f27522b326792f26f38d05/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_async.py: -------------------------------------------------------------------------------- 1 | from deta.base import FetchResponse 2 | import datetime 3 | import os 4 | import pytest 5 | import random 6 | import string 7 | from deta import Deta 8 | 9 | try: 10 | from dotenv import load_dotenv 11 | 12 | load_dotenv() 13 | except: 14 | pass 15 | 16 | pytestmark = pytest.mark.asyncio 17 | 18 | 19 | PROJECT_KEY = os.getenv("DETA_SDK_TEST_PROJECT_KEY") 20 | BASE_NAME = os.getenv("DETA_SDK_TEST_BASE_NAME") 21 | BASE_TEST_TTL_ATTRIBUTE = os.getenv("DETA_SDK_TEST_TTL_ATTRIBUTE") or "__expires" 22 | 23 | 24 | @pytest.fixture() 25 | async def db(): 26 | assert PROJECT_KEY 27 | assert BASE_NAME 28 | assert BASE_TEST_TTL_ATTRIBUTE 29 | 30 | deta = Deta(PROJECT_KEY) 31 | db = deta.AsyncBase(BASE_NAME) 32 | 33 | yield db 34 | 35 | result = await db.fetch() 36 | for i in result.items: 37 | await db.delete(i["key"]) 38 | await db.close() 39 | 40 | 41 | @pytest.fixture() 42 | async def items(db): 43 | items = [ 44 | {"key": "existing1", "value": "test"}, 45 | {"key": "existing2", "value": 7}, 46 | {"key": "existing3", "value": 44}, 47 | {"key": "existing4", "value": {"name": "patrick"}}, 48 | {"key": "%@#//#!#)#$_", "value": 0, "list": ["a"]}, 49 | ] 50 | await db.put_many(items) 51 | yield items 52 | 53 | 54 | async def test_put(db): 55 | item = {"msg": "hello"} 56 | expected_resp = {"key": "one", "msg": "hello"} 57 | resp = await db.put(item, "one") 58 | assert resp == expected_resp 59 | assert {"msg": "hello"} == item 60 | 61 | for input in ["Hello", 1, True, False, 3.14159265359]: 62 | 63 | resp = await db.put(input) 64 | assert set(resp.keys()) == set(["key", "value"]) 65 | 66 | 67 | async def test_put_fail(db): 68 | with pytest.raises(Exception): 69 | await db.put({"msg": "hello"}, 1) 70 | await db.put({"msg": "hello", "key": True}) 71 | 72 | 73 | async def test_put_many(db): 74 | resp = await db.put_many([1, 2, 3]) 75 | assert len(resp["processed"]["items"]) == 3 76 | 
77 | resp = await db.put_many([{"msg": "hello"}, {"msg2": "hi"}]) 78 | ok = resp["processed"]["items"] 79 | assert len(ok) == 2 80 | 81 | 82 | async def test_put_many_fail(db): 83 | with pytest.raises(Exception): 84 | await db.put_many([{"name": "joe", "key": "ok"}, {"name": "mo", "key": 7}]) 85 | 86 | 87 | async def test_put_many_fail_limit(db): 88 | with pytest.raises(Exception): 89 | await db.put_many([i for i in range(26)]) 90 | 91 | 92 | async def test_insert(db): 93 | item = {"msg": "hello"} 94 | resp = await db.insert(item) 95 | assert set(resp.keys()) == set(["key", "msg"]) 96 | 97 | 98 | async def test_insert_fail(db, items): 99 | with pytest.raises(Exception): 100 | await db.insert(items[0]) 101 | 102 | 103 | async def test_get(db, items): 104 | resp = await db.get(items[0]["key"]) 105 | assert resp == items[0] 106 | 107 | resp = await db.get("key_does_not_exist") 108 | assert resp is None 109 | 110 | 111 | async def test_delete(db, items): 112 | resp = await db.delete(items[0]["key"]) 113 | assert resp is None 114 | 115 | resp = await db.delete("key_does_not_exist") 116 | assert resp is None 117 | 118 | 119 | async def test_fetch(db, items): 120 | res1 = await db.fetch({"value?gte": 7}) 121 | expectedItem = FetchResponse( 122 | 2, 123 | None, 124 | [ 125 | {"key": "existing2", "value": 7}, 126 | {"key": "existing3", "value": 44}, 127 | ], 128 | ) 129 | assert res1 == expectedItem 130 | 131 | res2 = await db.fetch({"value?gte": 7}, limit=1) 132 | expectedItem = FetchResponse( 133 | 1, 134 | "existing2", 135 | [ 136 | {"key": "existing2", "value": 7}, 137 | ], 138 | ) 139 | assert res2 == expectedItem 140 | 141 | res3 = await db.fetch([{"value?gt": 6}, {"value?lt": 50}], limit=2) 142 | expectedItem = FetchResponse( 143 | 2, 144 | "existing2", 145 | [ 146 | {"key": "%@#//#!#)#$_", "list": ["a"], "value": 0}, 147 | {"key": "existing2", "value": 7}, 148 | ], 149 | ) 150 | assert res3 == expectedItem 151 | 152 | res4 = await db.fetch( 153 | [{"value?gt": 6}, {"value?lt": 50}], limit=2, last="existing2" 154 | ) 155 | expectedItem = FetchResponse( 156 | 1, 157 | None, 158 | [{"key": "existing3", "value": 44}], 159 | ) 160 | assert res4 == expectedItem 161 | 162 | res5 = await db.fetch({"value": "test"}) 163 | expectedItem = FetchResponse( 164 | 1, 165 | None, 166 | [{"key": "existing1", "value": "test"}], 167 | ) 168 | assert res5 == expectedItem 169 | 170 | res6 = await db.fetch({"valuexyz": "test_none_existing_value"}) 171 | expectedItem = FetchResponse( 172 | 0, 173 | None, 174 | [], 175 | ) 176 | assert res6 == expectedItem 177 | 178 | res7 = await db.fetch({"value.name": items[3]["value"]["name"]}) 179 | expectedItem = FetchResponse( 180 | 1, 181 | None, 182 | [{"key": "existing4", "value": {"name": "patrick"}}], 183 | ) 184 | assert res7 == expectedItem 185 | 186 | res8 = await db.fetch({"value?gte": 7}, desc=True) 187 | expectedItem = FetchResponse( 188 | 2, 189 | None, 190 | [ 191 | {"key": "existing3", "value": 44}, 192 | {"key": "existing2", "value": 7}, 193 | ], 194 | ) 195 | assert res8 == expectedItem 196 | 197 | 198 | async def test_update(db, items): 199 | resp = await db.update({"value.name": "spongebob"}, "existing4") 200 | assert resp is None 201 | 202 | resp = await db.get("existing4") 203 | expectedItem = {"key": "existing4", "value": {"name": "spongebob"}} 204 | assert resp == expectedItem 205 | 206 | resp = await db.update({"value.name": db.util.trim(), "value.age": 32}, "existing4") 207 | 208 | assert resp is None 209 | expectedItem = {"key": "existing4", "value": 
{"age": 32}} 210 | resp = await db.get("existing4") 211 | 212 | assert resp == expectedItem 213 | 214 | resp = await db.update( 215 | { 216 | "list": db.util.append(["b", "c"]), 217 | "value": db.util.increment(), 218 | }, 219 | "%@#//#!#)#$_", 220 | ) 221 | assert resp is None 222 | 223 | resp = await db.update( 224 | {"list": db.util.prepend("x"), "value": db.util.increment(2)}, 225 | "%@#//#!#)#$_", 226 | ) 227 | assert resp is None 228 | expectedItem = {"key": "%@#//#!#)#$_", 229 | "list": ["x", "a", "b", "c"], "value": 3} 230 | resp = await db.get("%@#//#!#)#$_") 231 | assert resp == expectedItem 232 | 233 | # key does not exist 234 | with pytest.raises(Exception): 235 | await db.update({"value": "test"}, "doesNotExist") 236 | 237 | # deleting a key 238 | with pytest.raises(Exception): 239 | await db.update({"value": "test", "key": db.util.trim()}, "existing4") 240 | 241 | # updating a key 242 | with pytest.raises(Exception): 243 | await db.update({"key": "test"}, "existing4") 244 | 245 | # upper hierarchy does not exist 246 | with pytest.raises(Exception): 247 | await db.update({"profile.age": 32}, "existing4") 248 | 249 | # no attributes specified 250 | with pytest.raises(Exception): 251 | await db.update({}, "existing4") 252 | 253 | # appending to a key 254 | with pytest.raises(Exception): 255 | await db.update( 256 | {"key": db.util.append("test")}, 257 | "%@#//#!#)#$_", 258 | ) 259 | 260 | 261 | def get_expire_at(expire_at): 262 | return int(expire_at.replace(microsecond=0).timestamp()) 263 | 264 | 265 | def get_expire_in(expire_in): 266 | expire_at = datetime.datetime.now() + datetime.timedelta(seconds=expire_in) 267 | return get_expire_at(expire_at) 268 | 269 | 270 | async def test_ttl(db, items): 271 | item1 = items[0] 272 | expire_in = 300 273 | expire_at = datetime.datetime.now() + datetime.timedelta(seconds=300) 274 | delta = 2 # allow time delta of 2 seconds 275 | test_cases = [ 276 | { 277 | "item": item1, 278 | "expire_in": expire_in, 279 | "expected_ttl_value": get_expire_in(expire_in), 280 | "delta": delta, 281 | }, 282 | { 283 | "item": item1, 284 | "expire_at": expire_at, 285 | "expected_ttl_value": get_expire_at(expire_at), 286 | "delta": delta, 287 | }, 288 | { 289 | "item": item1, 290 | "expire_in": expire_in, 291 | "expire_at": expire_at, 292 | "delta": delta, 293 | "error": ValueError, 294 | }, 295 | { 296 | "item": item1, 297 | "expire_in": "randomtest", 298 | "delta": delta, 299 | "error": TypeError, 300 | }, 301 | { 302 | "item": item1, 303 | "expire_at": "not a datetime, int or float", 304 | "error": TypeError, 305 | "delta": delta, 306 | }, 307 | ] 308 | 309 | for case in test_cases: 310 | item = case.get("item") 311 | cexp_in = case.get("expire_in") 312 | cexp_at = case.get("expire_at") 313 | expected = case.get("expected_ttl_value") 314 | error = case.get("error") 315 | cdelta = case.get("delta") 316 | 317 | assert item 318 | 319 | if not error: 320 | # put 321 | await db.put(item, expire_in=cexp_in, expire_at=cexp_at) 322 | got = await db.get(item.get("key")) 323 | assert abs(expected - got.get(BASE_TEST_TTL_ATTRIBUTE)) <= cdelta 324 | 325 | # insert 326 | # need to udpate key as insert does not allow pre existing key 327 | item["key"] = "".join(random.choices(string.ascii_lowercase, k=6)) 328 | await db.insert(item, expire_in=cexp_in, expire_at=cexp_at) 329 | got = await db.get(item.get("key")) 330 | assert abs(expected - got.get(BASE_TEST_TTL_ATTRIBUTE)) <= cdelta 331 | 332 | # put many 333 | await db.put_many([item], expire_in=cexp_in, 
expire_at=cexp_at) 334 | got = await db.get(item.get("key")) 335 | assert abs(expected - got.get(BASE_TEST_TTL_ATTRIBUTE)) <= cdelta 336 | 337 | # update 338 | # only if one of expire_in or expire_at 339 | if cexp_in or cexp_at: 340 | await db.update( 341 | None, item.get("key"), expire_in=cexp_in, expire_at=cexp_at 342 | ) 343 | got = await db.get(item.get("key")) 344 | assert abs( 345 | expected - got.get(BASE_TEST_TTL_ATTRIBUTE)) <= cdelta 346 | else: 347 | with pytest.raises(error): 348 | await db.put(item, expire_in=cexp_in, expire_at=cexp_at) 349 | with pytest.raises(error): 350 | await db.put_many([item], expire_in=cexp_in, expire_at=cexp_at) 351 | with pytest.raises(error): 352 | await db.insert(item, expire_in=cexp_in, expire_at=cexp_at) 353 | with pytest.raises(error): 354 | await db.update( 355 | None, item.get("key"), expire_in=cexp_in, expire_at=cexp_at 356 | ) 357 | -------------------------------------------------------------------------------- /tests/test_sync.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import io 3 | import os 4 | import random 5 | import string 6 | import unittest 7 | from pathlib import Path 8 | 9 | from deta import Deta 10 | from deta.drive import UPLOAD_CHUNK_SIZE 11 | from deta.base import FetchResponse 12 | 13 | try: 14 | from dotenv import load_dotenv 15 | 16 | load_dotenv() 17 | except: 18 | pass 19 | 20 | 21 | """ 22 | class TestSendEmail(unittest.TestCase): 23 | def setUp(self): 24 | self.deta = Deta() 25 | 26 | def test_function(self): 27 | self.assertIsNone( 28 | send_email("mustafa@deta.sh", "Hello from test", "this is a test!") 29 | ) 30 | 31 | def test_method(self): 32 | self.assertIsNone( 33 | self.deta.send_email( 34 | "mustafa@deta.sh", "Hello from test", "this is a test!" 
35 | ) 36 | )""" 37 | 38 | 39 | class TestDriveMethods(unittest.TestCase): 40 | def setUp(self) -> None: 41 | key = os.getenv("DETA_SDK_TEST_PROJECT_KEY") 42 | name = os.getenv("DETA_SDK_TEST_DRIVE_NAME") 43 | host = os.getenv("DETA_SDK_TEST_DRIVE_HOST") 44 | self.assertIsNotNone(key) 45 | self.assertIsNotNone(name) 46 | deta = Deta(key) 47 | self.drive = deta.Drive(name, host=host) 48 | return super().setUp() 49 | 50 | def tearDown(self) -> None: 51 | all_items = self.drive.list() 52 | for item in all_items["names"]: 53 | self.drive.delete(item) 54 | 55 | def test_put_string(self): 56 | test_cases = [ 57 | {"name": "test_file_1.txt", "content": "this is a string."}, 58 | {"name": "name with spaces.txt", "content": "lorem ipsum"}, 59 | { 60 | "name": "test_file_1.txt", 61 | "content": "same file name should be overwritten", 62 | }, 63 | ] 64 | for tc in test_cases: 65 | name = self.drive.put(tc["name"], tc["content"]) 66 | self.assertEqual(name, tc["name"]) 67 | self.assertEqual(self.drive.get( 68 | tc["name"]).read().decode(), tc["content"]) 69 | 70 | def test_put_bytes(self): 71 | test_cases = [ 72 | {"name": "byte_file.txt", "content": b"bytes content"}, 73 | {"name": "another bytes file.txt", "content": b"another bytes content"}, 74 | ] 75 | for tc in test_cases: 76 | name = self.drive.put(tc["name"], tc["content"]) 77 | self.assertEqual(name, tc["name"]) 78 | self.assertEqual(self.drive.get(tc["name"]).read(), tc["content"]) 79 | 80 | def test_put_stream(self): 81 | test_cases = [ 82 | { 83 | "name": "string_stream.txt", 84 | "raw": b"string stream", 85 | "content": io.StringIO("string stream"), 86 | }, 87 | { 88 | "name": "binary_stream.txt", 89 | "raw": b"binary stream", 90 | "content": io.BytesIO(b"binary stream"), 91 | }, 92 | ] 93 | for tc in test_cases: 94 | name = self.drive.put(tc["name"], tc["content"]) 95 | self.assertEqual(name, tc["name"]) 96 | self.assertEqual(self.drive.get(tc["name"]).read(), tc["raw"]) 97 | self.assertEqual(tc["content"].closed, True) 98 | 99 | def test_large_file(self): 100 | name = "large_binary_file" 101 | large_binary_file = os.urandom(UPLOAD_CHUNK_SIZE * 2 + 1000) 102 | self.assertEqual(self.drive.put( 103 | "large_binary_file", large_binary_file), name) 104 | 105 | body = self.drive.get(name) 106 | binary_stream = io.BytesIO(large_binary_file) 107 | for chunk in body.iter_chunks(UPLOAD_CHUNK_SIZE): 108 | self.assertEqual(chunk, binary_stream.read(UPLOAD_CHUNK_SIZE)) 109 | 110 | def test_delete(self): 111 | test_cases = [ 112 | {"name": "to_del_1.txt", "content": "hello"}, 113 | {"name": "to del name with spaces.txt", "content": "hola"}, 114 | ] 115 | for tc in test_cases: 116 | self.drive.put(tc["name"], tc["content"]) 117 | self.assertEqual(self.drive.delete(tc["name"]), tc["name"]) 118 | self.assertIsNone(self.drive.get(tc["name"])) 119 | 120 | def test_delete_many(self): 121 | test_cases = [ 122 | {"name": "to_del_1.txt", "content": "hello"}, 123 | {"name": "to del name with spaces.txt", "content": "hola"}, 124 | ] 125 | for tc in test_cases: 126 | self.drive.put(tc["name"], tc["content"]) 127 | 128 | names = [tc["name"] for tc in test_cases] 129 | deleted = self.drive.delete_many(names)["deleted"] 130 | self.assertIn(names[0], deleted) 131 | self.assertIn(names[1], deleted) 132 | 133 | for n in names: 134 | self.assertIsNone(self.drive.get(n)) 135 | 136 | def test_list(self): 137 | test_cases = [ 138 | {"name": "a", "content": "a"}, 139 | {"name": "b", "content": "b"}, 140 | {"name": "c/d", "content": "c and d"}, 141 | ] 142 | for tc in 
test_cases: 143 | self.drive.put(tc["name"], tc["content"]) 144 | 145 | self.assertEqual(self.drive.list()["names"], ["a", "b", "c/d"]) 146 | self.assertEqual(self.drive.list(limit=1)["names"], ["a"]) 147 | self.assertEqual(self.drive.list(limit=2)["paging"]["last"], "b") 148 | self.assertEqual(self.drive.list(prefix="c/")["names"], ["c/d"]) 149 | 150 | def test_read_close(self): 151 | test_cases = [ 152 | { 153 | "name": "string_stream.txt", 154 | "raw": b"string stream", 155 | "content": io.StringIO("string stream"), 156 | }, 157 | ] 158 | for tc in test_cases: 159 | self.drive.put(tc["name"], tc["content"]) 160 | body = self.drive.get(tc["name"]) 161 | body.close() 162 | self.assertEqual(body.closed, True) 163 | 164 | def test_read_lines(self): 165 | test_cases = [ 166 | { 167 | "name": "read_lines_test.txt", 168 | "content": "first line\nSecond line\nLast Line\n", 169 | }, 170 | { 171 | "name": "read_lines_test_2.txt", 172 | "content": "has no new lines, just a normal string", 173 | }, 174 | { 175 | "name": "read_lines_test_3.txt", 176 | "content": "different new line\ranother line\r", 177 | }, 178 | ] 179 | for tc in test_cases: 180 | test_stream = io.StringIO(tc["content"]) 181 | self.drive.put(tc["name"], tc["content"]) 182 | body = self.drive.get(tc["name"]) 183 | for line in body.iter_lines(): 184 | self.assertEqual(test_stream.readline(), line.decode()) 185 | 186 | 187 | class TestBaseMethods(unittest.TestCase): 188 | def setUp(self): 189 | key = os.getenv("DETA_SDK_TEST_PROJECT_KEY") 190 | name = os.getenv("DETA_SDK_TEST_BASE_NAME") 191 | self.assertIsNotNone(key) 192 | self.assertIsNotNone(name) 193 | deta = Deta(key) 194 | self.db = deta.Base(str(name)) 195 | self.ttl_attribute = os.getenv( 196 | "DETA_SDK_TEST_TTL_ATTRIBUTE") or "__expires" 197 | self.item1 = {"key": "existing1", "value": "test"} 198 | self.item2 = {"key": "existing2", "value": 7} 199 | self.item3 = {"key": "existing3", "value": 44} 200 | self.item4 = {"key": "existing4", "value": {"name": "patrick"}} 201 | self.item5 = {"key": "%@#//#!#)#$_", "value": 0, "list": ["a"]} 202 | self.db.put_many( 203 | [self.item1, self.item2, self.item3, self.item4, self.item5]) 204 | 205 | def tearDown(self): 206 | items = self.db.fetch().items 207 | for i in items: 208 | self.db.delete(i["key"]) 209 | self.db.client.close() 210 | 211 | def test_put(self): 212 | item = {"msg": "hello"} 213 | resp = {"key": "one", "msg": "hello"} 214 | example_path = Path(__file__).parent / ".." 
215 | self.assertEqual(self.db.put(item, "one"), resp) 216 | self.assertEqual(self.db.put(item, "one"), resp) 217 | self.assertEqual({"msg": "hello"}, item) 218 | self.assertEqual( 219 | self.db.put({"example_path": example_path}, "example_key"), 220 | {"example_path": example_path.resolve().as_posix(), "key": "example_key"} 221 | ) 222 | self.assertEqual(set(self.db.put("Hello").keys()), 223 | set(["key", "value"])) 224 | self.assertEqual(set(self.db.put(1).keys()), set(["key", "value"])) 225 | self.assertEqual(set(self.db.put(True).keys()), set(["key", "value"])) 226 | self.assertEqual(set(self.db.put(False).keys()), set(["key", "value"])) 227 | self.assertEqual(set(self.db.put(3.14159265359).keys()), 228 | set(["key", "value"])) 229 | self.assertEqual(set(self.db.put(example_path).keys()), 230 | set(["key", "value"])) 231 | 232 | @unittest.expectedFailure 233 | def test_put_fail(self): 234 | self.db.put({"msg": "hello"}, 1) 235 | self.db.put({"msg": "hello", "key": True}) 236 | 237 | def test_put_many(self): 238 | self.assertEqual(len(self.db.put_many( 239 | [1, 2, 3])["processed"]["items"]), 3) 240 | ok = self.db.put_many([{"msg": "hello"}, {"msg2": "hi"}])[ 241 | "processed"]["items"] 242 | self.assertEqual(len(ok), 2) 243 | 244 | @unittest.expectedFailure 245 | def test_put_many_fail(self): 246 | self.db.put_many( 247 | [{"name": "joe", "key": "ok"}, {"name": "mo", "key": 7}]) 248 | 249 | @unittest.expectedFailure 250 | def test_put_many_fail_limit(self): 251 | self.db.put_many([i for i in range(26)]) 252 | 253 | def test_insert(self): 254 | item = {"msg": "hello"} 255 | self.assertEqual(set(self.db.insert(item).keys()), set(["key", "msg"])) 256 | self.assertEqual({"msg": "hello"}, item) 257 | 258 | @unittest.expectedFailure 259 | def test_insert_fail(self): 260 | self.db.insert(self.item1) 261 | 262 | def test_get(self): 263 | self.assertEqual(self.db.get(self.item1["key"]), self.item1) 264 | self.assertIsNone(self.db.get("key_does_not_exist")) 265 | 266 | def test_delete(self): 267 | self.assertIsNone(self.db.delete(self.item1["key"])) 268 | self.assertIsNone(self.db.delete("key_does_not_exist")) 269 | 270 | def test_fetch(self): 271 | res1 = self.db.fetch({"value?gte": 7}) 272 | expectedItem = FetchResponse( 273 | 2, 274 | None, 275 | [ 276 | {"key": "existing2", "value": 7}, 277 | {"key": "existing3", "value": 44}, 278 | ], 279 | ) 280 | self.assertEqual(res1, expectedItem) 281 | 282 | res2 = self.db.fetch({"value?gte": 7}, limit=1) 283 | expectedItem = FetchResponse( 284 | 1, 285 | "existing2", 286 | [ 287 | {"key": "existing2", "value": 7}, 288 | ], 289 | ) 290 | self.assertEqual(res2, expectedItem) 291 | 292 | res3 = self.db.fetch([{"value?gt": 6}, {"value?lt": 50}], limit=2) 293 | expectedItem = FetchResponse( 294 | 2, 295 | "existing2", 296 | [ 297 | {"key": "%@#//#!#)#$_", "list": ["a"], "value": 0}, 298 | {"key": "existing2", "value": 7}, 299 | ], 300 | ) 301 | self.assertEqual(res3, expectedItem) 302 | 303 | res4 = self.db.fetch( 304 | [{"value?gt": 6}, {"value?lt": 50}], limit=2, last="existing2" 305 | ) 306 | expectedItem = FetchResponse( 307 | 1, 308 | None, 309 | [{"key": "existing3", "value": 44}], 310 | ) 311 | self.assertEqual(res4, expectedItem) 312 | 313 | res5 = self.db.fetch({"value": "test"}) 314 | expectedItem = FetchResponse( 315 | 1, 316 | None, 317 | [{"key": "existing1", "value": "test"}], 318 | ) 319 | self.assertEqual(res5, expectedItem) 320 | 321 | res6 = self.db.fetch({"valuexyz": "test_none_existing_value"}) 322 | expectedItem = FetchResponse( 323 | 
0, 324 | None, 325 | [], 326 | ) 327 | self.assertEqual(res6, expectedItem) 328 | 329 | res7 = self.db.fetch({"value.name": self.item4["value"]["name"]}) 330 | expectedItem = FetchResponse( 331 | 1, 332 | None, 333 | [{"key": "existing4", "value": {"name": "patrick"}}], 334 | ) 335 | self.assertEqual(res7, expectedItem) 336 | 337 | res8 = self.db.fetch({"value?gte": 7}, desc=True) 338 | expectedItem = FetchResponse( 339 | 2, 340 | None, 341 | [ 342 | {"key": "existing3", "value": 44}, 343 | {"key": "existing2", "value": 7}, 344 | ], 345 | ) 346 | self.assertEqual(res8, expectedItem) 347 | 348 | def test_update(self): 349 | self.assertIsNone(self.db.update( 350 | {"value.name": "spongebob"}, "existing4")) 351 | expectedItem = {"key": "existing4", "value": {"name": "spongebob"}} 352 | self.assertEqual(self.db.get("existing4"), expectedItem) 353 | 354 | self.assertIsNone( 355 | self.db.update( 356 | {"value.name": self.db.util.trim(), "value.age": 32}, "existing4" 357 | ) 358 | ) 359 | expectedItem = {"key": "existing4", "value": {"age": 32}} 360 | self.assertEqual(self.db.get("existing4"), expectedItem) 361 | 362 | self.assertIsNone( 363 | self.db.update( 364 | { 365 | "list": self.db.util.append(["b", "c"]), 366 | "value": self.db.util.increment(), 367 | }, 368 | "%@#//#!#)#$_", 369 | ) 370 | ) 371 | 372 | self.assertIsNone( 373 | self.db.update( 374 | {"list": self.db.util.prepend( 375 | "x"), "value": self.db.util.increment(2)}, 376 | "%@#//#!#)#$_", 377 | ) 378 | ) 379 | expectedItem = {"key": "%@#//#!#)#$_", 380 | "list": ["x", "a", "b", "c"], "value": 3} 381 | self.assertEqual(self.db.get("%@#//#!#)#$_"), expectedItem) 382 | 383 | # key does not exist 384 | self.assertRaises(Exception, self.db.update, { 385 | "value": "test"}, "doesNotExist") 386 | # deleting a key 387 | self.assertRaises( 388 | Exception, 389 | self.db.update, 390 | {"value": "test", "key": self.db.util.trim()}, 391 | "existing4", 392 | ) 393 | # updating a key 394 | self.assertRaises(Exception, self.db.update, { 395 | "key": "test"}, "existing4") 396 | # upper hierarchy does not exist 397 | self.assertRaises(Exception, self.db.update, { 398 | "profile.age": 32}, "existing4") 399 | # no attributes specified 400 | self.assertRaises(Exception, self.db.update, {}, "existing4") 401 | 402 | # appending to a key 403 | self.assertRaises( 404 | Exception, 405 | self.db.update, 406 | {"key": self.db.util.append("test")}, 407 | "%@#//#!#)#$_", 408 | ) 409 | 410 | def get_expire_at(self, expire_at): 411 | return int(expire_at.replace(microsecond=0).timestamp()) 412 | 413 | def get_expire_in(self, expire_in): 414 | expire_at = datetime.datetime.now() + datetime.timedelta(seconds=expire_in) 415 | return self.get_expire_at(expire_at) 416 | 417 | def test_ttl(self): 418 | expire_in = 300 419 | expire_at = datetime.datetime.now() + datetime.timedelta(seconds=300) 420 | delta = 2 # allow time delta of 2 seconds 421 | test_cases = [ 422 | { 423 | "item": self.item1, 424 | "expire_in": expire_in, 425 | "expected_ttl_value": self.get_expire_in(expire_in), 426 | "delta": delta, 427 | }, 428 | { 429 | "item": self.item1, 430 | "expire_at": expire_at, 431 | "expected_ttl_value": self.get_expire_at(expire_at), 432 | "delta": delta, 433 | }, 434 | { 435 | "item": self.item2, 436 | "expected_ttl_value": None, 437 | "delta": delta, 438 | }, 439 | { 440 | "item": self.item1, 441 | "expire_in": expire_in, 442 | "expire_at": expire_at, 443 | "delta": delta, 444 | "expected_ttl_value": None, 445 | "error": ValueError, 446 | }, 447 | { 448 | "item": 
self.item1, 449 | "expire_in": "randomtest", 450 | "expected_ttl_value": None, 451 | "delta": delta, 452 | "error": TypeError, 453 | }, 454 | { 455 | "item": self.item1, 456 | "expire_at": "not a datetime, int or float", 457 | "expected_ttl_value": None, 458 | "error": TypeError, 459 | "delta": delta, 460 | }, 461 | ] 462 | 463 | for case in test_cases: 464 | item = case.get("item") 465 | cexp_in = case.get("expire_in") 466 | cexp_at = case.get("expire_at") 467 | expected = case.get("expected_ttl_value") 468 | error = case.get("error") 469 | cdelta = case.get("delta") 470 | 471 | assert item 472 | if not error: 473 | # put 474 | self.db.put(item, expire_in=cexp_in, expire_at=cexp_at) 475 | got = self.db.get(item.get("key")) 476 | self.assertAlmostEqual( 477 | expected, got.get(self.ttl_attribute), delta=cdelta 478 | ) 479 | 480 | # insert 481 | # need to update key as insert does not allow a pre-existing key 482 | item["key"] = "".join(random.choices( 483 | string.ascii_lowercase, k=6)) 484 | self.db.insert(item, expire_in=cexp_in, expire_at=cexp_at) 485 | got = self.db.get(item.get("key")) 486 | self.assertAlmostEqual( 487 | expected, got.get(self.ttl_attribute), delta=cdelta 488 | ) 489 | 490 | # put many 491 | self.db.put_many([item], expire_in=cexp_in, expire_at=cexp_at) 492 | got = self.db.get(item.get("key")) 493 | self.assertAlmostEqual( 494 | expected, got.get(self.ttl_attribute), delta=cdelta 495 | ) 496 | 497 | # update 498 | # only if one of expire_in or expire_at is set 499 | if cexp_in or cexp_at: 500 | self.db.update( 501 | None, item.get("key"), expire_in=cexp_in, expire_at=cexp_at 502 | ) 503 | got = self.db.get(item.get("key")) 504 | self.assertAlmostEqual( 505 | expected, got.get(self.ttl_attribute), delta=cdelta 506 | ) 507 | else: 508 | self.assertRaises( 509 | error, self.db.put, item, expire_in=cexp_in, expire_at=cexp_at 510 | ) 511 | self.assertRaises( 512 | error, self.db.insert, item, expire_in=cexp_in, expire_at=cexp_at 513 | ) 514 | self.assertRaises( 515 | error, 516 | self.db.put_many, 517 | [item], 518 | expire_in=cexp_in, 519 | expire_at=cexp_at, 520 | ) 521 | self.assertRaises( 522 | error, 523 | self.db.update, 524 | None, 525 | item.get("key"), 526 | expire_in=cexp_in, 527 | expire_at=cexp_at, 528 | ) 529 | 530 | 531 | if __name__ == "__main__": 532 | unittest.main() 533 | --------------------------------------------------------------------------------
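A minimal usage sketch of the TTL options exercised by test_ttl above: it assumes the same environment variables the tests read (DETA_SDK_TEST_PROJECT_KEY, DETA_SDK_TEST_BASE_NAME), and the item keys "ttl_relative" and "ttl_absolute" are illustrative names, not part of the test data; this is distilled from the tests, not an official example from the repository.

import datetime
import os

from deta import Deta

deta = Deta(os.getenv("DETA_SDK_TEST_PROJECT_KEY"))
db = deta.Base(os.getenv("DETA_SDK_TEST_BASE_NAME"))

# Relative TTL: the item expires 300 seconds after the put.
db.put({"key": "ttl_relative", "value": 1}, expire_in=300)

# Absolute TTL: expire_at accepts a datetime (or an int/float timestamp).
db.put(
    {"key": "ttl_absolute", "value": 2},
    expire_at=datetime.datetime.now() + datetime.timedelta(seconds=300),
)

# The tests read the TTL back as a unix timestamp under "__expires"
# (DETA_SDK_TEST_TTL_ATTRIBUTE defaults to "__expires" in setUp).
print(db.get("ttl_relative").get("__expires"))

# Per test_ttl, passing both expire_in and expire_at raises ValueError,
# and non-numeric / non-datetime TTL values raise TypeError.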