├── tests
│   ├── __init__.py
│   ├── base.py
│   ├── test_save_and_load.py
│   ├── test_composite_key.py
│   ├── test_get_object_or_none.py
│   ├── test_auto_int.py
│   ├── test_filter_by.py
│   └── test_all.py
├── requirements.txt
├── subconscious
│   ├── __init__.py
│   ├── query.py
│   ├── column.py
│   └── model.py
├── tox.ini
├── .gitignore
├── .travis.yml
├── setup.py
├── LICENSE
└── README.md
/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aioredis 2 | coverage 3 | flake8 -------------------------------------------------------------------------------- /subconscious/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E226,E302,E41 3 | max-line-length = 121 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.ve* 2 | .idea 3 | *.pyc 4 | .coverage 5 | MANIFEST 6 | subconscious/__pycache__/ 7 | tests/__pycache__/ 8 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "3.6" 4 | services: 5 | - redis-server 6 | # command to install dependencies 7 | install: "pip install -r requirements.txt" 8 | # commands to run flake8 and tests 9 | script: 10 | - flake8 subconscious/ tests/ 11 | - nosetests --with-coverage --cover-package=.
-v -x --nocapture 12 | notifications: 13 | slack: itbit:ItuklhM0fg3jeFHLvIblLDcs 14 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from setuptools import setup, find_packages 4 | 5 | 6 | setup( 7 | name='subconscious', 8 | version='0.08.5', 9 | packages=find_packages(), 10 | url='https://github.com/paxos-bankchain/subconscious', 11 | license='MIT', 12 | author='Paxos Trust Company, LLC', 13 | author_email='pypi@paxos.com', 14 | description='redis-backed db for python3 (asyncio compatible)', 15 | install_requires=['aioredis'], 16 | classifiers=[ 17 | 'License :: OSI Approved :: MIT License', 18 | # async_generator requires python3.6+ 19 | 'Programming Language :: Python :: 3.6', 20 | ], 21 | ) 22 | -------------------------------------------------------------------------------- /tests/base.py: -------------------------------------------------------------------------------- 1 | import aioredis 2 | import asyncio 3 | from unittest import TestCase 4 | 5 | 6 | class BaseTestCase(TestCase): 7 | def setUp(self): 8 | self.loop = asyncio.new_event_loop() 9 | asyncio.set_event_loop(None) 10 | db_co = aioredis.create_redis( 11 | address=('localhost', 6379), 12 | db=13, 13 | loop=self.loop, 14 | encoding='utf-8', 15 | ) 16 | self.db = self.loop.run_until_complete(db_co) 17 | 18 | def tearDown(self): 19 | async def delete_all(): 20 | async for k in self.db.iscan(match='*Test*', count=100): 21 | await self.db.delete(k) 22 | self.loop.run_until_complete(delete_all()) 23 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Paxos Trust Company, LLC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /subconscious/query.py: -------------------------------------------------------------------------------- 1 | class Query(object): 2 | def __init__(self, model, db): 3 | self._model = model 4 | self._filter = {} 5 | self._order_by = None 6 | self._limit = None 7 | self._offset = None 8 | self._db = db 9 | 10 | def filter(self, **kwargs): 11 | self._filter.update(kwargs) 12 | return self 13 | 14 | def order_by(self, order_by): 15 | self._order_by = order_by 16 | return self 17 | 18 | def limit(self, limit): 19 | self._limit = limit 20 | return self 21 | 22 | def offset(self, offset): 23 | self._offset = offset 24 | return self 25 | 26 | def __aiter__(self): 27 | self.result_set = self._model.filter_by( 28 | db=self._db, 29 | order_by=self._order_by, 30 | limit=self._limit, 31 | offset=self._offset, 32 | **self._filter,) 33 | 34 | return self 35 | 36 | async def __anext__(self): 37 | async for x in self.result_set: 38 | return x 39 | raise StopAsyncIteration 40 | 41 | async def first(self): 42 | return await self._model.get_object_or_none(db=self._db, order_by=self._order_by, **self._filter) 43 | -------------------------------------------------------------------------------- /tests/test_save_and_load.py: -------------------------------------------------------------------------------- 1 | from subconscious.model import RedisModel, Column, InvalidModelDefinition, UnexpectedColumnError 2 | from uuid import uuid1 3 | from .base import BaseTestCase 4 | import enum 5 | 6 | 7 | class StatusEnum(enum.Enum): 8 | ACTIVE = 'active' 9 | 10 | 11 | class TestUser(RedisModel): 12 | id = Column(primary_key=True) 13 | name = Column(index=True) 14 | age = Column(index=True, type=int) 15 | locale = Column(index=True, type=int, required=False) 16 | status = Column(type=str, enum=StatusEnum, index=True) 17 | 18 | 19 | class TestSaveAndLoad(BaseTestCase): 20 | 21 | def test_save_and_load(self): 22 | user_id = str(uuid1()) 23 | user = TestUser(id=user_id, name='Test name', age=100, status='active') 24 | ret = self.loop.run_until_complete(user.save(self.db)) 25 | self.assertTrue(ret) 26 | 27 | # load 28 | user_in_db = self.loop.run_until_complete(TestUser.load(self.db, identifier=user_id)) 29 | self.assertEqual(user_in_db.name, user.name) 30 | 31 | def test_init_model_with_no_indexed_cols_should_error(self): 32 | with self.assertRaises(InvalidModelDefinition): 33 | class BadModel(RedisModel): 34 | unindex_col = Column() 35 | 36 | 37 | class BadSave(BaseTestCase): 38 | 39 | def test_unexpected_column_should_fail(self): 40 | 41 | class TestModel(RedisModel): 42 | id = Column(type=int, primary_key=True) 43 | 44 | with self.assertRaises(UnexpectedColumnError): 45 | TestModel(id=1, this_column_does_not_exist='foo') 46 | -------------------------------------------------------------------------------- /tests/test_composite_key.py: -------------------------------------------------------------------------------- 1 | from subconscious.model import RedisModel, Column 2 | from .base import BaseTestCase 3 | import enum 4 | 5 | 6 | class StatusEnum(enum.Enum): 7 | ACTIVE = 'active' 8 | 9 | 10 | class Diner(RedisModel): 11 | table_num = Column(composite_key=True, type=int) 12 | seat_num = Column(composite_key=True, type=int) 13 | comments = Column(type=str) 14 | 15 | 16 | class TestCompositeKeys(BaseTestCase): 17 | 18 | def setUp(self): 19 | super(TestCompositeKeys, self).setUp() 20 | 21 | diner = Diner( 22 | table_num=1, 23 | seat_num=4, 24 | 
comments='Very polite', 25 | ) 26 | ret = self.loop.run_until_complete(diner.save(self.db)) 27 | self.assertTrue(ret) 28 | 29 | def test_valid_composite_key_should_return(self): 30 | async def _test(): 31 | count = 0 32 | async for x in Diner.filter_by(db=self.db, table_num=1, seat_num=4,): 33 | self.assertEqual(x.comments, 'Very polite') 34 | count += 1 35 | self.assertEqual(count, 1) 36 | 37 | self.loop.run_until_complete(_test()) 38 | 39 | def test_partial_composite_key_should_succeed(self): 40 | # FIXME: is this really the desired behavior? 41 | async def _test(): 42 | count = 0 43 | async for x in Diner.filter_by(db=self.db, table_num=1,): 44 | self.assertEqual(x.comments, 'Very polite') 45 | count += 1 46 | self.assertEqual(count, 1) 47 | self.loop.run_until_complete(_test()) 48 | -------------------------------------------------------------------------------- /tests/test_get_object_or_none.py: -------------------------------------------------------------------------------- 1 | from subconscious.model import RedisModel, Column 2 | from .base import BaseTestCase 3 | import enum 4 | 5 | 6 | class StatusEnum(enum.Enum): 7 | ACTIVE = 'active' 8 | 9 | 10 | class Diner(RedisModel): 11 | table_num = Column(composite_key=True, type=int) 12 | seat_num = Column(composite_key=True, type=int) 13 | comments = Column(type=str) 14 | 15 | 16 | class TestGetObectOrNone(BaseTestCase): 17 | def setUp(self): 18 | super(TestGetObectOrNone, self).setUp() 19 | 20 | diner = Diner( 21 | table_num=1, 22 | seat_num=4, 23 | comments='Very polite', 24 | ) 25 | ret = self.loop.run_until_complete(diner.save(self.db)) 26 | self.assertTrue(ret) 27 | diner = Diner( 28 | table_num=2, 29 | seat_num=5, 30 | comments='Very rude', 31 | ) 32 | ret = self.loop.run_until_complete(diner.save(self.db)) 33 | self.assertTrue(ret) 34 | 35 | def test_get_object_or_none(self): 36 | diner = self.loop.run_until_complete(Diner.get_object_or_none(self.db, table_num=1)) 37 | self.assertIsNotNone(diner) 38 | self.assertEqual(Diner, type(diner)) 39 | 40 | # Not existing in the db 41 | diner = self.loop.run_until_complete(Diner.get_object_or_none(self.db, table_num=999)) 42 | self.assertIsNone(diner) 43 | 44 | diner = self.loop.run_until_complete(Diner.get_object_or_none(self.db, table_num=[1, 2])) 45 | self.assertIsNotNone(diner) 46 | self.assertEqual(Diner, type(diner)) 47 | -------------------------------------------------------------------------------- /tests/test_auto_int.py: -------------------------------------------------------------------------------- 1 | from .base import BaseTestCase 2 | from subconscious.column import Integer, Column 3 | from subconscious.model import RedisModel, BadDataError 4 | 5 | 6 | class TestUser(RedisModel): 7 | id = Integer(primary_key=True, auto_increment=True) 8 | name = Column(type=str) 9 | auto_id = Integer(auto_increment=True) 10 | 11 | 12 | class TestAutoInt(BaseTestCase): 13 | 14 | def test_auto(self): 15 | user = TestUser(name='foo') 16 | self.loop.run_until_complete(user.save(self.db)) 17 | self.assertEqual(user.id, 1) 18 | user = TestUser() 19 | self.loop.run_until_complete(user.save(self.db)) 20 | self.assertEqual(user.id, 2) 21 | 22 | def test_setting_auto_increment_should_throw_bad_data_error(self): 23 | # via constructor 24 | with self.assertRaises(BadDataError): 25 | TestUser(id=777) 26 | 27 | user = TestUser() 28 | # via attribute mutation 29 | with self.assertRaises(BadDataError): 30 | user.id = 424 31 | 32 | def test_load(self): 33 | user = TestUser(name='foo') 34 | 
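# save() fills in the auto-increment `id` via Integer.auto_generate (a Redis INCR), and
# BaseTestCase.tearDown clears that counter between tests, so this user gets id == 1,
# the identifier that load() uses below.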
self.loop.run_until_complete(user.save(self.db)) 35 | user_in_db = self.loop.run_until_complete(TestUser.load(db=self.db, identifier=1)) 36 | self.assertEqual(user.name, user_in_db.name) 37 | 38 | def test_update(self): 39 | user = TestUser(name='foo') 40 | self.loop.run_until_complete(user.save(self.db)) 41 | user_in_db = self.loop.run_until_complete(TestUser.load(db=self.db, identifier=1)) 42 | self.assertEqual(user.name, user_in_db.name) 43 | user_in_db.name = 'bar' 44 | self.loop.run_until_complete(user_in_db.save(self.db)) 45 | user_in_db = self.loop.run_until_complete(TestUser.load(db=self.db, identifier=1)) 46 | self.assertEqual('bar', user_in_db.name) 47 | -------------------------------------------------------------------------------- /subconscious/column.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from datetime import datetime 4 | from enum import EnumMeta 5 | 6 | 7 | class InvalidColumnDefinition(Exception): 8 | pass 9 | 10 | 11 | class Column(object): 12 | """Defined fields (columns) for a given RedisModel. 13 | """ 14 | 15 | def __init__(self, type=str, primary_key=None, composite_key=None, index=None, 16 | required=None, enum=None, sort=None): 17 | """primary_key can exist in only a single column. 18 | composite_key can exist in multiple columns. 19 | You can't have both a primary_key and composite_key in the same model. 20 | index is whether you want this column indexed or not for faster retrieval. 21 | """ 22 | if type not in (str, int, datetime): 23 | # TODO: support for other field types (uuid, etc) 24 | err_msg = 'Bad Field Type: {}'.format(type) 25 | raise InvalidColumnDefinition(err_msg) 26 | 27 | if primary_key and composite_key: 28 | err_msg = 'Column can be either primary_key or composite_key, but not both' 29 | raise InvalidColumnDefinition(err_msg) 30 | 31 | self.field_type = type 32 | self.primary = primary_key is True 33 | self.composite = composite_key is True 34 | self.sorted = sort is True 35 | self.indexed = (index is True) or self.composite 36 | self.required = required is True or self.primary or self.composite 37 | 38 | self.enum = enum 39 | if enum: 40 | if not isinstance(enum, EnumMeta): 41 | err_msg = '`{}` is not an instance of {}'.format(enum, EnumMeta) 42 | raise InvalidColumnDefinition(err_msg) 43 | self.enum_choices = set([x.value for x in enum]) 44 | else: 45 | self.enum_choices = set() 46 | 47 | def __repr__(self): 48 | return "<{}: {}>".format(self.__class__.__name__, self.name) 49 | 50 | 51 | class Integer(Column): 52 | def __init__( 53 | self, 54 | primary_key=None, 55 | composite_key=None, 56 | index=None, 57 | required=None, 58 | enum=None, 59 | sort=None, 60 | auto_increment=False,): 61 | super(Integer, self).__init__( 62 | int, 63 | primary_key=primary_key, 64 | composite_key=composite_key, 65 | index=index, 66 | required=required, 67 | enum=enum, 68 | sort=sort, 69 | ) 70 | self.auto_increment = auto_increment 71 | 72 | async def auto_generate(self, db, model): 73 | return await db.incr('auto:{}:{}'.format(model.key_prefix(), self.name)) 74 | -------------------------------------------------------------------------------- /tests/test_filter_by.py: -------------------------------------------------------------------------------- 1 | from subconscious.model import RedisModel, Column 2 | from uuid import uuid1 3 | from .base import BaseTestCase 4 | import enum 5 | 6 | 7 | class StatusEnum(enum.Enum): 8 | ACTIVE = 'active' 9 | INACTIVE = 'inactive' 10 | 11 | 12 | class 
TestUser(RedisModel): 13 | id = Column(primary_key=True) 14 | name = Column(index=True) 15 | age = Column(index=True, type=int) 16 | locale = Column(index=True, type=int, required=False) 17 | status = Column(type=str, enum=StatusEnum, index=True) 18 | 19 | 20 | class TestFilterBy(BaseTestCase): 21 | def setUp(self): 22 | super(TestFilterBy, self).setUp() 23 | user = TestUser(id=str(uuid1()), age=0, locale=0+10, status='active') 24 | self.loop.run_until_complete(user.save(self.db)) 25 | for i in range(9): 26 | user = TestUser(id=str(uuid1()), name='name-{}'.format(i), age=i, locale=i+10, status='active') 27 | self.loop.run_until_complete(user.save(self.db)) 28 | 29 | def test_filter_by(self): 30 | async def _test(): 31 | count = 0 32 | async for x in TestUser.filter_by(self.db, age=1): 33 | count += 1 34 | self.assertEqual(1, count) 35 | 36 | count = 0 37 | async for x in TestUser.filter_by(self.db, age=[1, 2]): 38 | count += 1 39 | self.assertEqual(2, count) 40 | 41 | count = 0 42 | result_list = [] 43 | async for x in TestUser.filter_by(self.db, status='active'): 44 | count += 1 45 | self.assertEqual(x.status, 'active') 46 | result_list.append(x) 47 | self.assertEqual(10, count) 48 | result_list[0].status = 'inactive' 49 | await result_list[0].save(self.db) 50 | 51 | count = 0 52 | async for x in TestUser.filter_by(self.db, status='active'): 53 | count += 1 54 | self.assertEqual(x.status, 'active') 55 | # Should be one less now 56 | self.assertEqual(9, count) 57 | 58 | self.loop.run_until_complete(_test()) 59 | 60 | def test_get_by_none(self): 61 | async def _test(): 62 | result_list = [] 63 | async for x in TestUser.filter_by(self.db, name=None): 64 | result_list.append(x) 65 | self.assertEqual(1, len(result_list)) 66 | self.loop.run_until_complete(_test()) 67 | 68 | def test_query(self): 69 | async def _test(): 70 | result_list = [] 71 | async for x in TestUser.query(db=self.db).filter(status='active'): 72 | result_list.append(x) 73 | self.assertEqual(10, len(result_list)) 74 | self.loop.run_until_complete(_test()) 75 | 76 | def test_query_no_filter(self): 77 | async def _test(): 78 | result_list = [] 79 | async for x in TestUser.query(db=self.db): 80 | result_list.append(x) 81 | self.assertEqual(10, len(result_list)) 82 | self.loop.run_until_complete(_test()) 83 | 84 | def test_query_first(self): 85 | async def _test(): 86 | user = await TestUser.query(db=self.db).filter(status='active').first() 87 | self.assertEqual(TestUser, type(user)) 88 | self.assertEqual(user.status, 'active') 89 | self.loop.run_until_complete(_test()) 90 | 91 | def test_query_first_no_filter(self): 92 | async def _test(): 93 | user = await TestUser.query(db=self.db).first() 94 | self.assertEqual(TestUser, type(user)) 95 | self.assertEqual(user.status, 'active') 96 | self.loop.run_until_complete(_test()) 97 | 98 | def test_query_chaining_filters(self): 99 | async def _test(): 100 | user = await TestUser.query(db=self.db).filter(name='name-1').filter(status='active').first() 101 | self.assertEqual(TestUser, type(user)) 102 | self.assertEqual(user.status, 'active') 103 | self.assertEqual(user.name, 'name-1') 104 | self.loop.run_until_complete(_test()) 105 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # subconscious 2 | 3 | In-memory database for python3.6+ only 4 | 5 | [![Build 
Status](https://api.travis-ci.com/paxos-bankchain/subconscious.svg?token=PA4epyQZ24dEsEEpEEEZ&branch=develop)](https://travis-ci.com/paxos-bankchain/subconscious) 6 | 7 | ## Install 8 | 9 | From [PyPi](https://pypi.python.org/pypi/subconscious): 10 | ```bash 11 | $ pip3 install subconscious 12 | ``` 13 | 14 | ## Quickstart 15 | 16 | Let's say you have the following in your `models.py` file: 17 | ```python 18 | from enum import Enum 19 | from subconscious.model import RedisModel 20 | from subconscious.column import Column 21 | 22 | class User(RedisModel): 23 | 24 | # This can be defined inside this class (easier imports) or elsewhere 25 | class Gender(Enum): 26 | MALE = 'male' 27 | FEMALE = 'female' 28 | 29 | uuid = Column(type=str, primary_key=True) 30 | name = Column(type=str, required=True) 31 | age = Column(index=True, type=int, sort=True, required=True) 32 | gender = Column(index=True, enum=Gender) 33 | country_code = Column(type=str, index=True) 34 | ``` 35 | 36 | Then somewhere you can use that model like this: 37 | ```python 38 | from aioredis import create_redis 39 | from asyncio import get_event_loop 40 | from models import User 41 | from uuid import uuid4 42 | 43 | loop = get_event_loop() 44 | 45 | async def go(): 46 | db = await create_redis(('localhost', 6379), loop=loop, encoding='utf-8') 47 | my_uuid = str(uuid4()) 48 | my_user = User( 49 | uuid=my_uuid, 50 | name='John Doe', 51 | age=30, 52 | gender=User.Gender.MALE.value, 53 | country_code='USA', 54 | ) 55 | print('Saving user with uuid {}...'.format(my_uuid)) 56 | await my_user.save(db) 57 | retrieved_user = await User.load(db, my_uuid) 58 | print('Retrieved {}'.format(retrieved_user.as_dict())) 59 | 60 | loop.run_until_complete(go()) 61 | ``` 62 | 63 | Which results in: 64 | ``` 65 | Saving user with uuid 153d68ff-2897-4385-af0c-fea986a68d1f... 66 | Retrieved {'age': 30, 'country_code': 'USA', 'gender': 'male', 'name': 'John Doe', 'uuid': '153d68ff-2897-4385-af0c-fea986a68d1f'} 67 | ``` 68 | 69 | You can also do advanced queries like this (`filter_by` is an async generator): 70 | ```python 71 | users = [user async for user in User.filter_by( 72 | db=db, 73 | age=[18, 19, 20, 21, 22], 74 | country_code='USA', 75 | gender=User.Gender.MALE.value, 76 | )] 77 | ``` 78 | 79 | Or use an async generator like this: 80 | ```python 81 | [user async for user in User.all( 82 | db=db, 83 | order_by='age', # you can also do '-age' for reverse sort 84 | limit=10, 85 | )] 86 | ``` 87 | 88 | ## More Examples 89 | See our demo app for a live example: https://github.com/paxos-bankchain/pastey 90 | 91 | ## Test 92 | 93 | Run Redis. We recommend using [docker](https://www.docker.com/community-edition): 94 | ```bash 95 | $ docker run -p 6379:6379 redis 96 | ``` 97 | (you can use `-d` to daemonize this process) 98 | 99 | Install [nose](http://nose.readthedocs.io/en/latest/): 100 | ```bash 101 | $ pip3 install nose 102 | ``` 103 | 104 | Confirm tests pass: 105 | ``` 106 | $ nosetests . 107 | ``` 108 | 109 | ## Contribute 110 | 111 | Check out the repo: 112 | ```bash 113 | $ git clone https://github.com/paxos-bankchain/subconscious.git && cd subconscious 114 | ``` 115 | 116 | Install locally (editable mode): 117 | ```bash 118 | $ pip3 install --editable . 119 | ``` 120 | 121 | Make some changes and confirm that tests still pass. 122 | 123 | --- 124 | 125 | ## Updating PyPi 126 | 127 | You must have the credentials in order to push updates to [PyPi](https://pypi.python.org/pypi).
128 | 129 | ### Do it Live 130 | Create a `.pypirc` file in your home directory: 131 | ``` 132 | $ cat ~/.pypirc 133 | [distutils] 134 | index-servers= 135 | pypi 136 | 137 | [pypi] 138 | repository = https://pypi.python.org/pypi 139 | username = paxos 140 | password = 141 | ``` 142 | 143 | Create a distribution: 144 | ``` 145 | $ python setup.py sdist bdist_wheel 146 | ``` 147 | 148 | Push your distribution to PyPi (may need to `pip3 install twine` first): 149 | ``` 150 | $ twine upload dist/* -r pypi 151 | ``` 152 | 153 | ### Testing 154 | 155 | To test this process, you can use [PyPi's test server](https://testpypi.python.org/). Add an entry to `.pypirc` that looks like this with whatever creds you create for testpypi: 156 | ``` 157 | [testpypi] 158 | repository = https://testpypi.python.org/pypi 159 | username = 160 | password = 161 | ``` 162 | 163 | Then use the following command to push your distribution to test PyPi: 164 | ``` 165 | $ twine upload dist/* -r testpypi 166 | ``` 167 | -------------------------------------------------------------------------------- /tests/test_all.py: -------------------------------------------------------------------------------- 1 | from subconscious.model import RedisModel, Column, InvalidQuery 2 | from uuid import uuid1 3 | from datetime import datetime 4 | from .base import BaseTestCase 5 | import enum 6 | 7 | 8 | class StatusEnum(enum.Enum): 9 | ACTIVE = 'active' 10 | 11 | 12 | class TestUser(RedisModel): 13 | id = Column(primary_key=True) 14 | name = Column(index=True, sort=True) 15 | age = Column(index=True, type=int,) 16 | locale = Column(index=True, type=int, required=False) 17 | status = Column(type=str, enum=StatusEnum) 18 | birth_date = Column(type=datetime, required=False) 19 | 20 | 21 | class TestAll(BaseTestCase): 22 | 23 | def setUp(self): 24 | super(TestAll, self).setUp() 25 | user_id = str(uuid1()) 26 | user = TestUser(id=user_id, name='Test name', age=100, status='active') 27 | ret = self.loop.run_until_complete(user.save(self.db)) 28 | self.assertTrue(ret) 29 | 30 | user_id = str(uuid1()) 31 | bdate = datetime(1854, 1, 6, 14, 35, 19) 32 | user1 = TestUser(id=user_id, name='ZTest name', age=53, birth_date=bdate) 33 | ret = self.loop.run_until_complete(user1.save(self.db)) 34 | self.assertTrue(ret) 35 | 36 | user_id = str(uuid1()) 37 | user1 = TestUser(id=user_id, name='Test name2', age=53) 38 | ret = self.loop.run_until_complete(user1.save(self.db)) 39 | self.assertTrue(ret) 40 | 41 | def test_all(self): 42 | async def _test_all(): 43 | async for x in TestUser.all(db=self.db): 44 | self.assertEqual(type(x), TestUser) 45 | self.assertTrue(x.name in ('Test name', 'ZTest name', 'Test name2')) 46 | self.assertTrue(x.age in (100, 53)) 47 | if x.name == 'ZTest name': 48 | bdate = datetime(1854, 1, 6, 14, 35, 19) 49 | self.assertEqual(x.birth_date, bdate) 50 | self.loop.run_until_complete(_test_all()) 51 | 52 | def test_all_with_order(self): 53 | async def _test(): 54 | 55 | expected_in_order = ['Test name', 'Test name2', 'ZTest name'] 56 | result_list = [] 57 | async for x in TestUser.all(db=self.db, order_by='name'): 58 | result_list.append(x.name) 59 | self.assertEqual(result_list, expected_in_order) 60 | 61 | expected_in_order.sort(reverse=True) 62 | result_list = [] 63 | async for x in TestUser.all(db=self.db, order_by='-name'): 64 | result_list.append(x) 65 | self.assertEqual([x.name for x in result_list], expected_in_order) 66 | 67 | # update a record to force sort order change 68 | result_list[0].name = 'AATest name' 69 | await
result_list[0].save(self.db) 70 | result_list = [] 71 | expected_in_order = ['AATest name', 'Test name', 'Test name2'] 72 | async for x in TestUser.all(db=self.db, order_by='name'): 73 | result_list.append(x) 74 | self.assertEqual([x.name for x in result_list], expected_in_order) 75 | 76 | self.loop.run_until_complete(_test()) 77 | 78 | def test_filter_by_non_existing_fields_should_fail(self): 79 | async def _test(): 80 | async for x in TestUser.filter_by(db=self.db, non_existing1='dummy', non_existing2=1): 81 | assert x # Just to satisfy flake8 82 | with self.assertRaises(InvalidQuery): 83 | self.loop.run_until_complete(_test()) 84 | 85 | def test_filter_by_non_indexed_field_should_fail(self): 86 | async def _test(): 87 | async for x in TestUser.filter_by(db=self.db, status='active',): 88 | assert x # Just to satisfy flake8 89 | with self.assertRaises(InvalidQuery): 90 | self.loop.run_until_complete(_test()) 91 | 92 | def test_all_iter(self): 93 | names_in_expected_order = ['Test name', 'Test name2', 'ZTest name'] 94 | result_array = [] 95 | 96 | async def _test_loop(): 97 | count = 0 98 | async for x in TestUser.all(db=self.db, order_by='name'): 99 | self.assertEqual(x.name, names_in_expected_order[count]) 100 | count += 1 101 | result_array.append(x.name) 102 | self.assertEqual(names_in_expected_order, result_array) 103 | 104 | self.loop.run_until_complete(_test_loop()) 105 | 106 | 107 | class TestAllLimitOffset(TestAll): 108 | 109 | def test_limit_only(self): 110 | async def _test(): 111 | result_array = [] 112 | async for x in TestUser.all(db=self.db, order_by='name', limit=1): 113 | result_array.append(x.name) 114 | self.assertEqual(result_array, ['Test name']) 115 | self.loop.run_until_complete(_test()) 116 | 117 | def test_limit_and_offset(self): 118 | async def _test(): 119 | result_array = [] 120 | async for x in TestUser.all(db=self.db, order_by='name', limit=1, offset=1): 121 | result_array.append(x.name) 122 | self.assertEqual(result_array, ['Test name2']) 123 | self.loop.run_until_complete(_test()) 124 | 125 | def test_offset_only(self): 126 | async def _test(): 127 | result_array = [] 128 | async for x in TestUser.all(db=self.db, order_by='name', offset=1): 129 | result_array.append(x.name) 130 | self.assertEqual(result_array, ['Test name2', 'ZTest name']) 131 | self.loop.run_until_complete(_test()) 132 | 133 | def test_over_offset(self): 134 | async def _test(): 135 | result_array = [] 136 | async for x in TestUser.all(db=self.db, order_by='name', offset=999): 137 | result_array.append(x.name) 138 | self.assertEqual(result_array, []) 139 | self.loop.run_until_complete(_test()) 140 | 141 | def test_nonbinding_limit(self): 142 | async def _test(): 143 | result_array = [] 144 | async for x in TestUser.all(db=self.db, order_by='name', limit=999): 145 | result_array.append(x.name) 146 | self.assertEqual(result_array, ['Test name', 'Test name2', 'ZTest name']) 147 | self.loop.run_until_complete(_test()) 148 | -------------------------------------------------------------------------------- /subconscious/model.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import inspect 4 | import logging 5 | import uuid 6 | from datetime import datetime 7 | 8 | from .column import Column 9 | from .query import Query 10 | 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | VALUE_ID_SEPARATOR = '\x00' 15 | MODEL_NAME_ID_SEPARATOR = ':' 16 | DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S.%f' 17 | 18 | 19 | # Exceptions 20 | 21 | 
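# Summary of when each exception below is raised (see the call sites further down):
# - InvalidQuery: load() called without an identifier or redis_key, filtering/ordering on a
#   column that is not queryable, or a non-int limit/offset passed to filter_by().
# - InvalidModelDefinition: ModelMeta rejects a model with no primary or composite key, a
#   single composite key, or both primary and composite keys.
# - BadDataError: a value of the wrong type, a value outside the column's enum, a missing
#   required column, or an attempt to set an auto_increment column.
# - UnexpectedColumnError: a constructor kwarg that does not match any declared column.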
class InvalidQuery(Exception): 22 | pass 23 | 24 | 25 | class InvalidModelDefinition(Exception): 26 | pass 27 | 28 | 29 | class BadDataError(Exception): 30 | pass 31 | 32 | 33 | class UnexpectedColumnError(Exception): 34 | pass 35 | 36 | 37 | class ModelMeta(type): 38 | 39 | def __init__(cls, what, bases=None, attributes=None): 40 | super(ModelMeta, cls).__init__(what, bases, attributes) 41 | if cls.__name__ not in ('RedisModel', 'TimeStampedModel'): 42 | columns = [] 43 | num_primary, num_composite = 0, 0 44 | cls._pk_name = None 45 | # grab all Columns from the model 46 | for name, column in inspect.getmembers(cls, lambda col: isinstance(col, Column)): 47 | column.name = name 48 | columns.append(column) 49 | if column.primary: 50 | num_primary += 1 51 | cls._pk_name = column.name 52 | if column.composite: 53 | num_composite += 1 54 | 55 | # Defensive checks 56 | if num_primary == 0: 57 | if num_composite == 0: 58 | err_msg = 'No primary key or composite key in {}'.format(cls.__name__) 59 | raise InvalidModelDefinition(err_msg) 60 | if num_composite == 1: 61 | err_msg = 'Your composite key is really a primary key in {}'.format(cls.__name__) 62 | raise InvalidModelDefinition(err_msg) 63 | if num_primary == 1: 64 | if num_composite != 0: 65 | err_msg = 'Cannot have both primary and composite keys in {}'.format(cls.__name__) 66 | raise InvalidModelDefinition(err_msg) 67 | 68 | cls._columns = tuple(sorted(columns, key=lambda c: c.name)) 69 | cls._indexed_columns = tuple(sorted([col for col in cls._columns if col.indexed], key=lambda c: c.name)) 70 | cls._sortable_columns = tuple(sorted([col for col in cls._columns if col.sorted], key=lambda c: c.name)) 71 | cls._identifier_columns = tuple( 72 | sorted([col for col in cls._columns if col.primary or col.composite], 73 | key=lambda c: c.name)) 74 | cls._auto_columns = sorted( 75 | [col for col in cls._columns if getattr(col, 'auto_increment', False)], 76 | key=lambda c: c.name 77 | ) 78 | cls._queryable_colnames_set = set( 79 | [col.name for col in cls._indexed_columns + cls._identifier_columns + cls._sortable_columns] 80 | ) 81 | cls._sortable_column_names = tuple([x.name for x in cls._sortable_columns]) 82 | cls._auto_column_names = {col.name for col in cls._auto_columns} 83 | cls._indexed_column_names = {col.name for col in cls._indexed_columns} 84 | cls._columns_map = {c.name: c for c in cls._columns} 85 | cls._identifier_column_names = tuple([x.name for x in cls._identifier_columns]) 86 | 87 | 88 | class RedisModel(object, metaclass=ModelMeta): 89 | 90 | # force only keyword arguments 91 | def __init__(self, **kwargs): 92 | loading = kwargs.pop('loading', False) 93 | for column in self._columns: 94 | if column.name in kwargs: 95 | value = kwargs.pop(column.name) 96 | if type(value) != column.field_type: 97 | err_msg = "Column `{}` in {} has value {}, should be of type {}".format( 98 | column.name, 99 | self.__class__.__name__, 100 | value, 101 | column.field_type, 102 | ) 103 | raise BadDataError(err_msg) 104 | 105 | if column.enum_choices and value not in column.enum_choices: 106 | err_msg = "Column `{}` in {} has value {}, should be in set {}".format( 107 | column.name, 108 | self.__class__.__name__, 109 | value, 110 | column.enum_choices, 111 | ) 112 | raise BadDataError(err_msg) 113 | if getattr(column, 'auto_increment', False) and not loading: 114 | err_msg = "Not allowed to set auto_increment column({})".format(column.name) 115 | raise BadDataError(err_msg) 116 | 117 | self.__dict__.update({column.name: value}) 118 | else: 119 | if 
column.required and not getattr(column, 'auto_increment', False): 120 | err_msg = 'Missing column `{}` in `{}` is required'.format( 121 | column.name, 122 | self.__class__.__name__, 123 | ) 124 | raise BadDataError(err_msg) 125 | 126 | # Require that every kwarg supplied matches an expected column 127 | # TODO: handle TimeStampedModel cols better 128 | known_cols_set = set([column.name for column in self._columns] + ['updated_at', 'created_at']) 129 | supplied_cols_set = set([x for x in kwargs]) 130 | unknown_cols_set = supplied_cols_set - known_cols_set 131 | if unknown_cols_set != set(): 132 | err_msg = 'Unknown column(s): {} in `{}`'.format( 133 | unknown_cols_set, 134 | self.__class__.__name__, 135 | ) 136 | raise UnexpectedColumnError(err_msg) 137 | 138 | def __setattr__(self, name, value): 139 | if name in self._auto_column_names: 140 | err_msg = "Not allowed to set auto_increment column({})".format(name) 141 | raise BadDataError(err_msg) 142 | 143 | return super(RedisModel, self).__setattr__(name, value) 144 | 145 | @classmethod 146 | def key_prefix(cls): 147 | """Prefix that we use for Redis storage, used for all keys related 148 | to this object. Default to class name. 149 | """ 150 | return cls.__name__ 151 | 152 | @classmethod 153 | def make_key(cls, identifier): 154 | """Convenience method for computing the Redis object instance key 155 | from the identifier 156 | """ 157 | return "{}{}{}".format(cls.key_prefix(), MODEL_NAME_ID_SEPARATOR, identifier) 158 | 159 | def has_real_data(self, column_name): 160 | return not isinstance(getattr(self, column_name), Column) 161 | 162 | def identifier(self): 163 | identifiers = [str(getattr(self, column.name)) for column in self._identifier_columns] 164 | return ':'.join(identifiers) 165 | 166 | def redis_key(self): 167 | """Key used for storage of object instance in Redis. 168 | """ 169 | return "{}{}{}".format(self.key_prefix(), MODEL_NAME_ID_SEPARATOR, self.identifier()) 170 | 171 | def as_dict(self): 172 | """Dict version of this object 173 | """ 174 | # WARNING: we have to send a copy, otherwise changing the dict 175 | # changes the object! 176 | # FIXME: this returns no keys for keys whose value is None! 177 | return self.__dict__.copy() 178 | 179 | def __repr__(self): 180 | return "<{}>".format(self.redis_key()) 181 | 182 | @classmethod 183 | def get_index_key(cls, column_name): 184 | return 'index{}{}{}{}'.format(MODEL_NAME_ID_SEPARATOR, cls.key_prefix(), MODEL_NAME_ID_SEPARATOR, column_name) 185 | 186 | async def save_index(self, db, stale_object=None): 187 | for indexed_column in self._queryable_colnames_set: 188 | index_key = self.get_index_key(indexed_column) 189 | if stale_object: 190 | stale_index_value = '{}{}{}'.format( 191 | getattr(stale_object, indexed_column), 192 | VALUE_ID_SEPARATOR, 193 | stale_object.identifier() 194 | ) 195 | await db.zrem(index_key, stale_index_value) 196 | index_value = '{}{}{}'.format( 197 | getattr(self, indexed_column), 198 | VALUE_ID_SEPARATOR, 199 | self.identifier() 200 | ) 201 | # Index it by adding to a sorted set with 0 score. It will be lexically sorted by redis 202 | await db.zadd(index_key, 0, index_value,) 203 | 204 | async def save(self, db): 205 | """Save the object to Redis. 
206 | """ 207 | kwargs = {} 208 | for col in self._auto_columns: 209 | if not self.has_real_data(col.name): 210 | kwargs[col.name] = await col.auto_generate(db, self) 211 | self.__dict__.update(kwargs) 212 | 213 | # we have to delete the old index key 214 | stale_object = await self.__class__.load(db, identifier=self.identifier()) 215 | d = { 216 | k: (v.strftime(DATETIME_FORMAT) if isinstance(v, datetime) else v) 217 | for k, v in self.__dict__.items() 218 | } 219 | success = await db.hmset_dict(self.redis_key(), d) 220 | await self.save_index(db, stale_object=stale_object) 221 | return success 222 | 223 | async def exists(self, db): 224 | return await db.exists(self.redis_key()) 225 | 226 | @classmethod 227 | async def load(cls, db, identifier=None, redis_key=None): 228 | """Load the object from redis. Use the identifier (colon-separated 229 | composite keys or the primary key) or the redis_key. 230 | """ 231 | if not identifier and not redis_key: 232 | raise InvalidQuery('Must supply identifier or redis_key') 233 | if redis_key is None: 234 | redis_key = cls.make_key(identifier) 235 | if await db.exists(redis_key): 236 | data = await db.hgetall(redis_key) 237 | kwargs = {} 238 | for key_bin, value_bin in data.items(): 239 | key, value = key_bin, value_bin 240 | column = getattr(cls, key, False) 241 | if not column or (column.field_type == str): 242 | kwargs[key] = value 243 | elif column.field_type == datetime: 244 | kwargs[key] = datetime.strptime(value, DATETIME_FORMAT) 245 | else: 246 | kwargs[key] = column.field_type(value) 247 | kwargs['loading'] = True 248 | return cls(**kwargs) 249 | else: 250 | logger.debug("No Redis key found: {}".format(redis_key)) 251 | return None 252 | 253 | @classmethod 254 | async def all(cls, db, order_by=None, limit=None, offset=None): 255 | async for x in cls.filter_by(db, order_by=order_by, limit=limit, offset=offset): 256 | yield x 257 | 258 | @classmethod 259 | async def _get_ordered_result(cls, db, list_to_order, order_by, direction): 260 | """ 261 | 262 | :param list_to_order: 263 | :param order_by: 264 | :param direction: 265 | :return: 266 | 267 | Sort the given list in redis. 
268 | https://redis.io/commands/sort#using-hashes-in-codebycode-and-codegetcode 269 | """ 270 | pairs = [] 271 | for x in list_to_order: 272 | pairs.extend([0, x]) 273 | if pairs: 274 | ordered_res_key = 'filtered_result-{}'.format(uuid.uuid1()) 275 | await db.zadd(ordered_res_key, pairs[0], pairs[1], *pairs[2:]) 276 | ordered_result = await db.sort( 277 | ordered_res_key, 278 | by='{}:*->{}'.format(cls.__name__, order_by), 279 | alpha=True, 280 | asc=direction 281 | ) 282 | # Delete the temp store 283 | await db.delete(ordered_res_key) 284 | return ordered_result 285 | else: 286 | return [] 287 | 288 | @classmethod 289 | async def _get_ids_filter_by(cls, db, order_by=None, **kwargs): 290 | if order_by: 291 | direction = b'DESC' if order_by[0] == '-' else None 292 | if order_by[0] in ('+', '-'): 293 | order_by = order_by[1:] 294 | if order_by not in cls._queryable_colnames_set: 295 | err_msg = 'order_by field {order_by} is not in {queryable_cols}'.format( 296 | order_by=order_by, 297 | queryable_cols=cls._queryable_colnames_set, 298 | ) 299 | raise InvalidQuery(err_msg) 300 | 301 | missing_cols_set = set(kwargs.keys()) - cls._queryable_colnames_set 302 | if missing_cols_set: 303 | err_msg = '{missing_cols_set} not in {queryable_cols}'.format( 304 | missing_cols_set=missing_cols_set, 305 | queryable_cols=cls._queryable_colnames_set, 306 | ) 307 | raise InvalidQuery(err_msg) 308 | result_set = set() 309 | first_iteration = True 310 | for k, v in kwargs.items(): 311 | if v is None: 312 | v = cls._columns_map[k] 313 | if isinstance(v, (list, tuple)): 314 | values = [str(x) for x in v] 315 | elif isinstance(v, datetime): 316 | values = (v.strftime(DATETIME_FORMAT),) 317 | else: 318 | values = (str(v),) 319 | temp_set = set() 320 | for value in values: 321 | temp_set = temp_set.union({x.partition(VALUE_ID_SEPARATOR)[2] for x in await db.zrangebylex( 322 | cls.get_index_key(k), 323 | min='{}{}'.format(value, VALUE_ID_SEPARATOR).encode(), 324 | max='{}{}\xff'.format(value, VALUE_ID_SEPARATOR).encode())}) 325 | if first_iteration: 326 | result_set = result_set.union(temp_set) 327 | first_iteration = False 328 | else: 329 | result_set = result_set.intersection(temp_set) 330 | if not kwargs: 331 | for index_entry in await db.zrange(cls.get_index_key(cls._identifier_column_names[0]), 0, -1): 332 | result_set.add(index_entry.split(VALUE_ID_SEPARATOR)[-1]) 333 | if order_by: 334 | return await cls._get_ordered_result(db, list_to_order=result_set, order_by=order_by, direction=direction) 335 | 336 | return sorted(result_set) 337 | 338 | @classmethod 339 | async def filter_by(cls, db, offset=None, limit=None, **kwargs): 340 | """Query by attributes iteratively. 
Ordering is supported via the order_by keyword (this is how all() and Query use it); without it, identifiers are returned in sorted order. 341 | Example: 342 | async for user in User.filter_by(db, age=[32, 54]): ... 343 | async for user in User.filter_by(db, age=23, name="guido"): ... 344 | 345 | """ 346 | if limit and type(limit) is not int: 347 | raise InvalidQuery('If limit is supplied it must be an int') 348 | if offset and type(offset) is not int: 349 | raise InvalidQuery('If offset is supplied it must be an int') 350 | 351 | ids_to_iterate = await cls._get_ids_filter_by(db, **kwargs) 352 | if offset: 353 | # Using offset without order_by is pretty strange, but allowed 354 | if limit: 355 | ids_to_iterate = ids_to_iterate[offset:offset+limit] 356 | else: 357 | ids_to_iterate = ids_to_iterate[offset:] 358 | elif limit: 359 | ids_to_iterate = ids_to_iterate[:limit] 360 | 361 | for key in ids_to_iterate: 362 | yield await cls.load(db, key) 363 | 364 | @classmethod 365 | async def get_object_or_none(cls, db, **kwargs): 366 | """ 367 | Return the first object that matches this query, or None. 368 | WARNING: if more than one object satisfies the conditions in kwargs, 369 | only the first one in the query's sort order is returned. 370 | """ 371 | async for obj in cls.filter_by(db, limit=1, **kwargs): 372 | return obj 373 | return None 374 | 375 | @classmethod 376 | def query(cls, db) -> Query: 377 | return Query(model=cls, db=db) 378 | --------------------------------------------------------------------------------
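Usage note for the `Query` helper defined in `subconscious/query.py` above: `RedisModel.query(db)` returns a chainable builder whose `filter()`, `order_by()`, `limit()` and `offset()` methods each return the query itself, and the query supports `async for` as well as `first()`. A minimal sketch, assuming the `User` model and the `db` connection from the README Quickstart (the helper function name is illustrative):
```python
async def query_example(db):
    # Chained filter() calls merge their kwargs, so the conditions are AND-ed together.
    query = (User.query(db=db)
             .filter(country_code='USA')
             .filter(gender=User.Gender.MALE.value)
             .order_by('age')  # '-age' reverses the sort, as with Model.all()
             .limit(5))

    users = [user async for user in query]  # Query objects are async-iterable
    first_match = await User.query(db=db).filter(country_code='USA').first()  # first hit or None
    return users, first_match
```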