├── .gitignore ├── .travis.yml ├── LICENSE ├── MANIFEST.in ├── README.md ├── config_local.py.enc ├── couchdb_compose.yaml ├── fam.toml ├── requirements.txt ├── setup.py └── src └── fam ├── __init__.py ├── acl ├── __init__.py ├── requirement.py └── writer.py ├── blud.py ├── buffer ├── __init__.py ├── buffer_views.py └── write_buffer.py ├── constants.py ├── database ├── __init__.py ├── base.py ├── base_adapter.py ├── caching.py ├── couchbase_server.py ├── couchdb.py ├── couchdb_adapter.py ├── custom_token.py ├── firestore.py ├── firestore_adapter.py ├── firestore_contexts.py ├── firestore_test_client.py ├── firestore_test_wrapper.py ├── mock.py ├── null.py └── sync_gateway.py ├── exceptions.py ├── extra_types ├── __init__.py └── lat_long.py ├── fam_json.py ├── fields.py ├── firestore_sync ├── __init__.py └── syncer.py ├── mapper.py ├── schema ├── __init__.py ├── mutator.py ├── validator.py └── writer.py ├── tests ├── __init__.py ├── common │ ├── __init__.py │ ├── anything_tests.py │ ├── basic_tests.py │ ├── callback_tests.py │ ├── data │ │ ├── animal_views.js │ │ └── last_seq.json │ ├── field_attribute_tests.py │ ├── index_tests.py │ ├── migration_tests.py │ └── schema_tests.py ├── data │ └── goldfish.jpg ├── depricated │ ├── couchbase │ │ └── couchbase_utils_tests.py │ ├── test_couchbase │ │ ├── __init__.py │ │ ├── _test_basic.py │ │ ├── _test_basic2.py │ │ └── couchbase_tests.py │ └── test_sync_gateway │ │ ├── __init__.py │ │ ├── _test_acl.py │ │ ├── _test_basic.py │ │ ├── _test_sync_gateway_common.py │ │ ├── _test_user.py │ │ ├── _test_view_creation.py │ │ ├── config.py │ │ └── data │ │ ├── last_seq.json │ │ └── sync_conf_template ├── models │ ├── __init__.py │ ├── _test02.py │ ├── _test03.py │ ├── acl.py │ ├── test01.py │ ├── test04.py │ └── test05.py ├── test_couchdb │ ├── __init__.py │ ├── animal_views.js │ ├── config.py │ ├── test_buffer.py │ ├── test_buffer_views.py │ ├── test_couchdb_common.py │ ├── test_index.py │ ├── test_iterator.py │ ├── test_mapper.py │ ├── test_mapping.py │ ├── test_serialisation.py │ ├── test_temp.py │ └── test_unique.py ├── test_firestore │ ├── __init__.py │ ├── firestore │ │ ├── .firebaserc │ │ ├── .gitignore │ │ └── firebase.json │ ├── fixtures.py │ ├── test_firestore_contexts.py │ ├── test_firestore_db.py │ ├── test_firestore_fields.py │ └── test_serialisation.py ├── test_mock │ ├── __init__.py │ └── test_mock_db.py ├── test_mutation │ ├── __init__.py │ ├── data │ │ └── dog_mutation.py │ ├── models │ │ ├── __init__.py │ │ ├── test01.py │ │ └── test02.py │ └── test_mutator.py └── test_sync │ ├── __init__.py │ └── test_sync.py └── utils ├── __init__.py ├── backoff.py ├── couchbase_utils.py └── requests_shim.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | .DS_Store 3 | .idea 4 | *.egg-info 5 | 6 | 7 | /build 8 | /dist 9 | /src/fam/tests/test_couchdb/data/ 10 | src/fam/tests/test_firestore/config_local.py 11 | /src/fam/tests/secrets/ 12 | src/fam/tests/test_mutation/data/schemata/ 13 | 14 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - '2.7' 4 | - '3.6' 5 | install: 6 | - pip install -r requirements.txt 7 | services: 8 | - couchdb 9 | script: 10 | - pytest src/fam/tests/test_couchdb src/fam/tests/test_mock src/fam/tests/test_firestore src/fam/tests/test_sync 11 | before_install: 12 | - openssl aes-256-cbc -K 
$encrypted_a3b886232b8c_key -iv $encrypted_a3b886232b8c_iv 13 | -in config_local.py.enc -out src/fam/tests/test_firestore/config_local.py -d 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 Paul Harter 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 20 | 21 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE *.md *.py 2 | recursive-include src *.py 3 | -------------------------------------------------------------------------------- /config_local.py.enc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/config_local.py.enc -------------------------------------------------------------------------------- /couchdb_compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | services: 4 | postgres: 5 | container_name: couchdb 6 | image: couchdb:latest 7 | restart: always 8 | ports: 9 | - "5984:5984" 10 | volumes: 11 | - /Users/paul/Dropbox/glowinthedark/fam/data:/opt/couchdb/data 12 | environment: 13 | - COUCHDB_USER=paul 14 | - COUCHDB_PASSWORD=password 15 | -------------------------------------------------------------------------------- /fam.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "example_package_YOUR_USERNAME_HERE" 7 | version = "3.0.0" 8 | authors = [ 9 | { name="Paul Harter", email="paul@glowinthedark.co.uk" }, 10 | ] 11 | description = "Simple Python ORM for CouchDB, Firebase and Sync Gateway" 12 | readme = "README.md" 13 | requires-python = ">=3.7" 14 | classifiers=[ 15 | 'Development Status :: 4 - Beta', 16 | 'Natural Language :: English', 17 | 'Operating System :: OS Independent', 18 | 'Programming Language :: Python :: 3.7', 19 | 'License :: OSI Approved :: MIT License' 20 | ] 21 | 22 | dependencies = ['js2py', 'requests', 'simplejson', 'jsonschema', 'mock', 'pytz', 'ply==3.4', 'firebase_admin', 'six', 'grpcio'] 23 | zip_safe = false 24 | package_dir = {''='src'} 25 | include_package_data = true 26 | zip_safe 
= false 27 | 28 | [tool.setuptools.packages.find] 29 | where = ["src"] 30 | include = ["fam*"] 31 | exclude = ["fam.tests*"] 32 | namespaces = false 33 | 34 | [project.urls] 35 | "Homepage" = "https://github.com/paulharter/fam" 36 | "Bug Tracker" = "https://github.com/paulharter/fam/issues" 37 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | js2py 2 | requests 3 | simplejson 4 | jsonschema 5 | mock 6 | pytz 7 | ply==3.4 8 | firebase_admin 9 | six 10 | grpcio 11 | google-auth -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | 2 | from setuptools import setup, find_packages 3 | 4 | setup(name='fam', 5 | version='3.0.2', 6 | description="Simple Python ORM for CouchDB, Firebase and Sync Gateway", 7 | url="https://github.com/paulharter/fam", 8 | classifiers=[ 9 | 'Development Status :: 4 - Beta', 10 | 'Natural Language :: English', 11 | 'Operating System :: OS Independent', 12 | 'Programming Language :: Python :: 3.7', 13 | 'License :: OSI Approved :: MIT License' 14 | ], 15 | author='Paul Harter', 16 | author_email='paul@glowinthedark.co.uk', 17 | license="LICENSE", 18 | install_requires=['js2py', 'requests', 'simplejson', 'jsonschema', 'mock', 'pytz', 'ply==3.4', 19 | 'firebase_admin', 'six', 'grpcio'], 20 | packages=find_packages('src'), 21 | package_dir={'': 'src'}, 22 | include_package_data=True, 23 | zip_safe=False 24 | ) 25 | -------------------------------------------------------------------------------- /src/fam/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/__init__.py -------------------------------------------------------------------------------- /src/fam/acl/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from .requirement import CreateRequirement, DeleteRequirement, UpdateRequirement 3 | 4 | ANYONE = None 5 | NO_ONE = [] 6 | 7 | __all__ = [ 8 | "CreateRequirement", 9 | "UpdateRequirement", 10 | "DeleteRequirement", 11 | "ANYONE", 12 | "NO_ONE" 13 | ] 14 | -------------------------------------------------------------------------------- /src/fam/acl/requirement.py: -------------------------------------------------------------------------------- 1 | 2 | CREATE = "create" 3 | UPDATE = "update" 4 | DELETE = "delete" 5 | 6 | 7 | 8 | class BaseRequirement(object): 9 | 10 | action = None 11 | 12 | def __init__(self, user=None, role=(), access=True, owner=False): 13 | 14 | self.user = user 15 | self.role = role 16 | self.access = access 17 | self.owner = owner 18 | 19 | def as_json(self): 20 | 21 | j = {} 22 | if self.user is not None: 23 | j["user"] = self.user 24 | if self.role is not None: 25 | j["role"] = self.role 26 | if self.access is False: 27 | j["withoutAccess"] = True 28 | if self.owner is True: 29 | j["owner"] = True 30 | 31 | return j 32 | 33 | 34 | class CreateRequirement(BaseRequirement): 35 | pass 36 | 37 | 38 | class UpdateRequirement(BaseRequirement): 39 | 40 | def __init__(self, user=None, role=(), access=True, owner=False, fields=None): 41 | 42 | super(UpdateRequirement, self).__init__(user=user, role=role, access=access, owner=owner) 43 | self.fields = fields 44 | 45 | def as_json(self): 46 | 47 | j = 
super(UpdateRequirement, self).as_json() 48 | if self.fields is not None: 49 | j["fields"] = self.fields 50 | return j 51 | 52 | 53 | class DeleteRequirement(BaseRequirement): 54 | pass 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /src/fam/acl/writer.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | from .requirement import CreateRequirement, UpdateRequirement, DeleteRequirement 5 | 6 | THIS_DIR = os.path.dirname(__file__) 7 | 8 | 9 | def _access_from_mapper(mapper): 10 | 11 | types = [] 12 | for ns_name, ns in mapper.namespaces.items(): 13 | for class_name, cls in ns.items(): 14 | if cls.grants_access: 15 | types.append(class_name) 16 | return types 17 | 18 | 19 | def _requirements_from_mapper(mapper): 20 | 21 | create_reqs = {} 22 | update_reqs = {} 23 | delete_reqs = {} 24 | 25 | requirements = {"create": create_reqs, 26 | "update": update_reqs, 27 | "delete": delete_reqs} 28 | 29 | for ns_name, ns in mapper.namespaces.items(): 30 | for class_name, cls in ns.items(): 31 | if cls.acl is not None: 32 | class_create_req = [r for r in cls.acl if isinstance(r, CreateRequirement)] 33 | if len(class_create_req) == 0: 34 | create_reqs[class_name] = CreateRequirement().as_json() 35 | elif len(class_create_req) == 1: 36 | create_reqs[class_name] = class_create_req[0].as_json() 37 | else: 38 | raise Exception("too many create requirements in %s" % class_name) 39 | 40 | class_update_req = [r for r in cls.acl if isinstance(r, UpdateRequirement)] 41 | if len(class_update_req) == 0: 42 | update_reqs[class_name] = [UpdateRequirement().as_json()] 43 | else: 44 | update_reqs[class_name] = [req.as_json() for req in class_update_req] 45 | 46 | class_delete_req = [r for r in cls.acl if isinstance(r, DeleteRequirement)] 47 | if len(class_delete_req) == 0: 48 | delete_reqs[class_name] = DeleteRequirement().as_json() 49 | elif len(class_create_req) == 1: 50 | delete_reqs[class_name] = class_delete_req[0].as_json() 51 | else: 52 | raise Exception("too many delete requirements in %s" % class_name) 53 | 54 | return requirements 55 | 56 | 57 | def write_sync_function(config_template_path, sync_template_path, mapper, output_path, sync_function_symbol="SYNC_FUNCTION"): 58 | 59 | with open(config_template_path, "r") as f: 60 | config_src_str = f.read() 61 | 62 | config_src_str = config_src_str.replace("sync = ", "") 63 | 64 | with open(sync_template_path, "r") as f: 65 | permissions = f.read() 66 | 67 | requirements_str = json.dumps(_requirements_from_mapper(mapper)) 68 | access_types_str = json.dumps(_access_from_mapper(mapper)) 69 | 70 | permissions = permissions.replace("sync = ", "") 71 | permissions = permissions.replace('"REQUIREMENTS_LOOKUP"', requirements_str) 72 | permissions = permissions.replace('"ACCESS_TYPES"', access_types_str) 73 | 74 | config_str = config_src_str.replace(sync_function_symbol, permissions) 75 | 76 | with open(output_path, "w") as f: 77 | f.write(config_str) 78 | 79 | 80 | 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /src/fam/buffer/__init__.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | from .write_buffer import FamWriteBuffer 3 | 4 | @contextmanager 5 | def buffered_db(db): 6 | dbb = FamWriteBuffer(db) 7 | yield dbb 8 | dbb.flush() -------------------------------------------------------------------------------- 
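A usage sketch for the buffered_db context manager above, assuming `db` is an already-configured fam database wrapper and `Dog` is a hypothetical mapped class; writes made through the buffer are only saved when flush() runs as the context exits:

from fam.buffer import buffered_db

with buffered_db(db) as buf:
    dog = Dog(name="Rex")       # hypothetical mapped class
    buf.put(dog)                # indexed and held in the FamWriteBuffer
    found = buf.get(dog.key)    # served from the buffer if the db has no copy yet
# leaving the context calls flush(), which writes everything that was put()
--------------------------------------------------------------------------------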
/src/fam/buffer/buffer_views.py: -------------------------------------------------------------------------------- 1 | import js2py 2 | import json 3 | 4 | class FamWriteBufferViews(object): 5 | 6 | FOREIGN_KEY_MAP_STRING = '''function(doc) { 7 | var resources = %s; 8 | if (resources.indexOf(doc.type) != -1 && doc.namespace == \"%s\"){ 9 | emit(doc.%s, doc); 10 | } 11 | }''' 12 | 13 | def __init__(self, mapper): 14 | 15 | self.mapper = mapper 16 | self.views = {} 17 | self.indexes = {} 18 | self.reverse_indexes = {} 19 | self.js_context = None 20 | self._update_designs() 21 | self.view_name = None 22 | self.doc = None 23 | 24 | 25 | def clear_indexes(self): 26 | self.indexes = {} 27 | self.reverse_indexes = {} 28 | 29 | 30 | def query_view(self, view_name, key, **kwargs): 31 | 32 | k = tuple(key) if type(key) == list else key 33 | 34 | name = self._clean_name(view_name) 35 | view_index = self.indexes.get(name) 36 | # print "****************** - ", self.indexes.get("join_views_reel_item_memberships") 37 | if view_index is None: 38 | return [] 39 | values = view_index.get(k) 40 | if values is None: 41 | return [] 42 | return [item[1] for item in values.items()] 43 | 44 | 45 | def index_obj(self, obj): 46 | 47 | self.obj = obj 48 | doc = obj.as_dict() 49 | for view_name, view in self.views.items(): 50 | self.view_name = view_name 51 | view(doc) 52 | 53 | 54 | def _clean_name(self, name): 55 | return name.replace("/", "_").replace(".", "_").replace("-", "_").replace(":", "_") 56 | 57 | 58 | def remove_from_indexes(self, obj_id): 59 | 60 | for view_name in self.indexes.keys(): 61 | index = self.indexes.get(view_name) 62 | reverse_index = self.reverse_indexes.get(view_name) 63 | 64 | existing_key = reverse_index.get(obj_id) 65 | 66 | ## remove the previous entry 67 | if existing_key is not None: 68 | old_indexed_values = index[existing_key] 69 | del old_indexed_values[obj_id] 70 | del reverse_index[obj_id] 71 | 72 | 73 | 74 | def _add_to_index(self, k): 75 | 76 | # print "********* adding: ", k 77 | 78 | kstr = repr(k).replace("'", "\"").replace("u\"", "\"") 79 | key = None if kstr == '"undefined"' else json.loads(kstr) 80 | 81 | if type(key) == list: 82 | key = tuple(key) 83 | # print "************ adding key: ", key 84 | 85 | index = self.indexes.get(self.view_name) 86 | reverse_index = self.reverse_indexes.get(self.view_name) 87 | if index is None: 88 | index = {} 89 | self.indexes[self.view_name] = index 90 | 91 | if reverse_index is None: 92 | reverse_index = {} 93 | self.reverse_indexes[self.view_name] = reverse_index 94 | 95 | obj_id = self.obj.key 96 | existing_key = reverse_index.get(obj_id) 97 | 98 | if existing_key == key: 99 | return 100 | 101 | ## remove the previous entry 102 | if existing_key is not None: 103 | old_indexed_values = index[existing_key] 104 | del old_indexed_values[obj_id] 105 | del reverse_index[obj_id] 106 | 107 | ## add the new one 108 | if key is not None: 109 | reverse_index[obj_id] = key 110 | new_indexed_values = index.get(key) 111 | if new_indexed_values is None: 112 | new_indexed_values = {} 113 | index[key] = new_indexed_values 114 | new_indexed_values[obj_id] = self.obj 115 | 116 | 117 | def _raw_design_doc(self): 118 | design_doc = { 119 | "views": { 120 | "all": { 121 | "map": "function(doc) {emit(doc.type, doc);}" 122 | } 123 | } 124 | } 125 | return design_doc 126 | 127 | 128 | def _update_designs(self): 129 | 130 | def add(k): 131 | self._add_to_index(k) 132 | 133 | self.js_context = js2py.EvalJs({"add": add}) 134 | 135 | code = """ 136 | function 
emit(k, v){ 137 | add(k); 138 | } 139 | """ 140 | 141 | self.js_context.execute(code) 142 | 143 | ## simple type index 144 | doc = self._raw_design_doc() 145 | key = "_design/raw" 146 | doc["_id"] = key 147 | 148 | self._add_design(self.js_context, key, doc) 149 | 150 | # ## relational indexes 151 | for namespace_name, namespace in self.mapper.namespaces.items(): 152 | view_namespace = self._clean_name(namespace_name) 153 | key = "_design/%s" % view_namespace 154 | doc = self.mapper.get_design(namespace, namespace_name, self.FOREIGN_KEY_MAP_STRING) 155 | doc["_id"] = key 156 | self._add_design(self.js_context, key, doc) 157 | 158 | # ## extra indexes 159 | # for doc in self.mapper.extra_design_docs(): 160 | # key = doc["_id"] 161 | # self._add_design(self.js_context, key, doc) 162 | 163 | 164 | def _add_design(self, js_context, key, doc): 165 | 166 | if(not key.startswith("_design/")): 167 | raise Exception("DataBaseCacheViews design doc key should start with _design") 168 | 169 | design_name = key[len("_design/"):] 170 | 171 | for view_name, view in doc["views"].items(): 172 | name = "%s_%s" % (design_name, view_name) 173 | name = self._clean_name(name) 174 | code = "var %s = %s" % (name, view["map"]) 175 | js_context.execute(code) 176 | self.views[name] = getattr(js_context, name) 177 | -------------------------------------------------------------------------------- /src/fam/buffer/write_buffer.py: -------------------------------------------------------------------------------- 1 | import traceback 2 | 3 | 4 | class FamWriteBuffer(object): 5 | 6 | def __init__(self, db): 7 | self.store = {} 8 | self.to_be_saved = set() 9 | self.db = db 10 | self.views = db.mapper.buffer_views 11 | 12 | 13 | def _get(self, *args, **kwargs): 14 | traceback.print_stack() 15 | raise Exception() 16 | 17 | 18 | def view(self, name, **kwargs): 19 | traceback.print_stack() 20 | raise NotImplementedError("view doesn't work with the DataBaseCache yet") 21 | 22 | 23 | def put(self, thing): 24 | 25 | thing._db = self 26 | self.views.index_obj(thing) 27 | 28 | key = thing.key 29 | if key in self.store: 30 | if id(not self.store[key]) == id(thing): 31 | raise Exception("putting thing with same key but different python id into cache - it's confused") 32 | else: 33 | self.store[thing.key] = thing 34 | self.to_be_saved.add(key) 35 | 36 | def get_refs_from(self, namespace, type_name, name, key, field): 37 | view_namespace = namespace.replace("/", "_") 38 | view_name = "%s/%s_%s" % (view_namespace, type_name, name) 39 | return self.query_view(view_name, key=key) 40 | 41 | def query_view(self, view_name, **kwargs): 42 | db_objs = self.db.query_view(view_name, **kwargs) 43 | [self._refresh_cache(o.key, o) for o in db_objs] 44 | # these will inclde the refreshed ones 45 | buffer_objs = self.views.query_view(view_name, **kwargs) 46 | return buffer_objs 47 | 48 | 49 | def _refresh_cache(self, key, got): 50 | if got is not None: 51 | got._db = self 52 | current_in_cache = self.store.get(key) 53 | # If there are changes from the database then update the current one in place 54 | if current_in_cache is not None and got is not None: 55 | if current_in_cache.rev != got.rev: 56 | current_in_cache.rev = got.rev 57 | current_in_cache._properties = got._properties 58 | self.views.index_obj(current_in_cache) 59 | return current_in_cache 60 | elif current_in_cache is not None: 61 | return current_in_cache 62 | elif got is not None: 63 | self.store[got.key] = got 64 | self.views.index_obj(got) 65 | return got 66 | else: 67 | return None 68 
| 69 | 70 | def delete(self, thing): 71 | if thing.key in self.store: 72 | del self.store[thing.key] 73 | self.views.remove_from_indexes(thing.key) 74 | return self.db.delete(thing) 75 | 76 | 77 | def get(self, key, class_name=None): 78 | got = self.db.get(key) 79 | result = self._refresh_cache(key, got) 80 | return result 81 | 82 | 83 | def delete_key(self, key): 84 | if key in self.store: 85 | del self.store[key] 86 | self.views.remove_from_indexes(key) 87 | return self.db.delete_key(key) 88 | 89 | 90 | def flush(self): 91 | # only save the things that have been "put" 92 | for key in list(self.to_be_saved): 93 | thing = self.store[key] 94 | thing._db = self.db 95 | self.db.put(thing) 96 | 97 | self.views.clear_indexes() 98 | -------------------------------------------------------------------------------- /src/fam/constants.py: -------------------------------------------------------------------------------- 1 | 2 | RESERVED_PROPERTY_NAMES = ("key", "rev", "_properties", "_db") 3 | 4 | # NAMESPACE_STR = "namespace" 5 | NAMESPACE_STR = "namespace" 6 | TYPE_STR = "type" 7 | 8 | FIELD_REQUIRED = "required" 9 | FIELD_UPDATE_ACL = "update_acl" 10 | -------------------------------------------------------------------------------- /src/fam/database/__init__.py: -------------------------------------------------------------------------------- 1 | from .couchdb import CouchDBWrapper 2 | from .sync_gateway import SyncGatewayWrapper 3 | from .firestore import FirestoreWrapper 4 | try: 5 | from fam.database.couchbase_server import CouchbaseWrapper 6 | except Exception as e: 7 | pass 8 | # print("failed to import couchbase wrapper", e) 9 | 10 | from fam.utils.backoff import http_backoff 11 | 12 | @http_backoff 13 | def get_db(db_type, 14 | mapper, 15 | host, 16 | port=None, 17 | db_name="sync_gateway", 18 | https=False, 19 | username=None, 20 | password=None, 21 | backoff=False, 22 | **kwargs): 23 | 24 | if db_type == "sync_gateway": 25 | url = _get_url(host, 4984 if port is None else port, https, username, password) 26 | ## kwargs may inc auth_url 27 | return SyncGatewayWrapper(mapper, url, db_name, username=username, password=password, **kwargs) 28 | elif db_type == "couchdb": 29 | url = _get_url(host, 5984 if port is None else port, https, username, password) 30 | ## kwargs may inc reset, remote_url, continuous 31 | return CouchDBWrapper(mapper, url, db_name, **kwargs) 32 | # elif db_type == "couchbase": 33 | # ## kwargs may inc read_only 34 | # return CouchbaseWrapper(mapper, host, db_name, **kwargs) 35 | else: 36 | raise NotImplementedError("Can't make a database of type %s" % db_type) 37 | 38 | 39 | def _get_url(host, port, https, username, password): 40 | if username is None: 41 | return "%s://%s:%s" % ("https" if https else "http", host, port) 42 | else: 43 | return "%s://%s:%s@%s:%s" % ("https" if https else "http", username, password, host, port) -------------------------------------------------------------------------------- /src/fam/database/base.py: -------------------------------------------------------------------------------- 1 | from fam.blud import ReferenceFrom, GenericObject 2 | import json 3 | 4 | class FamDbAuthException(Exception): 5 | pass 6 | 7 | class BaseDatabase(object): 8 | 9 | FOREIGN_KEY_MAP_STRING = '''function(doc) { 10 | var resources = %s; 11 | if (resources.indexOf(doc.type) != -1 && doc.namespace == \"%s\"){ 12 | emit(doc.%s, null); 13 | } 14 | }''' 15 | 16 | check_on_save = True 17 | 18 | ################################### 19 | 20 | #double dispatch accessors that 
return objects 21 | 22 | def put(self, thing): 23 | return thing.save(self) 24 | 25 | def delete(self, thing): 26 | return thing.delete(self) 27 | 28 | def get(self, key, class_name=None): 29 | return GenericObject.get(self, key, class_name=class_name) 30 | 31 | def delete_key(self, key): 32 | return GenericObject.delete_key(self, key) 33 | 34 | def query_view(self, view_name, **kwargs): 35 | return GenericObject.view(self, view_name, **kwargs) 36 | 37 | def changes(self, since=None, channels=None, limit=None, feed=None, timeout=None, filter=None): 38 | return GenericObject.changes(self, since=since, channels=channels, limit=limit, feed=feed, timeout=timeout, filter=filter) 39 | 40 | 41 | ################################# 42 | 43 | def class_for_type_name(self, type_name, namespace_name): 44 | return self.mapper.get_class(type_name, namespace_name) 45 | 46 | -------------------------------------------------------------------------------- /src/fam/database/base_adapter.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import pytz 3 | import base64 4 | import re 5 | 6 | from copy import deepcopy 7 | import datetime 8 | from fam.extra_types.lat_long import LatLong 9 | from fractions import Fraction 10 | from decimal import Decimal 11 | from google.cloud.firestore_v1 import GeoPoint 12 | 13 | 14 | if sys.version_info[0] < 3: 15 | PYTHON_VERSION = 2 16 | else: 17 | PYTHON_VERSION = 3 18 | 19 | 20 | class BaseDataAdapter(object): 21 | 22 | # a fam doc to serialise into firestore 23 | def serialise(self, doc): 24 | dup = deepcopy(doc) 25 | return self._serialise_walk(dup) 26 | 27 | def deserialise(self, doc): 28 | dup = deepcopy(doc) 29 | result = self._deserialise_walk(dup) 30 | return result 31 | 32 | 33 | def is_a_string(self, node): 34 | 35 | if PYTHON_VERSION == 3: 36 | return isinstance(node, str) 37 | else: 38 | return isinstance(node, basestring) 39 | 40 | 41 | def is_a_number(self, node): 42 | 43 | if PYTHON_VERSION == 3: 44 | return isinstance(node, int) or isinstance(node, float) 45 | else: 46 | return isinstance(node, int) or isinstance(node, float) or isinstance(node, long) 47 | 48 | 49 | def _serialise_walk(self, node): 50 | 51 | if isinstance(node, dict): 52 | for k, v in node.items(): 53 | node[k] = self._serialise_walk(v) 54 | return node 55 | if isinstance(node, list): 56 | return [self._serialise_walk(v) for v in node] 57 | if isinstance(node, LatLong): 58 | return self.serialise_lat_long(node) 59 | if isinstance(node, Fraction): 60 | return self.serialise_fraction(node) 61 | if isinstance(node, Decimal): 62 | return self.serialise_decimal(node) 63 | if isinstance(node, datetime.datetime): 64 | return self.serialise_date_time(node) 65 | if self.is_a_string(node): 66 | return self.serialise_string(node) 67 | if isinstance(node, bytes) or isinstance(node, bytearray): 68 | return self.serialise_bytes(node) 69 | if isinstance(node, bool): 70 | return self.serialise_bool(node) 71 | if self.is_a_number(node): 72 | return self.serialise_number(node) 73 | if hasattr(node, "to_json"): 74 | return self.serialise_object(node) 75 | if node is None: 76 | return None 77 | 78 | raise Exception("BaseAdapter can't serialise this value: %s" % str(node)) 79 | 80 | 81 | def serialise_lat_long(self, lat_long): 82 | return "::latlong::%s,%s" % (lat_long.latitude, lat_long.longitude) 83 | 84 | def serialise_fraction(self, fraction): 85 | return "::fraction::%s/%s" % (fraction.numerator, fraction.denominator) 86 | 87 | def serialise_decimal(self, 
decimal): 88 | return "::decimal::%s" % str(decimal) 89 | 90 | def serialise_date_time(self, date_time): 91 | utc = pytz.utc 92 | if date_time.tzinfo is None: 93 | dt = date_time.replace(tzinfo=utc) 94 | else: 95 | dt = date_time.astimezone(utc) 96 | as_iso = dt.isoformat("T") + "Z" 97 | fixed = as_iso.replace("+00:00", "") 98 | return "::datetime::%s" % str(fixed) 99 | 100 | 101 | def serialise_bytes(self, btes): 102 | encoded = base64.b64encode(btes) 103 | result = "::bytes::%s" % encoded.decode("utf-8") 104 | return result 105 | 106 | def serialise_string(self, string): 107 | return string 108 | 109 | def serialise_bool(self, boolean): 110 | return boolean 111 | 112 | def serialise_number(self, number): 113 | return number 114 | 115 | def serialise_object(self, obj): 116 | return obj.to_json() 117 | 118 | def is_legacy_datetime(self, node): 119 | 120 | if not self.is_a_string(node): 121 | return False 122 | 123 | datepattern = r"""^\d{4}-(0[1-9]|1[0-2])-([0-2]\d|3[0-1])T([0-1][0-9]|2[0-3]):[0-5]\d:[0-5]\d([.]\d{1,8})?Z$""" 124 | pattern = re.compile(datepattern) 125 | return pattern.match(node) 126 | 127 | 128 | def _deserialise_walk(self, node): 129 | 130 | 131 | if isinstance(node, dict): 132 | for k, v in node.items(): 133 | node[k] = self._deserialise_walk(v) 134 | return node 135 | if isinstance(node, list): 136 | return [self._deserialise_walk(v) for v in node] 137 | if self.is_a_string(node): 138 | if node.startswith("::fraction::"): 139 | stripped = node[len("::fraction::"):] 140 | num, denom = stripped.split("/") 141 | return Fraction(int(num), int(denom)) 142 | if node.startswith("::decimal::"): 143 | stripped = node[len("::decimal::"):] 144 | return Decimal(stripped) 145 | if node.startswith("::latlong::"): 146 | stripped = node[len("::latlong::"):] 147 | lat, long = stripped.split(",") 148 | return LatLong(float(lat), float(long)) 149 | if node.startswith("::bytes::"): 150 | stripped = node[len("::bytes::"):] 151 | return base64.b64decode(stripped) 152 | if node.startswith("::datetime::"): 153 | stripped = node[len("::datetime::"):] 154 | if "." in stripped: 155 | dt = datetime.datetime.strptime(stripped, '%Y-%m-%dT%H:%M:%S.%fZ') 156 | else: 157 | dt = datetime.datetime.strptime(stripped, '%Y-%m-%dT%H:%M:%SZ') 158 | dt = dt.replace(tzinfo=pytz.utc) 159 | return dt 160 | if self.is_legacy_datetime(node): 161 | stripped = node 162 | if "." 
in stripped: 163 | dt = datetime.datetime.strptime(stripped, '%Y-%m-%dT%H:%M:%S.%fZ') 164 | else: 165 | dt = datetime.datetime.strptime(stripped, '%Y-%m-%dT%H:%M:%SZ') 166 | dt = dt.replace(tzinfo=pytz.utc) 167 | return dt 168 | return node 169 | if isinstance(node, bool): 170 | return node 171 | if node is None: 172 | return node 173 | if self.is_a_number(node): 174 | return node 175 | 176 | raise Exception("FirestoreDataAdapter can't deserialise this value: %s", node) -------------------------------------------------------------------------------- /src/fam/database/caching.py: -------------------------------------------------------------------------------- 1 | from fam.buffer import buffered_db 2 | cache = buffered_db 3 | 4 | -------------------------------------------------------------------------------- /src/fam/database/couchbase_server.py: -------------------------------------------------------------------------------- 1 | import copy 2 | ## little dance to use patch version if necessary 3 | 4 | def is_gevent_monkey_patched(): 5 | try: 6 | from gevent import monkey 7 | except ImportError: 8 | return False 9 | else: 10 | return monkey.is_module_patched('__builtin__') 11 | 12 | 13 | from couchbase.cluster import Cluster 14 | from couchbase.cluster import PasswordAuthenticator 15 | from couchbase.bucket import View 16 | from couchbase.n1ql import N1QLQuery 17 | from couchbase.exceptions import NotFoundError, KeyExistsError 18 | 19 | 20 | from fam.blud import FamObject 21 | from fam.database.base import BaseDatabase 22 | from fam.database.couchdb import ResultWrapper 23 | from fam.exceptions import * 24 | 25 | 26 | class CouchbaseWrapper(BaseDatabase): 27 | 28 | def __init__(self, mapper, host, bucket_name, read_only=True): 29 | # connection_str = "couchbase://%s/%s" % (host, bucket_name) 30 | # self.bucket = Bucket(connection_str) 31 | self.read_only = read_only 32 | self.mapper = mapper 33 | # self.bucket_name = bucket_name 34 | 35 | 36 | cluster = Cluster('couchbase://%s' % host) 37 | authenticator = PasswordAuthenticator('test', 'bollocks') 38 | cluster.authenticate(authenticator) 39 | self.bucket = cluster.open_bucket(bucket_name) 40 | 41 | self.mapper = mapper 42 | 43 | def update_designs(self): 44 | 45 | ## simple type index 46 | doc = self._raw_design_doc() 47 | key = "_design/raw" 48 | 49 | doc["_id"] = key 50 | 51 | self.ensure_design_doc(key, doc) 52 | 53 | 54 | ## relational indexes 55 | for namespace_name, namespace in self.mapper.namespaces.items(): 56 | 57 | view_namespace = namespace_name.replace("/", "_") 58 | key = "_design/%s" % view_namespace 59 | 60 | doc = self.mapper.get_design(namespace, namespace_name, self.FOREIGN_KEY_MAP_STRING) 61 | doc["_id"] = key 62 | self.ensure_design_doc(key, doc) 63 | 64 | ## extra indexes 65 | for doc in self.mapper.extra_design_docs(): 66 | key = doc["_id"] 67 | self.ensure_design_doc(key, doc) 68 | 69 | 70 | def _raw_design_doc(self): 71 | 72 | design_doc = { 73 | "views": { 74 | "all": { 75 | "map": "function(doc) {emit(doc.type, null);}" 76 | } 77 | } 78 | } 79 | 80 | return design_doc 81 | 82 | 83 | 84 | def ensure_design_doc(self, key, doc): 85 | if self.read_only: 86 | raise Exception("This db is read only") 87 | 88 | # first put it into dev 89 | dev_key = key.replace("_design/", "_design/dev_") 90 | dev_doc = copy.deepcopy(doc) 91 | dev_doc["_id"] = dev_key 92 | 93 | previous_dev = self._get(dev_key) 94 | 95 | # print "self.db_url: ", self.db_url, self.db_name 96 | 97 | self._set(dev_key, dev_doc, rev=None if previous_dev is 
None else previous_dev.rev) 98 | 99 | # then get it back again to compare 100 | existing = self._get(key) 101 | existing_dev = self._get(dev_key) 102 | 103 | if existing == existing_dev: 104 | pass 105 | print("************ designs up to date ************") 106 | else: 107 | print("************ updating designs ************") 108 | print("new_design: ", doc) 109 | self._set(key, doc, rev=None if existing is None else existing.rev) 110 | # 111 | # def view(self, name, *args, **kwargs): 112 | # 113 | # design_doc_id, view_name = name.split("/") 114 | # 115 | # design_name = "_design/%s" % design_doc_id 116 | # 117 | # # print design_doc_id, view_name 118 | # 119 | # view = View(self.bucket, 120 | # design_doc_id, 121 | # view_name, 122 | # *args, 123 | # **kwargs 124 | # ) 125 | # 126 | # rows_list = list(view) 127 | # # print rows_list 128 | # # 129 | # # 130 | # # 131 | # # keys = rows_list[0].keys() 132 | # # keys.remove("id") 133 | # # keys.remove("cas") 134 | # return [ResultWrapper(row.docid, row.doc.cas, row.value) for row in rows_list] 135 | 136 | # 137 | # def get_refs_from(self, namespace, type_name, name, key, field): 138 | # 139 | # query_string = ( 140 | # "SELECT * FROM `travel-sample`" 141 | # "WHERE country=$country " 142 | # "AND geo.alt > $altitude " 143 | # "AND (geo.lat BETWEEN $min_lat AND $max_lat) " 144 | # "AND (geo.lon BETWEEN $min_lon AND $max_lon " 145 | # ) 146 | 147 | 148 | 149 | def n1ql(self, query, with_revs=False, *args, **kwargs): 150 | return FamObject.n1ql(self, query, with_revs=with_revs, *args, **kwargs) 151 | 152 | def _get(self, key, class_name=None): 153 | try: 154 | result = self.bucket.get(key) 155 | except NotFoundError as e: 156 | return None 157 | return ResultWrapper(key, result.cas, result.value) 158 | 159 | def _set(self, key, value, rev=None): 160 | if self.read_only: 161 | raise FamWriteError("You can't write to this database") 162 | try: 163 | if rev is not None: 164 | result = self.bucket.upsert(key, value, cas=rev) 165 | else: 166 | result = self.bucket.upsert(key, value) 167 | return ResultWrapper(key, result.cas, value) 168 | except KeyExistsError as e: 169 | raise FamResourceConflict("key alreday exists in couchbase: %s - %s" % (key, e)) 170 | 171 | 172 | 173 | def set_object(self, obj, rev=None): 174 | 175 | return self._set(obj.key, obj._properties, rev=rev) 176 | 177 | def _n1ql_with_rev(self, query, *args, **kwargs): 178 | query = N1QLQuery(query, *args, **kwargs) 179 | rows = self.bucket.n1ql_query(query) 180 | results = [] 181 | bucket_name = None 182 | 183 | for row in rows: 184 | if bucket_name is None: 185 | keys = row.keys() 186 | keys.remove("id") 187 | keys.remove("cas") 188 | bucket_name = keys[0] 189 | 190 | rev = row["_sync"]["rev"] 191 | results.append(ResultWrapper(row["id"], rev, row[bucket_name])) 192 | 193 | return results 194 | 195 | 196 | def _n1ql(self, query, *args, **kwargs): 197 | query = N1QLQuery(query, *args, **kwargs) 198 | rows = self.bucket.n1ql_query(query) 199 | rows_list = list(rows) 200 | 201 | return [ResultWrapper(row["$1"]["id"], row["$1"]["cas"], row["test"]) for row in rows_list] -------------------------------------------------------------------------------- /src/fam/database/couchdb_adapter.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | from .base_adapter import BaseDataAdapter 5 | 6 | 7 | if sys.version_info[0] < 3: 8 | PYTHON_VERSION = 2 9 | else: 10 | PYTHON_VERSION = 3 11 | 12 | 13 | class CouchDBDataAdapter(BaseDataAdapter): 
14 | pass 15 | 16 | 17 | -------------------------------------------------------------------------------- /src/fam/database/custom_token.py: -------------------------------------------------------------------------------- 1 | from google.oauth2 import credentials 2 | 3 | from firebase_admin.credentials import Base 4 | 5 | 6 | from google.auth.transport import requests 7 | 8 | _request = requests.Request() 9 | 10 | _scopes = [ 11 | 'https://www.googleapis.com/auth/cloud-platform', 12 | 'https://www.googleapis.com/auth/datastore', 13 | 'https://www.googleapis.com/auth/devstorage.read_write', 14 | 'https://www.googleapis.com/auth/firebase', 15 | 'https://www.googleapis.com/auth/identitytoolkit', 16 | 'https://www.googleapis.com/auth/userinfo.email', 17 | 'https://www.googleapis.com/auth/firebase.database', 18 | ] 19 | 20 | 21 | class CustomToken(Base): 22 | """A credential initialized from an existing refresh token.""" 23 | 24 | _CREDENTIAL_TYPE = 'authorized_user' 25 | 26 | def __init__(self, token, project_id): 27 | """Initializes a credential from a refresh token JSON file. 28 | 29 | The JSON must consist of client_id, client_secert and refresh_token fields. Refresh 30 | token files are typically created and managed by the gcloud SDK. To instantiate 31 | a credential from a refresh token file, either specify the file path or a dict 32 | representing the parsed contents of the file. 33 | 34 | Args: 35 | refresh_token: Path to a refresh token file or a dict representing the contents of a 36 | refresh token file. 37 | 38 | Raises: 39 | IOError: If the specified file doesn't exist or cannot be read. 40 | ValueError: If the refresh token configuration is invalid. 41 | """ 42 | super(CustomToken, self).__init__() 43 | 44 | self._project_id = project_id 45 | 46 | self._g_credential = credentials.Credentials(token=token, scopes=_scopes) 47 | 48 | @property 49 | def client_id(self): 50 | return self._g_credential.client_id 51 | 52 | @property 53 | def client_secret(self): 54 | return self._g_credential.client_secret 55 | 56 | @property 57 | def refresh_token(self): 58 | return self._g_credential.refresh_token 59 | 60 | @property 61 | def project_id(self): 62 | return self._project_id 63 | 64 | 65 | def get_credential(self): 66 | """Returns the underlying Google credential. 
67 | 68 | Returns: 69 | google.auth.credentials.Credentials: A Google Auth credential instance.""" 70 | return self._g_credential 71 | -------------------------------------------------------------------------------- /src/fam/database/firestore_adapter.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import datetime 3 | from fam.extra_types.lat_long import LatLong 4 | from fractions import Fraction 5 | from decimal import Decimal 6 | from google.cloud.firestore_v1 import GeoPoint 7 | 8 | from .base_adapter import BaseDataAdapter 9 | 10 | 11 | if sys.version_info[0] < 3: 12 | PYTHON_VERSION = 2 13 | else: 14 | PYTHON_VERSION = 3 15 | 16 | 17 | class FirestoreDataAdapter(BaseDataAdapter): 18 | 19 | 20 | def serialise_lat_long(self, lat_long): 21 | return GeoPoint(latitude=lat_long.latitude, longitude=lat_long.longitude) 22 | 23 | 24 | def serialise_date_time(self, date_time): 25 | return date_time 26 | 27 | 28 | def serialise_bytes(self, btes): 29 | return btes 30 | 31 | 32 | def _deserialise_walk(self, node): 33 | 34 | if isinstance(node, dict): 35 | for k, v in node.items(): 36 | node[k] = self._deserialise_walk(v) 37 | return node 38 | if isinstance(node, list): 39 | return [self._deserialise_walk(v) for v in node] 40 | if isinstance(node, GeoPoint): 41 | return LatLong(latitude=node.latitude, longitude=node.longitude) 42 | if self.is_a_string(node): 43 | if node.startswith("::fraction::"): 44 | stripped = node[len("::fraction::"):] 45 | num, denom = stripped.split("/") 46 | return Fraction(int(num), int(denom)) 47 | if node.startswith("::decimal::"): 48 | stripped = node[len("::decimal::"):] 49 | return Decimal(stripped) 50 | return node 51 | if isinstance(node, datetime.datetime): 52 | return node 53 | if isinstance(node, bytes): 54 | return node 55 | if isinstance(node, bool): 56 | return node 57 | if self.is_a_number(node): 58 | return node 59 | if node is None: 60 | return node 61 | 62 | raise Exception("FirestoreDataAdapter can't deserialise this value: %s" % node) -------------------------------------------------------------------------------- /src/fam/database/firestore_contexts.py: -------------------------------------------------------------------------------- 1 | from google.api_core import gapic_v1 2 | from .firestore import FirestoreWrapper 3 | from fam.exceptions import FamTransactionError 4 | 5 | class FirestoreBatchContext(FirestoreWrapper): 6 | 7 | def __init__(self, wrapper, retry=gapic_v1.method.DEFAULT, timeout=None): 8 | 9 | self.batch = self._get_batch(wrapper.db) 10 | self.wrapper = wrapper 11 | self.retry = retry 12 | self.timeout = timeout 13 | 14 | self.mapper = wrapper.mapper 15 | self.validator = wrapper.validator 16 | self.read_only = wrapper.read_only 17 | self.api_key = wrapper.api_key 18 | self.namespace = wrapper.namespace 19 | self.expires = wrapper.expires 20 | self.data_adapter = wrapper.data_adapter 21 | self.creds = wrapper.creds 22 | self.db = wrapper.db 23 | self.app = wrapper.app 24 | self.user = wrapper.user 25 | self.expires = wrapper.expires 26 | 27 | def _get_batch(self, client): 28 | return client.batch() 29 | 30 | def _set_doc_ref(self, doc_ref, value): 31 | self.batch.set(doc_ref, value) 32 | 33 | def _update_doc_ref(self, doc_ref, value): 34 | self.batch.update(doc_ref, value) 35 | 36 | def __enter__(self): 37 | return self 38 | 39 | def __exit__(self, exc_type, exc_value, traceback): 40 | if exc_type is None: 41 | self.results = self.batch.commit(self.retry, self.timeout) 42 | 43 | 44 | 
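# A usage sketch for FirestoreBatchContext, assuming `wrapper` is a configured
# FirestoreWrapper and `dog`, `kennel` are hypothetical mapped objects, and that
# the wrapper's save path goes through the _set_doc_ref / _update_doc_ref
# methods overridden here, so writes are staged on the batch rather than sent
# immediately:
#
#     with FirestoreBatchContext(wrapper) as ctx:
#         ctx.put(dog)
#         ctx.put(kennel)
#     # __exit__ calls batch.commit(retry, timeout); the write results are then
#     # available on ctx.results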
# class FirestoreTransactionContext(FirestoreBatchContext): 45 | # 46 | # def __init__(self, wrapper, retry=gapic_v1.method.DEFAULT, timeout=None, max_attempts=5, read_only=False): 47 | # 48 | # self.max_attempts = max_attempts 49 | # self.firestore_read_only = read_only 50 | # super().__init__(wrapper, retry, timeout) 51 | # self.transaction = self.batch 52 | # 53 | # def _get_batch(self, client): 54 | # return client.transaction(max_attempts=self.max_attempts, 55 | # read_only=self.firestore_read_only) 56 | # 57 | # def _stream_doc_ref(self, doc_ref): 58 | # return doc_ref.stream(transaction=self.transaction) 59 | # 60 | # def _get_doc_ref(self, doc_ref): 61 | # return doc_ref.get(transaction=self.transaction) 62 | # 63 | # def _create_unique_field_docs(self, type_name, key, value, unique_field_names, transaction): 64 | # raise FamTransactionError( 65 | # "You can't set unique fields inside a batch or transaction because I haven't thought hard enough about nested transactions") 66 | # 67 | # def _clear_uniqueness_typed(self, key, type_name, transaction): 68 | # raise FamTransactionError( 69 | # "You can't delete an object with unique fields inside a batch or transaction because I haven't thought hard enough about nested transactions") -------------------------------------------------------------------------------- /src/fam/database/firestore_test_client.py: -------------------------------------------------------------------------------- 1 | from google import auth as google_auth 2 | import grpc 3 | from firebase_admin import firestore 4 | 5 | 6 | class FirestoreTestClient(firestore.Client): 7 | """ 8 | This is monkey patched version of the firestore database client that allows you to use credentials over an insecure 9 | local grpc channel. This lets you use authenticaed users with limited capabilites in tests 10 | 11 | DO NOT USE THIS IN PRODUCTION!!! THERE IS A REASON THAT GOOGLE DON'T ALLOW CREDENTIALS TO BE SENT OVER HTTP 12 | """ 13 | 14 | 15 | def _firestore_api_helper(self, transport, client_class, client_module): 16 | 17 | if self._firestore_api_internal is None: 18 | composite_credentials = self.create_local_composite_credentials() 19 | 20 | if self._emulator_host is not None: 21 | channel = grpc._channel.Channel(self._emulator_host, (), composite_credentials._credentials, None) 22 | else: 23 | channel = transport.create_channel( 24 | self._target, 25 | credentials=self._credentials, 26 | options={"grpc.keepalive_time_ms": 30000}.items(), 27 | ) 28 | 29 | self._transport = transport(host=self._target, channel=channel) 30 | self._firestore_api_internal = client_class( 31 | transport=self._transport, client_options=self._client_options 32 | ) 33 | client_module._client_info = self._client_info 34 | return self._firestore_api_internal 35 | 36 | 37 | def create_local_composite_credentials(self): 38 | 39 | credentials = google_auth.credentials.with_scopes_if_required(self._credentials, None) 40 | request = google_auth.transport.requests.Request() 41 | 42 | # Create the metadata plugin for inserting the authorization header. 43 | metadata_plugin = google_auth.transport.grpc.AuthMetadataPlugin( 44 | credentials, request 45 | ) 46 | 47 | # Create a set of grpc.CallCredentials using the metadata plugin. 48 | google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin) 49 | 50 | local_credentials = grpc.local_channel_credentials() 51 | 52 | # Combine the ssl credentials and the authorization credentials. 
53 | return grpc.composite_channel_credentials(local_credentials, google_auth_credentials) -------------------------------------------------------------------------------- /src/fam/database/firestore_test_wrapper.py: -------------------------------------------------------------------------------- 1 | from firebase_admin import auth 2 | from fam.database.firestore import FirestoreWrapper 3 | from fam.database.firestore_test_client import FirestoreTestClient 4 | 5 | class FirestoreTestWrapper(FirestoreWrapper): 6 | 7 | def __init__(self, mapper, app_name, uid, project_id, api_key, namespace, additional_claims=None): 8 | 9 | token = auth.create_custom_token(uid, additional_claims).decode("utf-8") 10 | 11 | super().__init__(mapper, 12 | None, 13 | project_id=project_id, 14 | custom_token=token, 15 | api_key=api_key, 16 | name=app_name, 17 | namespace=namespace 18 | ) 19 | 20 | credentials = self.app.credential.get_credential() 21 | test_client = FirestoreTestClient(credentials=credentials, project=project_id) 22 | self.db = test_client 23 | -------------------------------------------------------------------------------- /src/fam/database/mock.py: -------------------------------------------------------------------------------- 1 | from fam.database.base import BaseDatabase 2 | from fam.buffer.write_buffer import FamWriteBuffer 3 | from .null import NullDatabase 4 | 5 | 6 | class MockDatabase(BaseDatabase): 7 | database_type = "mock" 8 | check_on_save = False 9 | 10 | def __init__(self, mapper): 11 | self.mapper = mapper 12 | null_db = NullDatabase(mapper) 13 | self.buffer = FamWriteBuffer(null_db) 14 | 15 | def set_object(self, obj, rev=None): 16 | return self.buffer.put(obj) 17 | 18 | def get(self, key, class_name=None): 19 | return self.buffer.get(key) 20 | 21 | def _delete(self, key, rev, classname): 22 | return self.buffer.delete_key(key) 23 | 24 | def delete_key(self, key): 25 | return self.buffer.delete_key(key) 26 | 27 | def query_view(self, view_name, **kwargs): 28 | return self.buffer.query_view(view_name, **kwargs) 29 | 30 | def get_refs_from(self, namespace, type_name, name, key, field): 31 | return self.buffer.get_refs_from(namespace, type_name, name, key, field) 32 | -------------------------------------------------------------------------------- /src/fam/database/null.py: -------------------------------------------------------------------------------- 1 | from fam.database.base import BaseDatabase 2 | from fam.buffer.write_buffer import FamWriteBuffer 3 | 4 | 5 | class NullDatabase(BaseDatabase): 6 | database_type = "null" 7 | 8 | def __init__(self, mapper): 9 | self.mapper = mapper 10 | 11 | def put(self, thing): 12 | pass 13 | 14 | def delete(self, thing): 15 | pass 16 | 17 | def get(self, key, class_name=None): 18 | return None 19 | 20 | def delete_key(self, key): 21 | pass 22 | 23 | def query_view(self, view_name, **kwargs): 24 | return [] 25 | 26 | -------------------------------------------------------------------------------- /src/fam/database/sync_gateway.py: -------------------------------------------------------------------------------- 1 | import simplejson as json 2 | import hashlib 3 | import copy 4 | from fam.fam_json import object_default 5 | 6 | from base64 import b64encode 7 | 8 | 9 | from fam.utils import requests_shim as requests 10 | 11 | from .couchdb import CouchDBWrapper, ResultWrapper 12 | 13 | class SyncGatewayWrapper(CouchDBWrapper): 14 | 15 | ## the option stale=false forces the view to be indexed on read. Sync_gateway does not index on write!! 
16 | # VIEW_URL = "%s/%s/_design/%s/_view/%s?stale=false&key=\"%s\"" 17 | 18 | # this function is different from the base version in that it adds the rev from the meta into the doc 19 | FOREIGN_KEY_MAP_STRING = '''function(doc, meta) { 20 | var resources = %s; 21 | if (resources.indexOf(doc.type) != -1 && doc.namespace == \"%s\"){ 22 | doc._rev = meta.rev; 23 | emit(doc.%s, null); 24 | } 25 | }''' 26 | 27 | database_type = "sync_gateway" 28 | supports_skip = False 29 | 30 | def __init__(self, mapper, db_url, db_name, 31 | auth_url=None, 32 | username=None, 33 | password=None, 34 | validator=None, 35 | read_only=False): 36 | 37 | 38 | self.mapper = mapper 39 | self.validator = validator 40 | self.read_only = read_only 41 | 42 | self.db_name = db_name 43 | self.db_url = db_url 44 | self.username = username 45 | self.password = password 46 | self.auth_url = auth_url 47 | self.session = requests.Session() 48 | 49 | url = "%s/%s" % (db_url, db_name) 50 | rsp = self.session.get(url) 51 | 52 | self.cookies = {} 53 | 54 | if rsp.status_code == 404: 55 | raise Exception("Unknown database and you can't create them in the sync gateway") 56 | 57 | def authenticate(self): 58 | if self.username is None: 59 | raise Exception("failed to authenticate no username") 60 | 61 | userAndPass = b64encode(b"paul:bumbum").decode("ascii") 62 | headers = { 'Authorization' : 'Basic %s' % userAndPass } 63 | 64 | rsp = self.session.get(self.auth_url, headers=headers) 65 | if rsp.status_code == 200: 66 | self.cookies = rsp.cookies 67 | else: 68 | raise Exception("failed to authenticate") 69 | 70 | 71 | def _wrapper_from_view_json(self, as_json): 72 | return ResultWrapper.from_gateway_view_json(as_json) 73 | 74 | 75 | # def changes(self, since=None, channels=None, limit=None): 76 | # raise NotImplementedError("Haven't done changes for sync gateway yet") 77 | 78 | 79 | def sync_up(self): 80 | pass 81 | 82 | def sync_down(self): 83 | pass 84 | 85 | 86 | def purge(self, key): 87 | 88 | if self.read_only: 89 | raise Exception("This db is read only") 90 | 91 | data = { 92 | key: ["*"] 93 | } 94 | 95 | rsp = self.session.post("%s/%s/_purge" % (self.db_url, self.db_name), data=json.dumps(data)) 96 | 97 | if rsp.status_code == 200 or rsp.status_code == 202: 98 | return 99 | 100 | 101 | def user(self, username): 102 | url = "%s/%s/_user/%s" % (self.db_url, self.db_name, username) 103 | rsp = self.session.get(url) 104 | if rsp.status_code == 200: 105 | return rsp.json() 106 | else: 107 | return None 108 | 109 | def role(self, role_name): 110 | url = "%s/%s/_role/%s" % (self.db_url, self.db_name, role_name) 111 | rsp = self.session.get(url) 112 | if rsp.status_code == 200: 113 | return rsp.json() 114 | else: 115 | return None 116 | 117 | 118 | def ensure_role(self, role_name): 119 | 120 | role_info = self.role(role_name) 121 | if role_info is None: 122 | data = { 123 | "name": role_name, 124 | "db": self.db_name 125 | } 126 | 127 | url = "%s/%s/_role/%s" % (self.db_url, self.db_name, role_name) 128 | 129 | rsp = self.session.put(url, data=json.dumps(data, indent=4, sort_keys=True, default=object_default), 130 | headers={"Content-Type": "application/json", "Accept": "application/json"}) 131 | if rsp.status_code == 200 or rsp.status_code == 201: 132 | return True 133 | else: 134 | return False 135 | else: 136 | return True 137 | 138 | 139 | def ensure_user_role(self, username, role): 140 | 141 | user_info = self.user(username) 142 | roles = user_info["admin_roles"] 143 | 144 | if not role in roles: 145 | roles.append(role) 146 | 147 | 
url = "%s/%s/_user/%s" % (self.db_url, self.db_name, username) 148 | 149 | rsp = self.session.put(url, data=json.dumps(user_info, indent=4, sort_keys=True, default=object_default), 150 | headers={"Content-Type": "application/json", "Accept": "application/json"}) 151 | if rsp.status_code == 200 or rsp.status_code == 201: 152 | return True 153 | else: 154 | return False 155 | else: 156 | return True 157 | 158 | 159 | def view(self, name, **kwargs): 160 | return super(SyncGatewayWrapper, self).view(name, stale="false", **kwargs) 161 | 162 | # @auth 163 | def get_design(self, key): 164 | 165 | url = "%s/%s/%s" % (self.db_url, self.db_name, key) 166 | 167 | rsp = self.session.get(url) 168 | 169 | if rsp.status_code == 200: 170 | return rsp.json() 171 | if rsp.status_code == 500: 172 | return None 173 | if rsp.status_code == 400: 174 | return None 175 | if rsp.status_code == 404: 176 | return None 177 | raise Exception("Unknown Error getting cb doc: %s %s" % (rsp.status_code, rsp.text)) 178 | 179 | 180 | def _new_matches_existing(self, new_doc, existing_doc): 181 | 182 | new_view_names = new_doc["views"].keys() 183 | existing_view_names = existing_doc["views"].keys() 184 | 185 | # print("new_view_names: ", new_view_names) 186 | # print("existing_view_names: ", existing_view_names) 187 | 188 | if set(new_view_names) != set(existing_view_names): 189 | # print "names dont match" 190 | return False 191 | 192 | for view_name in new_view_names: 193 | new_view_function = new_doc["views"][view_name]["map"] 194 | existing_view_function = existing_doc["views"][view_name]["map"] 195 | index = existing_view_function.find(new_view_function) 196 | if index == -1: 197 | return False 198 | 199 | return True 200 | 201 | 202 | def ensure_design_doc(self, key, doc): 203 | 204 | if self.read_only: 205 | raise Exception("This db is read only") 206 | 207 | existing = self.get_design(key) 208 | 209 | if existing is None or not self._new_matches_existing(doc, existing): 210 | print("************ updating design doc %s ************" % key) 211 | self._set(key, doc, backoff=True) 212 | else: 213 | print("************ design doc %s up to date **********" % key) 214 | 215 | 216 | def _raw_design_doc(self): 217 | 218 | design_doc = { 219 | "views": { 220 | "all": { 221 | "map": """function(doc, meta) { 222 | doc._rev = meta.rev; 223 | emit(doc.type, null); 224 | } 225 | """ 226 | } 227 | } 228 | } 229 | 230 | return design_doc 231 | -------------------------------------------------------------------------------- /src/fam/exceptions.py: -------------------------------------------------------------------------------- 1 | class FamDbConnectionException(Exception): 2 | pass 3 | 4 | class FamResourceConflict(Exception): 5 | pass 6 | 7 | class FamViewError(Exception): 8 | pass 9 | 10 | class FamValidationError(Exception): 11 | pass 12 | 13 | class FamImmutableError(Exception): 14 | pass 15 | 16 | class FamUniqueError(Exception): 17 | pass 18 | 19 | class FamWriteError(Exception): 20 | pass 21 | 22 | class FamError(Exception): 23 | pass 24 | 25 | class FamPermissionError(Exception): 26 | pass 27 | 28 | class FamTransactionError(Exception): 29 | pass -------------------------------------------------------------------------------- /src/fam/extra_types/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/extra_types/__init__.py 
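The exception classes in src/fam/exceptions.py above are plain markers; a minimal sketch of catching them around a write, assuming `db` is a configured fam database wrapper and `dog` is a mapped object (both hypothetical here), and noting that which exception is raised depends on the backend wrapper in use:

from fam.exceptions import FamValidationError, FamUniqueError, FamResourceConflict

try:
    db.put(dog)
except FamValidationError as e:
    print("document failed validation: %s" % e)
except FamUniqueError as e:
    print("unique field value already taken: %s" % e)
except FamResourceConflict as e:
    print("write conflict, fetch the latest revision and retry: %s" % e)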
-------------------------------------------------------------------------------- /src/fam/extra_types/lat_long.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | class LatLong(object): 4 | 5 | def __init__(self, latitude, longitude): 6 | self.latitude = latitude 7 | self.longitude = longitude 8 | -------------------------------------------------------------------------------- /src/fam/fam_json.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | def object_default(o): 4 | if hasattr(o, "to_json"): 5 | return o.to_json() 6 | raise TypeError(repr(o) + " is not JSON serializable") 7 | 8 | 9 | # class PatchedJson(object): 10 | # import simplejson as json 11 | # 12 | # def dumps(self, *args, **kwargs): 13 | # print args[0] 14 | # return self.json.dumps(*args, indent=4, sort_keys=True, default=object_default, **kwargs) 15 | # 16 | # def __getattr__(self, name): 17 | # return getattr(self.json, name) 18 | # 19 | # 20 | # sys.modules[__name__] = PatchedJson() 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /src/fam/fields.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import datetime 4 | from copy import deepcopy 5 | from decimal import Decimal 6 | from fractions import Fraction 7 | 8 | if sys.version_info[0] < 3: 9 | PYTHON_VERSION = 2 10 | else: 11 | PYTHON_VERSION = 3 12 | 13 | 14 | __all__ = [ 15 | "BoolField", 16 | "NumberField", 17 | "StringField", 18 | "ListField", 19 | "DictField", 20 | "ObjectField", 21 | "LatLongField", 22 | "DateTimeField", 23 | "BytesField", 24 | "DecimalField", 25 | "FractionField", 26 | "ReferenceTo", 27 | "ReferenceFrom", 28 | "EmailField", 29 | ] 30 | 31 | from .constants import * 32 | from .exceptions import * 33 | from .extra_types.lat_long import LatLong 34 | 35 | 36 | class Field(object): 37 | object = "base" 38 | 39 | def __init__(self, required=False, immutable=False, default=None, unique=False): 40 | self.required = required 41 | self.immutable = immutable 42 | self.default = default 43 | self.unique = unique 44 | self._types = [] 45 | 46 | if self.default is not None and self.required is True: 47 | raise FamError("It doesnt really make sense to use both required and default together. 
Just use default") 48 | 49 | def is_correct_type(self, value): 50 | value is None or any(isinstance(value, cls) for cls in self._types) 51 | 52 | def get_default(self): 53 | return self.default 54 | 55 | def __str__(self): 56 | attr = [] 57 | if self.required: 58 | attr.append(FIELD_REQUIRED) 59 | return " ".join(attr) 60 | 61 | as_string = property(__str__) 62 | 63 | 64 | class BoolField(Field): 65 | 66 | _types = [bool] 67 | 68 | 69 | class NumberField(Field): 70 | 71 | if PYTHON_VERSION == 3: 72 | _types = [int, float] 73 | else: 74 | _types = [int, float, long] 75 | 76 | 77 | class StringField(Field): 78 | 79 | if PYTHON_VERSION == 3: 80 | _types = [str] 81 | else: 82 | _types = [basestring] 83 | 84 | 85 | class ListField(Field): 86 | 87 | _types = [list] 88 | 89 | def __init__(self, item_cls=None, required=False, immutable=False, default=None): 90 | self.item_cls = item_cls 91 | super(ListField, self).__init__(required=required, immutable=immutable, default=default) 92 | 93 | def get_default(self): 94 | return deepcopy(self.default) 95 | 96 | 97 | 98 | class DictField(Field): 99 | 100 | _types = [dict] 101 | 102 | def get_default(self): 103 | return deepcopy(self.default) 104 | 105 | 106 | 107 | class ObjectField(Field): 108 | 109 | def get_default(self): 110 | return self.cls() 111 | 112 | def __init__(self, cls, default=None, required=False): 113 | self.cls = cls 114 | self._types = [cls] 115 | super(ObjectField, self).__init__(default=default, required=required) 116 | 117 | 118 | # "LatLongField", 119 | # "DateTimeField", 120 | # "ByteField", 121 | # "DecimalField", 122 | # "FractionField", 123 | 124 | class LatLongField(Field): 125 | 126 | _types = [LatLong] 127 | 128 | def get_default(self): 129 | return LatLong(self.default.latitude, self.default.longitude) 130 | 131 | 132 | class DateTimeField(Field): 133 | _types = [datetime.datetime] 134 | 135 | def get_default(self): 136 | return self.default 137 | 138 | 139 | class BytesField(Field): 140 | 141 | if PYTHON_VERSION == 3: 142 | _types = [bytes, bytearray] 143 | else: 144 | _types = [bytes, bytearray, str] 145 | 146 | def get_default(self): 147 | return self.default.copy() 148 | 149 | 150 | class DecimalField(Field): 151 | 152 | _types = [Decimal] 153 | 154 | def get_default(self): 155 | return self.default.copy() 156 | 157 | 158 | class FractionField(Field): 159 | _types = [Fraction] 160 | 161 | def get_default(self): 162 | return self.default.copy() 163 | 164 | 165 | class EmailField(StringField): 166 | 167 | pattern = r"""^([-!#$%&'*+/=?^_`{}|~0-9a-zA-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9a-zA-Z]+)*|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*")@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}\.?$""" 168 | 169 | 170 | 171 | class ReferenceTo(Field): 172 | 173 | if PYTHON_VERSION == 3: 174 | _types = [str] 175 | else: 176 | _types = [basestring] 177 | 178 | def __init__(self, refns, refcls, required=False, immutable=False, default=None, unique=False, 179 | cascade_delete=False): 180 | self.refns = refns 181 | self.refcls = refcls 182 | self.cascade_delete = cascade_delete 183 | super(ReferenceTo, self).__init__(required, immutable, default, unique) 184 | 185 | def __str__(self): 186 | attr = [] 187 | 188 | attr.append("ns:%s" % self.refns) 189 | attr.append("resource:%s" % self.refcls) 190 | 191 | if self.required: 192 | attr.append(FIELD_REQUIRED) 193 | return " ".join(attr) 194 | 195 | as_string = property(__str__) 196 | 197 | 198 | class ReferenceFrom(Field): 199 | 200 | def 
__init__(self, refns, refcls, fkey, required=False, immutable=False, default=None, cascade_delete=False): 201 | self.refns = refns 202 | self.refcls = refcls 203 | self.fkey = fkey 204 | self.cascade_delete = cascade_delete 205 | super(ReferenceFrom, self).__init__(required, immutable, default) 206 | 207 | def __str__(self): 208 | attr = [] 209 | attr.append("ns:%s" % self.refns) 210 | attr.append("resource:%s" % self.refcls) 211 | attr.append("key:%s" % self.fkey) 212 | if self.required: 213 | attr.append(FIELD_REQUIRED) 214 | return " ".join(attr) 215 | 216 | as_string = property(__str__) -------------------------------------------------------------------------------- /src/fam/firestore_sync/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/firestore_sync/__init__.py -------------------------------------------------------------------------------- /src/fam/firestore_sync/syncer.py: -------------------------------------------------------------------------------- 1 | 2 | from fam.exceptions import * 3 | 4 | from fam.database.firestore_adapter import FirestoreDataAdapter 5 | 6 | 7 | class FirestoreSyncer(object): 8 | 9 | def __init__(self, couchdb_wrapper, firestore_wrapper, batch_size=100, since_in_db=False): 10 | 11 | self.couchdb_wrapper = couchdb_wrapper 12 | self.firestore_wrapper = firestore_wrapper 13 | self.queries = [] 14 | self.doc_refs = [] 15 | self.batch_size = batch_size 16 | self.data_adapter = FirestoreDataAdapter() 17 | self.types_to_sync_up = None 18 | self._since = 0 19 | self.since_rev = None 20 | self.since_in_db = since_in_db 21 | 22 | 23 | def add_query(self, query): 24 | self.queries.append(query) 25 | 26 | 27 | def add_doc_ref(self, doc_ref): 28 | self.doc_refs.append(doc_ref) 29 | 30 | def add_snapshot(self, snapshot): 31 | item = self.firestore_wrapper.value_from_snapshot(snapshot) 32 | update_time = snapshot.update_time 33 | item["update_seconds"] = update_time.seconds 34 | item["update_nanos"] = update_time.nanos 35 | try: 36 | self.couchdb_wrapper._set(item["_id"], item) 37 | return item 38 | except FamResourceConflict as e: 39 | existing = self.couchdb_wrapper._get(item["_id"]) 40 | if update_time.seconds > existing.value["update_seconds"] or \ 41 | (update_time.seconds == existing.value["update_seconds"] and 42 | update_time.nanos > existing.value["update_nanos"]): 43 | item["_rev"] = existing.rev 44 | self.couchdb_wrapper._set(item["_id"], item) 45 | return item 46 | 47 | return None 48 | 49 | def sync_down(self): 50 | 51 | items = [] 52 | 53 | for query in self.queries: 54 | for snapshot in self.firestore_wrapper.query_snapshots(query, batch_size=self.batch_size): 55 | item = self.add_snapshot(snapshot) 56 | if item is not None: 57 | items.append(item) 58 | 59 | for doc_ref in self.doc_refs: 60 | snapshot = doc_ref.get() 61 | item = self.add_snapshot(snapshot) 62 | if item is not None: 63 | items.append(item) 64 | 65 | #self.since = self.couchdb_wrapper.info()["update_seq"] 66 | return items 67 | 68 | 69 | def sync_up(self): 70 | fs = self.firestore_wrapper.db 71 | 72 | while True: 73 | last_seq, changes = self.couchdb_wrapper._changes(since=self.since, limit=self.batch_size) 74 | 75 | if changes: 76 | count = 0 77 | batch = fs.batch() 78 | for item in changes: 79 | type_name = item.value["type"] 80 | if self.types_to_sync_up is None or type_name in self.types_to_sync_up: 81 | value = item.value 82 | if "update_nanos" 
in value: 83 | del value["update_nanos"] 84 | if "update_seconds" in value: 85 | del value["update_seconds"] 86 | value["_id"] = item.key 87 | serialised_value = self.data_adapter.serialise(value) 88 | # del serialised_value["_id"] 89 | del serialised_value["type"] 90 | del serialised_value["namespace"] 91 | batch.set(fs.collection(type_name).document(item.key), serialised_value) 92 | count += 1 93 | 94 | batch.commit() 95 | self.since = last_seq 96 | print("synced up %s items" % count) 97 | else: 98 | print("nothing to sync") 99 | break 100 | 101 | 102 | def get_since(self): 103 | 104 | if self.since_in_db: 105 | since_result = self.couchdb_wrapper._get("sync_since") 106 | if since_result is not None: 107 | self.since_rev = since_result.rev 108 | return since_result.value["since"] 109 | else: 110 | return 0 111 | else: 112 | return self._since 113 | 114 | 115 | def set_since(self, since): 116 | 117 | if self.since_in_db: 118 | since_result = self.couchdb_wrapper._get("sync_since") 119 | if since_result is not None: 120 | self.since_rev = since_result.rev 121 | value = {"since": since} 122 | if self.since_rev is not None: 123 | value["_rev"] = self.since_rev 124 | self.couchdb_wrapper._set("sync_since", value) 125 | else: 126 | self._since = since 127 | 128 | 129 | since = property(get_since, set_since) 130 | -------------------------------------------------------------------------------- /src/fam/mapper.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import os 3 | # from slimit import ast 4 | # from slimit.parser import Parser 5 | 6 | from fam.blud import GenericObject, ReferenceFrom 7 | from fam.schema.validator import ModelValidator 8 | from fam.buffer.buffer_views import FamWriteBufferViews 9 | 10 | VIEW_FUNCTION_NAMES = ["map", "reduce"] 11 | 12 | 13 | class ClassMapper(object): 14 | 15 | def __init__(self, classes, modules=None, designs=None): 16 | 17 | input_modules = modules if modules else [] 18 | 19 | self.allow_public_write_types = [] 20 | self.immutable_fields = {} 21 | self.namespaces = {} 22 | self.modules = {} 23 | self._add_classes(classes) 24 | self._add_modules(input_modules) 25 | self.sub_class_lookup = {} 26 | self._work_out_sub_classes() 27 | self.design_js_paths = designs if designs is not None else [] 28 | self._buffer_views = None 29 | 30 | 31 | # def extra_design_docs(self): 32 | # 33 | # docs = [] 34 | # for filepath in self.design_js_paths: 35 | # design_doc = self._js_design_as_doc(filepath) 36 | # docs.append(design_doc) 37 | # 38 | # return docs 39 | 40 | 41 | @property 42 | def buffer_views(self): 43 | if self._buffer_views is None: 44 | self._buffer_views = FamWriteBufferViews(self) 45 | return self._buffer_views 46 | 47 | 48 | def _add_immutable_field(self, type_name, field_name): 49 | 50 | # print "immutable", type_name, field_name 51 | field_names = self.immutable_fields.get(type_name) 52 | if field_names is None: 53 | field_names = [] 54 | self.immutable_fields[type_name] = field_names 55 | field_names.append(field_name) 56 | 57 | 58 | def _add_modules(self, modules): 59 | for module in modules: 60 | classes = [] 61 | for k, obj in module.__dict__.items(): 62 | if inspect.isclass(obj): 63 | if issubclass(obj, GenericObject): 64 | if obj != GenericObject: 65 | if not k.startswith("_"): 66 | classes.append(obj) 67 | 68 | self._add_classes(classes) 69 | 70 | 71 | def __iter__(self): 72 | for name_space_name, name_space_classes in self.namespaces.items(): 73 | for cls_name, cls in 
name_space_classes.items(): 74 | yield cls 75 | 76 | 77 | def get_sub_class_names(self, namespace, class_name): 78 | return self.sub_class_lookup[(namespace, class_name)] 79 | 80 | 81 | def _work_out_sub_classes(self): 82 | # for each class add their subclasses type names to a lookup table keyed by namespace and classname 83 | # only works within a given namespace 84 | for namespace_name, namespace in self.namespaces.items(): 85 | for class_name_super, cls_super in namespace.items(): 86 | subclasses = [] 87 | self.sub_class_lookup[(namespace_name, class_name_super)] = subclasses 88 | for class_name_sub, cls_sub in namespace.items(): 89 | if issubclass(cls_sub, cls_super): 90 | subclasses.append(class_name_sub) 91 | 92 | 93 | def _add_classes(self, classes): 94 | 95 | for cls in classes: 96 | 97 | namespace_name = cls.namespace 98 | 99 | type_name = cls.type 100 | # gathers up information that gets added to the sync_gateway function 101 | if cls.sg_allow_public_write: 102 | self.allow_public_write_types.append(type_name) 103 | for field_name, field in cls.fields.items(): 104 | if field.immutable: 105 | self._add_immutable_field(type_name, field_name) 106 | namespace = self.namespaces.get(namespace_name) 107 | if namespace is None: 108 | namespace = {} 109 | self.namespaces[namespace_name] = namespace 110 | 111 | if not issubclass(cls, GenericObject): 112 | raise Exception("Classes you add to a ClassMapper must inherit from fam.blud.GenericObject this one does not: %s" % cls) 113 | namespace[type_name] = cls 114 | 115 | 116 | def get_class(self, type_name, namespace_name): 117 | namespace = self.namespaces.get(namespace_name) 118 | if namespace is None: 119 | return None 120 | return namespace.get(type_name) 121 | 122 | 123 | # def _js_design_as_doc(self, filepath): 124 | # 125 | # dir, filename = os.path.split(filepath) 126 | # name, ext = os.path.splitext(filename) 127 | # 128 | # with open(filepath) as f: 129 | # js = f.read() 130 | # 131 | # parser = Parser() 132 | # tree = parser.parse(js) 133 | # 134 | # views = {} 135 | # 136 | # for node in tree: 137 | # if isinstance(node, ast.VarStatement): 138 | # for child in node.children(): 139 | # for grandchild in child.children(): 140 | # if isinstance(grandchild, ast.Identifier): 141 | # view = {} 142 | # view_name = grandchild.value 143 | # views[view_name] = view 144 | # if isinstance(grandchild, ast.Object): 145 | # for named in grandchild.children(): 146 | # function_name = None 147 | # function_body = None 148 | # for kv in named.children(): 149 | # if isinstance(kv, ast.Identifier) and kv.value in VIEW_FUNCTION_NAMES: 150 | # function_name = kv.value 151 | # if isinstance(kv, ast.FuncExpr): 152 | # function_body = kv.to_ecma() 153 | # if function_name and function_body: 154 | # view[function_name] = function_body 155 | # 156 | # 157 | # return {"_id": "_design/%s" % name, 158 | # "views": views} 159 | 160 | 161 | 162 | def get_design(self, namespace, namespace_name, foreign_key_str): 163 | 164 | views = {} 165 | for type_name, cls in namespace.items(): 166 | for field_name, field in cls.cls_fields.items(): 167 | if isinstance(field, ReferenceFrom): 168 | view_key = "%s_%s" % (type_name, field_name) 169 | # if view_key in ["person_dogs", "person_animals"]: 170 | views[view_key] = {"map" : self._get_fk_map(field.refcls, field.refns, field.fkey, foreign_key_str)} 171 | 172 | if field.unique: 173 | view_key = "%s_%s" % (type_name, field_name) 174 | # if view_key in ["person_dogs", "person_animals"]: 175 | views[view_key] = {"map": 
self._get_fk_map(type_name, namespace_name, field_name, foreign_key_str)} 176 | 177 | design = { 178 | "views": views 179 | } 180 | 181 | return design 182 | 183 | 184 | def get_all_subclass_names(self, namespace, class_name): 185 | 186 | if isinstance(class_name, list): 187 | class_names = class_name 188 | else: 189 | class_names = [class_name] 190 | 191 | all_sub_class_names = set() 192 | for name in class_names: 193 | sub_classes = set(self.get_sub_class_names(namespace, name)) 194 | all_sub_class_names = all_sub_class_names.union(sub_classes) 195 | 196 | return all_sub_class_names 197 | 198 | 199 | 200 | def _get_fk_map(self, class_name, namespace, ref_to_field_name, foreign_key_str): 201 | 202 | all_sub_class_names = self.get_all_subclass_names(namespace, class_name) 203 | 204 | arrayStr = '["%s"]' % '", "'.join(all_sub_class_names) 205 | return foreign_key_str % (arrayStr, namespace, ref_to_field_name) 206 | 207 | 208 | 209 | -------------------------------------------------------------------------------- /src/fam/schema/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/schema/__init__.py -------------------------------------------------------------------------------- /src/fam/schema/validator.py: -------------------------------------------------------------------------------- 1 | import jsonschema 2 | import os 3 | import json 4 | import inspect 5 | import copy 6 | from fam.blud import GenericObject 7 | from fam.constants import * 8 | 9 | 10 | 11 | from .writer import createJsonSchema 12 | 13 | 14 | class ModelValidator(object): 15 | 16 | def __init__(self, mapper, schema_dir=None, classes=None, modules=None): 17 | self.reference_store = {} 18 | self.ref_schemas = {} 19 | self.schema_dir = schema_dir 20 | if mapper is not None: 21 | self._add_classes(mapper) 22 | if classes is not None: 23 | self._add_classes(classes) 24 | 25 | def iter_schemas(self): 26 | 27 | for k, schema in self.ref_schemas.items(): 28 | namespace = k[0] 29 | typename = k[1] 30 | yield namespace, typename, schema 31 | 32 | 33 | def _add_classes(self, classes): 34 | for cls in classes: 35 | type_name = cls.__name__.lower() 36 | namespace = cls.namespace.lower() 37 | self.add_schema(namespace, type_name, cls) 38 | 39 | 40 | def add_schema(self, namespace, type_name, cls): 41 | 42 | schema = createJsonSchema(cls) 43 | 44 | if self.schema_dir is not None: 45 | schema["id"] = self._check_for_changes(namespace, type_name, schema) 46 | else: 47 | schema["id"] = "%s/%s" % (namespace, type_name) 48 | 49 | jsonschema.Draft4Validator.check_schema(schema) 50 | self.reference_store[schema["id"]] = schema 51 | self.ref_schemas[(namespace, type_name)] = schema 52 | 53 | 54 | def schema_id_for(self, namespace, type_name): 55 | schema = self.ref_schemas.get((namespace, type_name)) 56 | if schema: 57 | # print schema["id"] 58 | return schema["id"] 59 | return None 60 | 61 | 62 | def validate(self, doc): 63 | 64 | schema_id = doc.get("schema") 65 | if schema_id is None: 66 | namespace = doc.get("namespace") 67 | type_name = doc.get("type") 68 | 69 | if namespace and type_name: 70 | schema_id = self.schema_id_for(namespace, type_name) 71 | 72 | if schema_id: 73 | schema = self._look_schema_with_lazy_load(schema_id) 74 | resolver = jsonschema.RefResolver(schema_id, schema, store=self.reference_store) 75 | validator = jsonschema.Draft4Validator(schema, resolver=resolver) 76 | 
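            # Draft4Validator.validate raises jsonschema.exceptions.ValidationError when
            # the document does not match its schema; the schema tests expect this to
            # surface to callers as FamValidationError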
validator.validate(doc) 77 | 78 | 79 | def _look_schema_with_lazy_load(self, schema_id): 80 | schema = self.reference_store.get(schema_id) 81 | if schema is None: 82 | schema = self._schema_from_id(schema_id) 83 | self.reference_store[schema_id] = schema 84 | 85 | return schema 86 | 87 | return ''.join(diff) 88 | 89 | def _schemata_are_equal(self, schema_a, schema_b): 90 | 91 | 92 | if schema_b is None: 93 | return False 94 | 95 | # expects dicts 96 | # don't mess with the input 97 | dupe_a = copy.deepcopy(schema_a) 98 | dupe_b = copy.deepcopy(schema_b) 99 | 100 | # remove ids before comparison 101 | if "id" in dupe_a: 102 | del dupe_a["id"] 103 | 104 | if "id" in dupe_b: 105 | del dupe_b["id"] 106 | 107 | return dupe_a == dupe_b 108 | 109 | 110 | def _type_dir(self, namespace, type_name): 111 | ns = namespace.replace("/", "_") 112 | dir_path = os.path.join(self.schema_dir, "schemata", ns, type_name) 113 | return dir_path 114 | 115 | def _schema_path(self, namespace, type_name, timestamp): 116 | type_dir = self._type_dir(namespace, type_name) 117 | dir_path = os.path.join(type_dir, timestamp) 118 | return dir_path 119 | 120 | 121 | def _timestamp_from_schema_id(self, schema_id): 122 | namespace, typename, timestamp = self._namespace_typename_timestamp_from_schema_id(schema_id) 123 | return timestamp 124 | 125 | 126 | def _schema_path_from_id(self, schema_id): 127 | namespace, type_name, timestamp = self._namespace_typename_timestamp_from_schema_id(schema_id) 128 | return self._schema_path(namespace, type_name, timestamp) 129 | 130 | 131 | def _schema_from_id(self, schema_id): 132 | schema_path = self._schema_path_from_id(schema_id) 133 | return self.schema_at_schema_path(schema_path) 134 | 135 | 136 | def _namespace_typename_timestamp_from_schema_id(self, schema_id): 137 | 138 | parts = schema_id.split("/") 139 | namespace = "/".join(parts[:-3]) 140 | type_name = parts[-3] 141 | timestamp = parts[-2] 142 | 143 | return namespace, type_name, timestamp 144 | 145 | def schema_at_schema_path(self, schema_path): 146 | schema_path = os.path.join(schema_path, "schema.json") 147 | with open(schema_path, "r") as f: 148 | return json.loads(f.read()) 149 | 150 | 151 | def _previous_schema(self, namespace, type_name): 152 | 153 | dir_path = self._type_dir(namespace, type_name) 154 | 155 | if not os.path.exists(dir_path): 156 | os.makedirs(dir_path) 157 | 158 | filenames = os.listdir(dir_path) 159 | if not filenames: 160 | return None 161 | 162 | most_recent_filename = sorted(filenames)[-1] 163 | return self.schema_at_schema_path(os.path.join(dir_path, most_recent_filename)) 164 | 165 | 166 | def _check_for_changes(self, namespace, type_name, schema): 167 | 168 | existing_schema = self._previous_schema(namespace, type_name) 169 | 170 | if existing_schema is None: 171 | raise NotImplementedError("The schema for %s %s is missing" % (namespace, type_name)) 172 | else: 173 | # compare the latest schema with the most recent 174 | if self._schemata_are_equal(schema, existing_schema): 175 | return existing_schema["id"] 176 | else: 177 | raise NotImplementedError("The schema for %s %s is not up to date" % (namespace, type_name)) 178 | 179 | 180 | 181 | -------------------------------------------------------------------------------- /src/fam/schema/writer.py: -------------------------------------------------------------------------------- 1 | import copy 2 | 3 | FIELD_TYPE_LOOKUP = { 4 | "ObjectField": {}, 5 | "BoolField": { 6 | "type": "boolean", 7 | }, 8 | "NumberField": { 9 | "type": "number", 10 | }, 11 | 
    },
11 |
"StringField": { 12 | "type": "string", 13 | }, 14 | "ListField": { 15 | "type": "array", 16 | }, 17 | "DictField": { 18 | "type": "object", 19 | }, 20 | "ReferenceTo": { 21 | "type": "string" 22 | }, 23 | "EmailField": { 24 | "type": "string" 25 | }, 26 | "DateTimeField": { 27 | "type": "string" 28 | }, 29 | "FractionField": { 30 | "type": "string" 31 | }, 32 | "DecimalField": { 33 | "type": "string" 34 | } 35 | } 36 | 37 | 38 | def createJsonSchema(fam_class): 39 | 40 | class_name = fam_class.__name__.lower() 41 | namespace = fam_class.namespace.lower() 42 | 43 | schema = { 44 | "$schema": "http://json-schema.org/draft-04/schema#", 45 | "title": "A Fam object model for class %s:%s" % (fam_class.namespace, fam_class.__name__), 46 | "type": "object", 47 | "properties": { 48 | "type":{ 49 | "type": "string", 50 | "pattern": class_name 51 | }, 52 | "namespace": { 53 | "type": "string", 54 | "pattern": namespace 55 | }, 56 | "schema": { 57 | "type": "string" 58 | }, 59 | "_deleted": { 60 | "type": "boolean" 61 | }, 62 | "_id": { 63 | "type": "string" 64 | }, 65 | "_rev": { 66 | "type": "string" 67 | } 68 | }, 69 | "additionalProperties": fam_class.additional_properties, 70 | } 71 | 72 | required_fields = ["namespace", "type"] 73 | 74 | for name, field in fam_class.fields.items(): 75 | field_class_name = field.__class__.__name__ 76 | if field_class_name != "ReferenceFrom": 77 | field_dict = copy.deepcopy(FIELD_TYPE_LOOKUP[field_class_name]) 78 | schema["properties"][name] = field_dict 79 | if hasattr(field, "pattern"): 80 | field_dict["pattern"] = field.pattern 81 | if field.required: 82 | required_fields.append(name) 83 | 84 | if len(required_fields) > 0: 85 | schema["required"] = sorted(required_fields) 86 | 87 | return schema -------------------------------------------------------------------------------- /src/fam/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/common/__init__.py: -------------------------------------------------------------------------------- 1 | from .basic_tests import BasicBaseTestCases 2 | from .anything_tests import AnythingBaseTests 3 | from .callback_tests import CallbackBaseTests 4 | from .field_attribute_tests import FieldAttributeBaseTests 5 | from .schema_tests import SchemaBaseTests 6 | from .index_tests import IndexBaseTests 7 | 8 | common_test_classes = [BasicBaseTestCases.BasicTests, 9 | # BasicBaseTestCases.RefNameTests, 10 | AnythingBaseTests.AnythingTests, 11 | CallbackBaseTests.CallbackTests, 12 | FieldAttributeBaseTests.FieldAttributeTests, 13 | SchemaBaseTests.SchemaTests, 14 | IndexBaseTests.IndexTests 15 | ] -------------------------------------------------------------------------------- /src/fam/tests/common/anything_tests.py: -------------------------------------------------------------------------------- 1 | 2 | import unittest 3 | from fam.tests.models.test01 import Monster, Weapons 4 | 5 | class AnythingBaseTests: 6 | 7 | class AnythingTests(unittest.TestCase): 8 | 9 | db = None 10 | 11 | def test_Object_field(self): 12 | 13 | weapons = Weapons("large", "hot", ["2", "2"]) 14 | key = "a_monster" 15 | monster = Monster(key=key, weapons=weapons, name="bill") 16 | self.db.put(monster) 17 | got_monster = self.db.get(key) 18 | self.assertTrue(isinstance(got_monster.weapons, Weapons)) 19 | 20 
| -------------------------------------------------------------------------------- /src/fam/tests/common/callback_tests.py: -------------------------------------------------------------------------------- 1 | from copy import deepcopy 2 | import unittest 3 | from mock import patch 4 | 5 | from fam.tests.models.test01 import GenericObject, Dog, Cat, Person, JackRussell, Monkey, Monarch, NAMESPACE 6 | 7 | class CallbackBaseTests: 8 | 9 | class CallbackTests(unittest.TestCase): 10 | 11 | db = None 12 | 13 | def test_new_callbacks(self): 14 | 15 | with patch.object(Dog, 'pre_save_new_cb', return_value=None) as mock_pre_new: 16 | dog = Dog(name="woofer") 17 | self.db.put(dog) 18 | mock_pre_new.assert_called_once_with(self.db) 19 | 20 | with patch.object(Dog, 'post_save_new_cb', return_value=None) as mock_post_new: 21 | dog = Dog(name="woofer") 22 | self.db.put(dog) 23 | mock_post_new.assert_called_once_with(self.db) 24 | 25 | 26 | def test_update_callbacks(self): 27 | 28 | with patch.object(Dog, 'pre_save_update_cb', return_value=None) as mock_pre_update: 29 | dog = Dog(name="woofer") 30 | self.db.put(dog) 31 | old_properties = deepcopy(dog._properties) 32 | mock_pre_update.assert_not_called() 33 | dog.name = "fly" 34 | self.db.put(dog) 35 | mock_pre_update.assert_called_once_with(self.db, old_properties) 36 | 37 | with patch.object(Dog, 'post_save_update_cb', return_value=None) as mock_post_update: 38 | dog = Dog(name="woofer") 39 | self.db.put(dog) 40 | mock_post_update.assert_not_called() 41 | dog.name = "fly" 42 | self.db.put(dog) 43 | mock_post_update.assert_called_once_with(self.db) 44 | 45 | 46 | def test_delete_callbacks(self): 47 | 48 | with patch.object(Dog, 'pre_delete_cb', return_value=None) as mock_pre_delete: 49 | dog = Dog(name="woofer") 50 | self.db.put(dog) 51 | mock_pre_delete.assert_not_called() 52 | dog.name = "fly" 53 | self.db.delete(dog) 54 | mock_pre_delete.assert_called_once_with(self.db) 55 | 56 | 57 | with patch.object(Dog, 'post_delete_cb', return_value=None) as mock_post_delete: 58 | dog = Dog(name="woofer") 59 | self.db.put(dog) 60 | mock_post_delete.assert_not_called() 61 | dog.name = "fly" 62 | self.db.delete(dog) 63 | mock_post_delete.assert_called_once_with(self.db) -------------------------------------------------------------------------------- /src/fam/tests/common/data/animal_views.js: -------------------------------------------------------------------------------- 1 | 2 | var cat_legs = { 3 | map: function(doc){ 4 | if(doc.type == "cat"){ 5 | emit(doc.legs, doc) 6 | } 7 | } 8 | } -------------------------------------------------------------------------------- /src/fam/tests/common/data/last_seq.json: -------------------------------------------------------------------------------- 1 | 6 -------------------------------------------------------------------------------- /src/fam/tests/common/field_attribute_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from fam.exceptions import * 3 | from fam.tests.models.test01 import Cat, Person, Monkey 4 | 5 | 6 | class FieldAttributeBaseTests: 7 | 8 | class FieldAttributeTests(unittest.TestCase): 9 | 10 | db = None 11 | 12 | def make_red(self, animal): 13 | animal.colour = "red" 14 | 15 | def cut_off_tail(self, animal): 16 | animal.tail = False 17 | 18 | def get_address(self, animal): 19 | return animal.address 20 | 21 | def test_immutability(self): 22 | paul = Person(name="paul") 23 | paul.save(self.db) 24 | cat = Cat(colour="tabby", legs=4, owner=paul) 
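            # colour is an immutable field on Cat: overwriting a value that was set at
            # construction must raise, while the next test shows a first-time assignment
            # on an unset field is allowed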
25 | self.assertRaises(FamImmutableError, self.make_red, cat) 26 | 27 | def test_immutability_on_non_existant_value(self): 28 | paul = Person(name="paul") 29 | paul.save(self.db) 30 | cat = Cat(legs=4, owner=paul) 31 | self.make_red(cat) 32 | 33 | def test_getting_absent_no_defaults_returns_none(self): 34 | paul = Person(name="paul") 35 | paul.save(self.db) 36 | cat = Cat(legs=4, owner=paul) 37 | name = cat.name 38 | self.assertEqual(name, None) 39 | 40 | def test_getting_unknown_fails(self): 41 | paul = Person(name="paul") 42 | paul.save(self.db) 43 | cat = Cat(legs=4, owner=paul) 44 | self.assertRaises(AttributeError, self.get_address, cat) 45 | 46 | def test_immutable_defaults(self): 47 | paul = Person(name="paul") 48 | paul.save(self.db) 49 | cat = Cat(legs=4, owner=paul) 50 | self.assertTrue(cat.tail == True) 51 | self.assertRaises(FamImmutableError, self.cut_off_tail, cat) 52 | 53 | def test_immutable_saving(self): 54 | 55 | monkey = Monkey(key="red_monkey", colour="red") 56 | self.db.put(monkey) 57 | monkey = Monkey(key="red_monkey", colour="blue") 58 | self.assertRaises(FamImmutableError, self.db.put, monkey) 59 | 60 | # 61 | # def test_using_default_and_required_fails(self): 62 | # 63 | # def duff_import(): 64 | # from fam.tests.models import test03 65 | # print(test03) 66 | # 67 | # self.assertRaises(FamError, duff_import) -------------------------------------------------------------------------------- /src/fam/tests/common/index_tests.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from fam.database import CouchDBWrapper 4 | from fam.mapper import ClassMapper 5 | 6 | from fam.tests.test_couchdb.config import * 7 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell 8 | 9 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 10 | DATA_PATH = os.path.join(THIS_DIR, "data") 11 | 12 | class IndexBaseTests: 13 | 14 | class IndexTests(unittest.TestCase): 15 | 16 | db = None 17 | 18 | # def setUp(self): 19 | # filepath = os.path.join(DATA_PATH, "animal_views.js") 20 | # mapper = ClassMapper([Dog, Cat, Person, JackRussell], design_js_paths=[filepath]) 21 | # self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 22 | # self.db.update_designs() 23 | # 24 | # def tearDown(self): 25 | # pass 26 | 27 | # def test_create_index(self): 28 | # filepath = os.path.join(DATA_PATH, "animal_views.js") 29 | # 30 | # as_dict = self.db.mapper._js_design_as_doc(filepath) 31 | # 32 | # expected = { 33 | # "_id": "_design/animal_views", 34 | # "views": { 35 | # "cat_legs": { 36 | # "map": "function(doc) {\n if (doc.type == \"cat\") {\n emit(doc.legs, doc);\n }\n}" 37 | # } 38 | # } 39 | # } 40 | # 41 | # self.assertEqual(as_dict, expected) 42 | 43 | 44 | # def test_query_view(self): 45 | # 46 | # paul = Person(name="Paul") 47 | # self.db.put(paul) 48 | # cat1 = Cat(owner=paul, legs=4) 49 | # self.db.put(cat1) 50 | # cat2 = Cat(owner=paul, legs=3) 51 | # self.db.put(cat2) 52 | # three_legged_cats = Cat.all_with_n_legs(self.db, 3) 53 | # self.assertEqual(len(three_legged_cats), 1) 54 | # 55 | # self.assertEqual(three_legged_cats[0].key, cat2.key) 56 | 57 | 58 | # def test_long_polling(self): 59 | # paul = Person(name="Paul") 60 | # self.db.put(paul) 61 | # cat1 = Cat(owner=paul, legs=4) 62 | # self.db.put(cat1) 63 | # cat2 = Cat(owner=paul, legs=3) 64 | # self.db.put(cat2) 65 | # three_legged_cats = self.db.view("animal_views/cat_legs", key=3) 66 | # self.assertEqual(len(three_legged_cats), 1) 67 | # 
self.assertEqual(three_legged_cats[0].key, cat2.key) -------------------------------------------------------------------------------- /src/fam/tests/common/migration_tests.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/common/migration_tests.py -------------------------------------------------------------------------------- /src/fam/tests/common/schema_tests.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import unittest 4 | from fam.exceptions import FamValidationError 5 | from fam.tests.models import test01 6 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell 7 | 8 | from fam.schema.writer import createJsonSchema 9 | from fam.schema.validator import ModelValidator 10 | 11 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 12 | DATA_PATH = os.path.join(THIS_DIR, "data") 13 | 14 | class SchemaBaseTests: 15 | 16 | class SchemaTests(unittest.TestCase): 17 | 18 | db = None 19 | 20 | def test_make_a_schema(self): 21 | expected = { 22 | "title": "A Fam object model for class glowinthedark.co.uk/test:Cat", 23 | "required": ['legs', 'namespace', 'owner_id', 'type'], 24 | "properties": { 25 | "_id": {"type": "string"}, 26 | "_rev": {"type": "string"}, 27 | "name": { 28 | "type": "string" 29 | }, 30 | "colour": { 31 | "type": "string" 32 | }, 33 | "namespace": { 34 | "pattern": "glowinthedark.co.uk/test", 35 | "type": "string" 36 | }, 37 | "owner_id": { 38 | "type": "string" 39 | }, 40 | "_deleted": { 41 | "type": "boolean" 42 | }, 43 | "tail": { 44 | "type": "boolean" 45 | }, 46 | "legs": { 47 | "type": "number" 48 | }, 49 | "type": { 50 | "pattern": "cat", 51 | "type": "string" 52 | }, 53 | "schema": { 54 | "type": "string" 55 | }, 56 | "email": { 57 | "type": "string", 58 | "pattern": "^([-!#$%&'*+/=?^_`{}|~0-9a-zA-Z]+(\\.[-!#$%&'*+/=?^_`{}|~0-9a-zA-Z]+)*|^\"([\\001-\\010\\013\\014\\016-\\037!#-\\[\\]-\\177]|\\\\[\\001-011\\013\\014\\016-\\177])*\")@(?:[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,6}\\.?$" 59 | } 60 | }, 61 | "additionalProperties": False, 62 | "$schema": "http://json-schema.org/draft-04/schema#", 63 | "type": "object" 64 | } 65 | 66 | cat_schema = createJsonSchema(Cat) 67 | 68 | print(json.dumps(cat_schema, indent=4)) 69 | self.maxDiff = None 70 | self.assertEqual(expected, cat_schema) 71 | 72 | 73 | def test_make_a_validator(self): 74 | 75 | 76 | validator = ModelValidator(None) 77 | 78 | validator.add_schema(test01.NAMESPACE, "cat", Cat) 79 | validator.add_schema(test01.NAMESPACE, "person", Person) 80 | 81 | #add validator to db 82 | self.db.validator = validator 83 | 84 | paul = Person(name="paul") 85 | paul.save(self.db) 86 | cat = Cat(name="whiskers", owner_id=paul.key, legs=4) 87 | cat.save(self.db) 88 | self.assertEqual(cat.owner, paul) 89 | self.assertEqual(cat.owner.name, "paul") 90 | 91 | cat = Cat(name="puss", owner_id=paul.key) 92 | 93 | self.assertRaises(FamValidationError, cat.save, self.db) 94 | 95 | 96 | def test_make_a_validator_from_classes(self): 97 | 98 | validator = ModelValidator(None, classes=[Cat, Person]) 99 | 100 | #add validator to db 101 | self.db.validator = validator 102 | 103 | paul = Person(name="paul") 104 | paul.save(self.db) 105 | cat = Cat(name="whiskers", owner_id=paul.key, legs=4) 106 | cat.save(self.db) 107 | self.assertEqual(cat.owner, paul) 108 | self.assertEqual(cat.owner.name, "paul") 109 | 
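            # "legs" is required in the generated Cat schema, so saving a cat
            # without it should fail validation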
110 | cat = Cat(name="puss", owner_id=paul.key) 111 | 112 | self.assertRaises(FamValidationError, cat.save, self.db) 113 | 114 | 115 | 116 | -------------------------------------------------------------------------------- /src/fam/tests/data/goldfish.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/data/goldfish.jpg -------------------------------------------------------------------------------- /src/fam/tests/depricated/couchbase/couchbase_utils_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import requests 3 | from config import * 4 | import time 5 | 6 | from fam.utils import couchbase_utils 7 | 8 | class CouchbaseTests(object): 9 | 10 | 11 | def test_talk_to_server(self): 12 | rsp = requests.get("%s/pools/nodes" % (COUCHBASE_LOCAL_URL)) 13 | self.assertTrue(rsp.status_code < 300) 14 | 15 | 16 | def test_make_and_delete_a_bucket(self): 17 | 18 | bucket_name = "test_bucket" 19 | origional_count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 20 | 21 | couchbase_utils.make_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 22 | count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 23 | self.assertEquals(count, origional_count + 1) 24 | 25 | 26 | couchbase_utils.delete_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 27 | count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 28 | self.assertEquals(count, origional_count) 29 | 30 | 31 | def test_make_flush_a_bucket(self): 32 | 33 | bucket_name = "test_bucket" 34 | couchbase_utils.delete_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 35 | 36 | 37 | origional_count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 38 | 39 | couchbase_utils.make_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 40 | count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 41 | self.assertEquals(count, origional_count + 1) 42 | 43 | time.sleep(1) 44 | 45 | couchbase_utils.flush_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 46 | 47 | 48 | couchbase_utils.delete_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 49 | count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 50 | self.assertEquals(count, origional_count) 51 | 52 | 53 | def test_ensure_empty_bucket(self): 54 | 55 | bucket_name = "test_bucket" 56 | origional_count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 57 | 58 | 59 | couchbase_utils.make_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 60 | count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 61 | self.assertEquals(count, origional_count + 1) 62 | 63 | couchbase_utils.make_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name, force=True) 64 | count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 65 | 
self.assertEquals(count, origional_count + 1) 66 | 67 | couchbase_utils.delete_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 68 | count = couchbase_utils.number_of_buckets(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD) 69 | self.assertEquals(count, origional_count) 70 | 71 | 72 | class SyncGatewayTests(unittest.TestCase): 73 | 74 | 75 | def test_make_a_gateway(self): 76 | 77 | bucket_name = "test_bucket" 78 | 79 | couchbase_utils.make_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name, force=True) 80 | 81 | try: 82 | couchbase_utils.make_a_gateway(COUCHDB_SYNC_ADMIN_URL, "test_gateway", COUCHBASE_LOCAL_URL, bucket_name, "function(doc) {channel(doc.channels);}", force=True) 83 | couchbase_utils.delete_a_gateway(COUCHDB_SYNC_ADMIN_URL, "test_gateway") 84 | 85 | finally: 86 | couchbase_utils.delete_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 87 | 88 | 89 | 90 | def test_add_guest(self): 91 | 92 | bucket_name = "test_bucket" 93 | 94 | couchbase_utils.make_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name, force=True) 95 | 96 | 97 | try: 98 | couchbase_utils.make_a_gateway(COUCHDB_SYNC_ADMIN_URL, "test_gateway", COUCHBASE_LOCAL_URL, bucket_name, "function(doc) {channel(doc.channels);}", force=True) 99 | couchbase_utils.add_guest_to_gateway(COUCHDB_SYNC_ADMIN_URL, "test_gateway") 100 | couchbase_utils.delete_a_gateway(COUCHDB_SYNC_ADMIN_URL, "test_gateway") 101 | 102 | finally: 103 | couchbase_utils.delete_a_bucket(COUCHBASE_LOCAL_URL, COUCHBASE_ADMIN, COUCHBASE_ADMIN_PASSWORD, bucket_name) 104 | 105 | 106 | 107 | class BigFatTest(unittest.TestCase): 108 | 109 | def test_making_both_bucket_and_gateway(self): 110 | 111 | bucket_name = "test_bucket" 112 | gateway_name = "test_gateway" 113 | 114 | try: 115 | couchbase_utils.make_bucket_and_gateway(COUCHBASE_LOCAL_URL, 116 | COUCHBASE_ADMIN, 117 | COUCHBASE_ADMIN_PASSWORD, 118 | bucket_name, 119 | COUCHDB_SYNC_ADMIN_URL, 120 | gateway_name, 121 | "function(doc) {channel(doc.channels);}", 122 | guest=True, 123 | force=True) 124 | 125 | 126 | 127 | finally: 128 | couchbase_utils.delete_bucket_and_gateway(COUCHBASE_LOCAL_URL, 129 | COUCHBASE_ADMIN, 130 | COUCHBASE_ADMIN_PASSWORD, 131 | bucket_name, 132 | COUCHDB_SYNC_ADMIN_URL, 133 | gateway_name) -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_couchbase/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/depricated/test_couchbase/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_couchbase/_test_basic.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import unittest 4 | from fam.database import CouchbaseWrapper 5 | from fam.mapper import ClassMapper 6 | 7 | from fam.tests.models.test01 import Dog, Cat 8 | 9 | COUCHBASE_HOST = "127.0.0.1" 10 | COUCHBASE_BUCKET = "test" 11 | 12 | 13 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 14 | DATA_PATH = os.path.join(THIS_DIR, "data") 15 | 16 | class CacheTests(unittest.TestCase): 17 | 18 | def setUp(self): 19 | mapper = ClassMapper([Dog, Cat]) 20 | self.db = CouchbaseWrapper(mapper, COUCHBASE_HOST, COUCHBASE_BUCKET, read_only=False) 21 | 22 | def 
tearDown(self): 23 | pass 24 | 25 | def test_save(self): 26 | dog = Dog(name="fly") 27 | dog.save(self.db) 28 | 29 | def test_get(self): 30 | dog = Dog(name="fly") 31 | dog.save(self.db) 32 | dog_key = dog.key 33 | got_dog = self.db.get(dog_key) 34 | self.assertEqual(dog.name, got_dog.name) 35 | 36 | def test_n1ql(self): 37 | dog = Dog(name="fly") 38 | dog.save(self.db) 39 | dogs = self.db.n1ql('SELECT META(test), * FROM test WHERE type="dog" and name="fly"') 40 | -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_couchbase/_test_basic2.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from fam.database import CouchbaseWrapper 4 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell, NAMESPACE 5 | from fam.mapper import ClassMapper 6 | 7 | COUCHBASE_HOST = "127.0.0.1" 8 | COUCHBASE_BUCKET = "test" 9 | 10 | # class CouchbaseModelTests(unittest.TestCase): 11 | class DatabaseTests(unittest.TestCase): 12 | 13 | 14 | def setUp(self): 15 | mapper = ClassMapper([Dog, Cat, Person, JackRussell]) 16 | self.db = CouchbaseWrapper(mapper, COUCHBASE_HOST, COUCHBASE_BUCKET, read_only=False) 17 | self.db.update_designs() 18 | 19 | def tearDown(self): 20 | pass 21 | 22 | # Test the app 23 | def test_app(self): 24 | self.assertNotEqual(self.db, None) 25 | 26 | 27 | def test_make_an_object(self): 28 | dog = Dog(name="woofer") 29 | self.assertEqual(dog.namespace, NAMESPACE) 30 | self.assertEqual(dog.type, "dog") 31 | self.assertEqual(dog.name, "woofer") 32 | self.assertEqual(dog.__class__, Dog) 33 | 34 | 35 | def test_make_an_object2(self): 36 | dog = Dog(name="woofer") 37 | self.assertEqual(dog.talk(), "woof") 38 | 39 | 40 | def test_make_an_sub_object(self): 41 | jack = JackRussell() 42 | self.assertEqual(jack.talk(), "Yap") 43 | jack.name = "jack" 44 | jack.age = 12 45 | 46 | 47 | def test_make_an_object_saved(self): 48 | dog = Dog(name="woofer") 49 | dog.save(self.db) 50 | self.assertEqual(dog.namespace, NAMESPACE) 51 | self.assertEqual(dog.type, "dog") 52 | self.assertEqual(dog.name, "woofer") 53 | self.assertEqual(dog.__class__, Dog) 54 | self.assertNotEqual(dog.key, None) 55 | 56 | 57 | def test_make_an_object_saved_cas(self): 58 | cat = Cat(name="whiskers") 59 | cat.save(self.db) 60 | self.assertEqual(cat.namespace, NAMESPACE) 61 | self.assertEqual(cat.type, "cat") 62 | self.assertNotEqual(cat.cas, None) 63 | 64 | 65 | def test_get_cat(self): 66 | cat = Cat(name="whiskers") 67 | cat.save(self.db) 68 | another = Cat.get(self.db, cat.key) 69 | self.assertEqual(cat, another) 70 | 71 | 72 | def test_ref_to(self): 73 | paul = Person(name="paul") 74 | paul.save(self.db) 75 | cat = Cat(name="whiskers", owner_id=paul.key) 76 | cat.save(self.db) 77 | self.assertEqual(cat.owner, paul) 78 | self.assertEqual(cat.owner.name, "paul") 79 | 80 | 81 | 82 | def test_ref_from(self): 83 | paul = Person(name="paul") 84 | paul.save(self.db) 85 | cat = Cat(name="whiskers", owner_id=paul.key) 86 | cat.save(self.db) 87 | cat2 = Cat(name="puss", owner_id=paul.key) 88 | cat2.save(self.db) 89 | self.assertEqual(len(paul.cats), 2) 90 | self.assertTrue(paul.cats[0] == cat or paul.cats[1] == cat) 91 | 92 | 93 | def test_delete_cat(self): 94 | cat = Cat(name="whiskers") 95 | cat.save(self.db) 96 | key = cat.key 97 | another = Cat.get(self.db, cat.key) 98 | self.assertEqual(cat, another) 99 | cat.delete(self.db) 100 | revivedcat = self.db.get(key) 101 | self.assertEqual(revivedcat, None) 102 | 103 | 
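    # the next three tests check how deletes propagate across references:
    # deleting a dog removes its owner, while deleting an owner removes
    # their cats but leaves their dogs in place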
104 | def test_delete_dog_to_refs(self): 105 | paul = Person(name="paul") 106 | paul.save(self.db) 107 | dog = Dog(name="rufus", owner_id=paul.key) 108 | dog.save(self.db) 109 | key = dog.key 110 | dog2 = Dog(name="fly", owner_id=paul.key) 111 | dog2.save(self.db) 112 | self.assertNotEqual(dog2.owner, None) 113 | key = paul.key 114 | dog.delete(self.db) 115 | revivedpaul = self.db.get(key) 116 | self.assertEqual(revivedpaul, None) 117 | refresheddog2 = Dog.get(self.db, dog2.key) 118 | self.assertEqual(refresheddog2.owner, None) 119 | 120 | 121 | def test_delete_cat_refs(self): 122 | paul = Person(name="paul") 123 | paul.save(self.db) 124 | cat = Cat(name="whiskers", owner_id=paul.key) 125 | cat.save(self.db) 126 | key = cat.key 127 | cat2 = Cat(name="puss", owner_id=paul.key) 128 | cat2.save(self.db) 129 | revivedcat1 = self.db.get(key) 130 | self.assertNotEqual(revivedcat1, None) 131 | paul.delete(self.db) 132 | revivedcat2 = self.db.get(key) 133 | self.assertEqual(revivedcat2, None) 134 | 135 | 136 | def test_delete_dog_refs(self): 137 | paul = Person(name="paul") 138 | paul.save(self.db) 139 | dog = Dog(name="rufus", owner_id=paul.key) 140 | dog.save(self.db) 141 | key = dog.key 142 | dog2 = Dog(name="fly", owner_id=paul.key) 143 | dog2.save(self.db) 144 | reviveddog1 = self.db.get(key) 145 | self.assertNotEqual(reviveddog1, None) 146 | paul.delete(self.db) 147 | reviveddog2 = self.db.get(key) 148 | self.assertNotEqual(reviveddog2, None) 149 | 150 | 151 | def test_update_cat(self): 152 | cat = Cat(name="whiskers") 153 | cat.save(self.db) 154 | cat.name = "blackie" 155 | cat.save(self.db) 156 | self.assertEqual(cat.name, "blackie") 157 | self.assertEqual(cat._properties["name"], "blackie") 158 | self.assertFalse("name" in cat.__dict__.keys()) 159 | 160 | 161 | def setcatfood(self): 162 | self.cat.food = "biscuits" 163 | 164 | 165 | def test_update_catfood(self): 166 | self.cat = Cat(name="whiskers") 167 | self.cat.save(self.db) 168 | self.assertRaises(Exception, self.setcatfood) 169 | 170 | 171 | def test_update_dogfood(self): 172 | dog = Dog(name="fly") 173 | dog.food = "biscuits" 174 | dog.save(self.db) 175 | 176 | 177 | def test_all(self): 178 | dog = Dog(name="fly") 179 | dog.save(self.db) 180 | all = Dog.all(self.db) 181 | self.assertEqual(len(all), 1) 182 | 183 | -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_couchbase/couchbase_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from fam.database import CouchbaseWrapper 4 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell, NAMESPACE 5 | from fam.mapper import ClassMapper 6 | 7 | COUCHBASE_HOST = "127.0.0.1" 8 | COUCHBASE_BUCKET = "test" 9 | 10 | # class CouchbaseModelTests(unittest.TestCase): 11 | class DatabaseTests(unittest.TestCase): 12 | 13 | 14 | def setUp(self): 15 | mapper = ClassMapper([Dog, Cat, Person, JackRussell]) 16 | self.db = CouchbaseWrapper(mapper, COUCHBASE_HOST, COUCHBASE_BUCKET, read_only=False) 17 | self.db.update_designs() 18 | 19 | def tearDown(self): 20 | pass 21 | 22 | # Test the app 23 | def test_app(self): 24 | self.assertNotEqual(self.db, None) 25 | 26 | 27 | def test_make_an_object(self): 28 | dog = Dog(name="woofer") 29 | self.assertEqual(dog.namespace, NAMESPACE) 30 | self.assertEqual(dog.type, "dog") 31 | self.assertEqual(dog.name, "woofer") 32 | self.assertEqual(dog.__class__, Dog) 33 | 34 | 35 | def test_make_an_object2(self): 36 | dog = 
Dog(name="woofer") 37 | self.assertEqual(dog.talk(), "woof") 38 | 39 | 40 | def test_make_an_sub_object(self): 41 | jack = JackRussell() 42 | self.assertEqual(jack.talk(), "Yap") 43 | jack.name = "jack" 44 | jack.age = 12 45 | 46 | 47 | def test_make_an_object_saved(self): 48 | dog = Dog(name="woofer") 49 | dog.save(self.db) 50 | self.assertEqual(dog.namespace, NAMESPACE) 51 | self.assertEqual(dog.type, "dog") 52 | self.assertEqual(dog.name, "woofer") 53 | self.assertEqual(dog.__class__, Dog) 54 | self.assertNotEqual(dog.key, None) 55 | 56 | 57 | def test_make_an_object_saved_cas(self): 58 | cat = Cat(name="whiskers") 59 | cat.save(self.db) 60 | self.assertEqual(cat.namespace, NAMESPACE) 61 | self.assertEqual(cat.type, "cat") 62 | self.assertNotEqual(cat.cas, None) 63 | 64 | 65 | def test_get_cat(self): 66 | cat = Cat(name="whiskers") 67 | cat.save(self.db) 68 | another = Cat.get(self.db, cat.key) 69 | self.assertEqual(cat, another) 70 | 71 | 72 | def test_ref_to(self): 73 | paul = Person(name="paul") 74 | paul.save(self.db) 75 | cat = Cat(name="whiskers", owner_id=paul.key) 76 | cat.save(self.db) 77 | self.assertEqual(cat.owner, paul) 78 | self.assertEqual(cat.owner.name, "paul") 79 | 80 | 81 | 82 | def test_ref_from(self): 83 | paul = Person(name="paul") 84 | paul.save(self.db) 85 | cat = Cat(name="whiskers", owner_id=paul.key) 86 | cat.save(self.db) 87 | cat2 = Cat(name="puss", owner_id=paul.key) 88 | cat2.save(self.db) 89 | self.assertEqual(len(paul.cats), 2) 90 | self.assertTrue(paul.cats[0] == cat or paul.cats[1] == cat) 91 | 92 | 93 | def test_delete_cat(self): 94 | cat = Cat(name="whiskers") 95 | cat.save(self.db) 96 | key = cat.key 97 | another = Cat.get(self.db, cat.key) 98 | self.assertEqual(cat, another) 99 | cat.delete(self.db) 100 | revivedcat = self.db.get(key) 101 | self.assertEqual(revivedcat, None) 102 | 103 | 104 | def test_delete_dog_to_refs(self): 105 | paul = Person(name="paul") 106 | paul.save(self.db) 107 | dog = Dog(name="rufus", owner_id=paul.key) 108 | dog.save(self.db) 109 | key = dog.key 110 | dog2 = Dog(name="fly", owner_id=paul.key) 111 | dog2.save(self.db) 112 | self.assertNotEqual(dog2.owner, None) 113 | key = paul.key 114 | dog.delete(self.db) 115 | revivedpaul = self.db.get(key) 116 | self.assertEqual(revivedpaul, None) 117 | refresheddog2 = Dog.get(self.db, dog2.key) 118 | self.assertEqual(refresheddog2.owner, None) 119 | 120 | 121 | def test_delete_cat_refs(self): 122 | paul = Person(name="paul") 123 | paul.save(self.db) 124 | cat = Cat(name="whiskers", owner_id=paul.key) 125 | cat.save(self.db) 126 | key = cat.key 127 | cat2 = Cat(name="puss", owner_id=paul.key) 128 | cat2.save(self.db) 129 | revivedcat1 = self.db.get(key) 130 | self.assertNotEqual(revivedcat1, None) 131 | paul.delete(self.db) 132 | revivedcat2 = self.db.get(key) 133 | self.assertEqual(revivedcat2, None) 134 | 135 | 136 | def test_delete_dog_refs(self): 137 | paul = Person(name="paul") 138 | paul.save(self.db) 139 | dog = Dog(name="rufus", owner_id=paul.key) 140 | dog.save(self.db) 141 | key = dog.key 142 | dog2 = Dog(name="fly", owner_id=paul.key) 143 | dog2.save(self.db) 144 | reviveddog1 = self.db.get(key) 145 | self.assertNotEqual(reviveddog1, None) 146 | paul.delete(self.db) 147 | reviveddog2 = self.db.get(key) 148 | self.assertNotEqual(reviveddog2, None) 149 | 150 | 151 | def test_update_cat(self): 152 | cat = Cat(name="whiskers") 153 | cat.save(self.db) 154 | cat.name = "blackie" 155 | cat.save(self.db) 156 | self.assertEqual(cat.name, "blackie") 157 | 
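        # updated values should live in _properties rather than being shadowed by
        # instance attributes in __dict__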
self.assertEqual(cat._properties["name"], "blackie") 158 | self.assertFalse("name" in cat.__dict__.keys()) 159 | 160 | 161 | def setcatfood(self): 162 | self.cat.food = "biscuits" 163 | 164 | 165 | def test_update_catfood(self): 166 | self.cat = Cat(name="whiskers") 167 | self.cat.save(self.db) 168 | self.assertRaises(Exception, self.setcatfood) 169 | 170 | 171 | def test_update_dogfood(self): 172 | dog = Dog(name="fly") 173 | dog.food = "biscuits" 174 | dog.save(self.db) 175 | 176 | 177 | def test_all(self): 178 | dog = Dog(name="fly") 179 | dog.save(self.db) 180 | all = Dog.all(self.db) 181 | self.assertEqual(len(all), 1) 182 | 183 | -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_sync_gateway/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/depricated/test_sync_gateway/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_sync_gateway/_test_acl.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import time 4 | import json 5 | import subprocess 6 | from fam.database import SyncGatewayWrapper 7 | from fam.mapper import ClassMapper 8 | from fam.tests.test_sync_gateway.config import * 9 | from fam.tests.models.acl import Car, Bike, Boat 10 | from fam.acl.writer import write_sync_function, _requirements_from_mapper 11 | from fam.utils import couchbase_utils 12 | 13 | from fam.exceptions import * 14 | 15 | TEST_DIR = os.path.dirname(os.path.abspath(__file__)) 16 | DATA_PATH = os.path.join(TEST_DIR, "data") 17 | 18 | """ 19 | Actors are people who do things 20 | 21 | users: a set of named users 22 | roles: a set of people who have any of given roles 23 | owner: an owner as named in the doc 24 | anyone: anyone as long as we know who they are 25 | 26 | Actions are things they can do: 27 | 28 | create: Create a new document with a new id 29 | update: Make a change to an existing document 30 | delete: Delete an existing document 31 | 32 | """ 33 | 34 | 35 | class testPermissions(unittest.TestCase): 36 | 37 | def setUp(self): 38 | 39 | self.gateway = None 40 | self.db = None 41 | 42 | self.mapper = ClassMapper([Car, Boat]) 43 | 44 | 45 | 46 | def start_gateway(self, conf_file_path): 47 | 48 | cmd = "{} -log=* {}".format(SYNC_GATEWAY_PATH, conf_file_path) 49 | print cmd 50 | 51 | time.sleep(0.25) 52 | self.gateway = subprocess.Popen(cmd, shell=True) 53 | time.sleep(0.25) 54 | 55 | admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT) 56 | self.admin_db = SyncGatewayWrapper(self.mapper, admin_url, SYNC_GATEWAY_NAME) 57 | self.admin_db.update_designs() 58 | self.add_users() 59 | 60 | 61 | def add_users(self): 62 | 63 | admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT) 64 | 65 | couchbase_utils.add_person_to_gateway(admin_url, 66 | SYNC_GATEWAY_NAME, 67 | "paul_id", 68 | "paul", 69 | "password1", 70 | admin_channels=["cars", "paul"]) 71 | 72 | couchbase_utils.add_person_to_gateway(admin_url, 73 | SYNC_GATEWAY_NAME, 74 | "sol_id", 75 | "sol", 76 | "password2", 77 | admin_channels=["sol"]) 78 | 79 | paul_url = "http://paul:password1@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT) 80 | self.paul_db = SyncGatewayWrapper(self.mapper, paul_url, SYNC_GATEWAY_NAME) 81 | 82 | sol_url = 
"http://sol:password2@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT) 83 | self.sol_db = SyncGatewayWrapper(self.mapper, sol_url, SYNC_GATEWAY_NAME) 84 | 85 | 86 | def tearDown(self): 87 | # stop the gateway 88 | if self.gateway is not None: 89 | self.gateway.kill() 90 | 91 | 92 | def test_index_permissions(self): 93 | 94 | expected = { 95 | "create": { 96 | "car": { 97 | "owner": True 98 | }, 99 | "boat": { 100 | "owner": True, 101 | "withoutAccess": True 102 | } 103 | }, 104 | "update": { 105 | "car": [ 106 | { 107 | "fields": [ 108 | "access" 109 | ], 110 | "role": [] 111 | }, 112 | { 113 | "owner": True, 114 | "fields": [ 115 | "colour" 116 | ] 117 | } 118 | ], 119 | "boat": [ 120 | { 121 | "role": [] 122 | } 123 | ] 124 | }, 125 | "delete": { 126 | "car": { 127 | "owner": True 128 | }, 129 | "boat": { 130 | "owner": True 131 | } 132 | } 133 | } 134 | 135 | 136 | 137 | requirements = _requirements_from_mapper(self.mapper) 138 | 139 | print json.dumps(requirements, indent=4) 140 | 141 | self.assertEqual(expected, requirements) 142 | 143 | 144 | def test_write_permissions(self): 145 | 146 | src_path = os.path.join(DATA_PATH, "sync_conf_template") 147 | dst_path = os.path.join(DATA_PATH, "sync_conf") 148 | 149 | if os.path.exists(dst_path): 150 | os.remove(dst_path) 151 | 152 | write_sync_function(src_path, dst_path, self.mapper) 153 | 154 | self.start_gateway(dst_path) 155 | 156 | 157 | def test_owner_create(self): 158 | 159 | self.test_write_permissions() 160 | 161 | car = Car(colour="red", stars=3, owner_name="paul", channels=["cars", "sol"]) 162 | self.paul_db.put(car) 163 | 164 | car.colour = "green" 165 | self.paul_db.put(car) 166 | car.stars = 2 167 | self.paul_db.put(car) 168 | self.paul_db.delete(car) 169 | 170 | car2 = Car(colour="green", owner_name="sol", channels=["cars", "sol"]) 171 | self.assertRaises(FamResourceConflict, self.paul_db.put, car2) 172 | 173 | 174 | def test_non_owner_permissions(self): 175 | 176 | self.test_write_permissions() 177 | 178 | car = Car(colour="red", stars=3, owner_name="paul", channels=["cars", "sol"]) 179 | self.paul_db.put(car) 180 | sols_car = self.sol_db.get(car.key) 181 | 182 | ## changing green fails 183 | sols_car.colour = "green" 184 | self.assertRaises(FamResourceConflict, self.sol_db.put, sols_car) 185 | 186 | ## changing stars works 187 | sols_car.colour = "red" 188 | sols_car.stars = 2 189 | self.sol_db.put(sols_car) 190 | 191 | # check nn owner cant delete 192 | self.assertRaises(FamResourceConflict, self.sol_db.delete, sols_car) 193 | 194 | 195 | def test_wrong_owner_create_fails(self): 196 | 197 | self.test_write_permissions() 198 | car = Car(colour="red", stars=3, owner_name="sol", channels=["cars", "sol"]) 199 | self.assertRaises(FamResourceConflict, self.paul_db.put, car) 200 | 201 | 202 | def test_create_access(self): 203 | self.test_write_permissions() 204 | car1 = Car(colour="red", stars=3, owner_name="sol", channels=["sol"]) 205 | self.sol_db.put(car1) 206 | self.sol_db.get(car1.key) 207 | 208 | car2 = Car(colour="green", stars=2, owner_name="sol", channels=["paul"]) 209 | self.assertRaises(Exception, self.sol_db.put, car2) 210 | self.sol_db.get(car2.key) 211 | 212 | 213 | def test_change_access_without_permission_fails(self): 214 | self.test_write_permissions() 215 | car = Car(colour="red", stars=3, owner_name="paul", channels=["cars"]) 216 | self.paul_db.put(car) 217 | car.access = ["sol"] 218 | self.assertRaises(FamResourceConflict, self.paul_db.put, car) 219 | 220 | 221 | def test_change_access(self): 222 | 
self.test_write_permissions() 223 | car = Car(key="cars", colour="red", stars=3, owner_name="paul", channels=["cars"]) 224 | self.paul_db.put(car) 225 | 226 | ## sol cant get car 227 | self.assertRaises(Exception, self.sol_db.get, "cars") 228 | 229 | car.access = ["sol"] 230 | self.admin_db.put(car) 231 | self.sol_db.get(car.key) 232 | 233 | 234 | def test_no_access(self): 235 | self.test_write_permissions() 236 | bike = Bike(wheels = 2) 237 | self.assertRaises(Exception, self.paul_db.put, bike) 238 | 239 | 240 | def test_no_access_admin(self): 241 | self.test_write_permissions() 242 | bike = Bike(wheels=2) 243 | self.admin_db.put(bike) 244 | 245 | 246 | def test_own_access_create(self): 247 | 248 | self.test_write_permissions() 249 | boat_id = "boaty" 250 | boat = Boat(key=boat_id, name="steve", is_sail=True, owner_name="paul", access=["paul"], channels=[boat_id]) 251 | self.paul_db.put(boat) 252 | 253 | 254 | 255 | 256 | -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_sync_gateway/_test_sync_gateway_common.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import time 4 | import subprocess 5 | from fam.database import SyncGatewayWrapper 6 | from fam.mapper import ClassMapper 7 | from fam.tests.test_sync_gateway.config import * 8 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell, Monkey, Monarch, Monster 9 | from fam.tests.common import common_test_classes 10 | 11 | current_module = sys.modules[__name__] 12 | 13 | TEST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 14 | DATA_PATH = os.path.join(TEST_DIR, "common", "data") 15 | 16 | def iterSyncGatewayTests(): 17 | 18 | for test_class in common_test_classes: 19 | name = "{}SyncGateway".format(test_class.__name__) 20 | 21 | def setUp(self): 22 | 23 | cmd = "{} -log=* -url walrus: ".format(SYNC_GATEWAY_PATH) 24 | # print cmd 25 | 26 | time.sleep(0.25) 27 | self.gateway = subprocess.Popen(cmd, shell=True) 28 | time.sleep(0.25) 29 | filepath = os.path.join(DATA_PATH, "animal_views.js") 30 | mapper = ClassMapper([Dog, Cat, Person, JackRussell, Monkey, Monarch, Monster], designs=[filepath]) 31 | 32 | url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT) 33 | self.db = SyncGatewayWrapper(mapper, url, SYNC_GATEWAY_NAME) 34 | self.db.update_designs() 35 | super(self.__class__, self).setUp() 36 | 37 | def tearDown(self): 38 | # stop the gateway 39 | self.gateway.kill() 40 | 41 | methods = { 42 | "setUp": setUp, 43 | "tearDown":tearDown 44 | } 45 | 46 | setattr(current_module, name, type(name, (test_class,), methods)) 47 | 48 | 49 | ## not running the gateway tests for circle as walrus doesnt work properly there 50 | iterSyncGatewayTests() 51 | 52 | -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_sync_gateway/_test_user.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import time 4 | import json 5 | import subprocess 6 | from fam.database import SyncGatewayWrapper 7 | from fam.mapper import ClassMapper 8 | from fam.tests.test_sync_gateway.config import * 9 | from fam.tests.models.acl import Car, Bike, Boat 10 | from fam.acl.writer import write_sync_function, _requirements_from_mapper 11 | from fam.utils import couchbase_utils 12 | 13 | from fam.exceptions import * 14 | 15 | TEST_DIR = os.path.dirname(os.path.abspath(__file__)) 16 
| DATA_PATH = os.path.join(TEST_DIR, "data") 17 | 18 | """ 19 | Actors are people who do things 20 | 21 | users: a set of named users 22 | roles: a set of people who have any of given roles 23 | owner: an owner as named in the doc 24 | anyone: anyone as long as we know who they are 25 | 26 | Actions are things they can do: 27 | 28 | create: Create a new document with a new id 29 | update: Make a change to an existing document 30 | delete: Delete an existing document 31 | 32 | """ 33 | 34 | 35 | class testPermissions(unittest.TestCase): 36 | 37 | def setUp(self): 38 | 39 | self.gateway = None 40 | self.db = None 41 | 42 | self.mapper = ClassMapper([Car, Boat]) 43 | 44 | self.start_gateway() 45 | 46 | 47 | 48 | def start_gateway(self): 49 | 50 | cmd = "{} -log=* -url walrus: ".format(SYNC_GATEWAY_PATH) 51 | # print cmd 52 | 53 | time.sleep(0.25) 54 | self.gateway = subprocess.Popen(cmd, shell=True) 55 | time.sleep(0.25) 56 | 57 | admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT) 58 | self.admin_db = SyncGatewayWrapper(self.mapper, admin_url, SYNC_GATEWAY_NAME) 59 | self.admin_db.update_designs() 60 | self.add_users() 61 | 62 | 63 | def add_users(self): 64 | 65 | admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT) 66 | 67 | couchbase_utils.add_person_to_gateway(admin_url, 68 | SYNC_GATEWAY_NAME, 69 | "paul_id", 70 | "paul", 71 | "password1", 72 | admin_channels=["cars", "paul"]) 73 | 74 | couchbase_utils.add_person_to_gateway(admin_url, 75 | SYNC_GATEWAY_NAME, 76 | "sol_id", 77 | "sol", 78 | "password2", 79 | admin_channels=["sol"]) 80 | 81 | paul_url = "http://paul:password1@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT) 82 | self.paul_db = SyncGatewayWrapper(self.mapper, paul_url, SYNC_GATEWAY_NAME) 83 | 84 | sol_url = "http://sol:password2@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT) 85 | self.sol_db = SyncGatewayWrapper(self.mapper, sol_url, SYNC_GATEWAY_NAME) 86 | 87 | 88 | def tearDown(self): 89 | # stop the gateway 90 | if self.gateway is not None: 91 | self.gateway.kill() 92 | 93 | 94 | def test_get_user(self): 95 | 96 | user_info = self.admin_db.user("paul") 97 | # print user_info 98 | self.assertTrue(user_info != None) 99 | roles = user_info["admin_roles"] 100 | 101 | self.assertEqual(roles, ["paul_id"]) 102 | 103 | 104 | def test_create_role(self): 105 | 106 | role_info = self.admin_db.role("new_role") 107 | # print "role: ", role_info 108 | self.assertTrue(role_info == None) 109 | 110 | self.admin_db.ensure_role("new_role") 111 | 112 | role_info = self.admin_db.role("new_role") 113 | # print "role: ", role_info 114 | self.assertTrue(role_info != None) 115 | 116 | 117 | def test_add_role(self): 118 | 119 | user_info = self.admin_db.user("paul") 120 | self.assertTrue(user_info != None) 121 | roles = user_info["admin_roles"] 122 | self.assertEqual(roles, ["paul_id"]) 123 | channels = user_info["admin_channels"] 124 | self.assertEqual(set(channels), set(["cars", "paul"])) 125 | 126 | success = self.admin_db.ensure_user_role("paul", "new_role") 127 | 128 | self.assertTrue(success) 129 | 130 | user_info = self.admin_db.user("paul") 131 | self.assertTrue(user_info != None) 132 | roles = user_info["admin_roles"] 133 | channels = user_info["admin_channels"] 134 | self.assertEqual(set(roles), set(["paul_id", "new_role"])) 135 | self.assertEqual(set(channels), set(["cars", "paul"])) 136 | 137 | 138 | 139 | 140 | 141 | -------------------------------------------------------------------------------- 
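A minimal illustrative sketch, not a file in this repo: the actor/action model described in the docstrings of these deprecated sync-gateway tests (users, roles, owner, anyone × create, update, delete) is declared on model classes through fam.acl requirement objects, as src/fam/tests/models/acl.py does further down. The Van class and its fields below are hypothetical; the requirement arguments mirror those used on Car and Boat.

from fam.blud import GenericObject, StringField, ListField
from fam.acl import CreateRequirement, UpdateRequirement, DeleteRequirement, NO_ONE


class Van(GenericObject):
    # hypothetical example class, not part of the test models
    fields = {
        "colour": StringField(),
        "owner_name": StringField(),   # names the owning user, as on Car and Boat
        "channels": ListField(),
        "access": ListField(),
    }

    grants_access = True

    acl = [
        CreateRequirement(role=None, owner=True),   # create: only the named owner
        UpdateRequirement(role=NO_ONE),             # update: nobody, once written
        DeleteRequirement(role=None, owner=True),   # delete: only the named owner
    ]

write_sync_function(src_path, dst_path, mapper) then compiles requirements like these into the sync function that _test_acl.py writes out and installs before starting the gateway.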
/src/fam/tests/depricated/test_sync_gateway/_test_view_creation.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import unittest 3 | import os 4 | import time 5 | import json 6 | import subprocess 7 | from fam.database import SyncGatewayWrapper 8 | from fam.mapper import ClassMapper 9 | from fam.tests.test_sync_gateway.config import * 10 | from fam.tests.models.test01 import Cat, Dog, Person 11 | from fam.acl.writer import write_sync_function, _requirements_from_mapper 12 | from fam.utils import couchbase_utils 13 | 14 | from fam.exceptions import * 15 | 16 | TEST_DIR = os.path.dirname(os.path.abspath(__file__)) 17 | DATA_PATH = os.path.join(TEST_DIR, "data") 18 | 19 | """ 20 | Actors are people who do things 21 | 22 | users: a set of named users 23 | roles: a set of people who have any of given roles 24 | owner: an owner as named in the doc 25 | anyone: anyone as long as we know who they are 26 | 27 | Actions are things they can do: 28 | 29 | create: Create a new document with a new id 30 | update: Make a change to an existing document 31 | delete: Delete an existing document 32 | 33 | """ 34 | 35 | 36 | class testPermissions(unittest.TestCase): 37 | 38 | def setUp(self): 39 | 40 | self.gateway = None 41 | self.db = None 42 | 43 | self.mapper = ClassMapper([Cat, Dog]) 44 | 45 | self.start_gateway() 46 | 47 | 48 | 49 | def start_gateway(self): 50 | 51 | cmd = "{} -log=* -url walrus: ".format(SYNC_GATEWAY_PATH) 52 | # print cmd 53 | 54 | time.sleep(0.25) 55 | self.gateway = subprocess.Popen(cmd, shell=True) 56 | time.sleep(0.25) 57 | 58 | admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT) 59 | self.admin_db = SyncGatewayWrapper(self.mapper, admin_url, SYNC_GATEWAY_NAME) 60 | self.admin_db.update_designs() 61 | self.add_users() 62 | 63 | 64 | def add_users(self): 65 | 66 | admin_url = "http://%s:%s" % (SYNC_GATEWAY_ADMIN_HOST, SYNC_GATEWAY_ADMIN_PORT) 67 | 68 | couchbase_utils.add_person_to_gateway(admin_url, 69 | SYNC_GATEWAY_NAME, 70 | "paul_id", 71 | "paul", 72 | "password1", 73 | admin_channels=["cars", "paul"]) 74 | 75 | couchbase_utils.add_person_to_gateway(admin_url, 76 | SYNC_GATEWAY_NAME, 77 | "sol_id", 78 | "sol", 79 | "password2", 80 | admin_channels=["sol"]) 81 | 82 | paul_url = "http://paul:password1@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT) 83 | self.paul_db = SyncGatewayWrapper(self.mapper, paul_url, SYNC_GATEWAY_NAME) 84 | 85 | sol_url = "http://sol:password2@%s:%s" % (SYNC_GATEWAY_HOST, SYNC_GATEWAY_PORT) 86 | self.sol_db = SyncGatewayWrapper(self.mapper, sol_url, SYNC_GATEWAY_NAME) 87 | 88 | 89 | def tearDown(self): 90 | # stop the gateway 91 | if self.gateway is not None: 92 | self.gateway.kill() 93 | 94 | 95 | def test_ensure_designs(self): 96 | 97 | for namespace_name, namespace in self.admin_db.mapper.namespaces.items(): 98 | view_namespace = namespace_name.replace("/", "_") 99 | key = "_design/%s" % view_namespace 100 | doc = self.mapper.get_design(namespace, namespace_name, self.admin_db.FOREIGN_KEY_MAP_STRING) 101 | doc["_id"] = key 102 | existing = self.admin_db.get_design(key) 103 | matches = self.admin_db._new_matches_existing(doc, existing) 104 | self.assertTrue(matches) 105 | 106 | # Add another class 107 | self.admin_db.mapper._add_classes([Person]) 108 | 109 | for namespace_name, namespace in self.admin_db.mapper.namespaces.items(): 110 | view_namespace = namespace_name.replace("/", "_") 111 | key = "_design/%s" % view_namespace 112 | doc = 
self.mapper.get_design(namespace, namespace_name, self.admin_db.FOREIGN_KEY_MAP_STRING) 113 | doc["_id"] = key 114 | existing = self.admin_db.get_design(key) 115 | matches = self.admin_db._new_matches_existing(doc, existing) 116 | 117 | self.assertFalse(matches) -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_sync_gateway/config.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | SYNC_GATEWAY_HOST = "localhost" 4 | SYNC_GATEWAY_ADMIN_HOST = "localhost" 5 | SYNC_GATEWAY_PORT = "4984" 6 | SYNC_GATEWAY_ADMIN_PORT = "4985" 7 | SYNC_GATEWAY_NAME = "sync_gateway" 8 | 9 | if sys.platform.startswith('linux'): 10 | SYNC_GATEWAY_PATH = "/opt/couchbase-sync-gateway/bin/sync_gateway" 11 | else: 12 | # SYNC_GATEWAY_PATH = "/usr/local/bin/sync_gateway" 13 | SYNC_GATEWAY_PATH = "/Users/paul/Dropbox/glowinthedark/spate/Flotsam/bin/sync_gateway_versions/1.4.1/sync_gateway" 14 | -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_sync_gateway/data/last_seq.json: -------------------------------------------------------------------------------- 1 | "155" -------------------------------------------------------------------------------- /src/fam/tests/depricated/test_sync_gateway/data/sync_conf_template: -------------------------------------------------------------------------------- 1 | { 2 | "interface":":4984", 3 | "adminInterface":":4985", 4 | "log":["REST"], 5 | "databases":{ 6 | "sync_gateway":{ 7 | "server":"walrus:", 8 | "bucket":"sync_gateway", 9 | "sync":`SYNC_FUNCTION` 10 | } 11 | } 12 | } 13 | 14 | -------------------------------------------------------------------------------- /src/fam/tests/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/models/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/models/_test02.py: -------------------------------------------------------------------------------- 1 | from fam.blud import GenericObject, StringField, ReferenceFrom, ReferenceTo, BoolField, NumberField, DictField 2 | 3 | 4 | NAMESPACE = "glowinthedark.co.uk/test/2" 5 | 6 | 7 | class Dog(GenericObject): 8 | additional_properties = True 9 | fields = { 10 | "name": StringField(), 11 | "owner": ReferenceTo(NAMESPACE, "person", cascade_delete=True) 12 | } 13 | 14 | def talk(self): 15 | return "woof" 16 | 17 | 18 | class Person(GenericObject): 19 | fields = { 20 | "name": StringField(), 21 | "dogs": ReferenceFrom(NAMESPACE, "dog", "owner_id") 22 | } 23 | 24 | -------------------------------------------------------------------------------- /src/fam/tests/models/_test03.py: -------------------------------------------------------------------------------- 1 | from fam.blud import GenericObject, StringField, ReferenceFrom, ReferenceTo, BoolField, NumberField, DictField 2 | 3 | 4 | NAMESPACE = "glowinthedark.co.uk/test/3" 5 | 6 | 7 | class Dog(GenericObject): 8 | additional_properties = True 9 | fields = { 10 | "name": StringField(default="fly", required=True), 11 | "owner_id": ReferenceTo(NAMESPACE, "person", cascade_delete=True) 12 | } 13 | 14 | def talk(self): 15 | return "woof" 16 | 17 | 18 | -------------------------------------------------------------------------------- /src/fam/tests/models/acl.py: 
-------------------------------------------------------------------------------- 1 | from fam.blud import GenericObject, StringField, ReferenceFrom, ReferenceTo, BoolField, NumberField, DictField, ObjectField, ListField 2 | from fam.acl import CreateRequirement, DeleteRequirement, UpdateRequirement, NO_ONE, ANYONE 3 | 4 | NAMESPACE = "glowinthedark.co.uk/test" 5 | 6 | 7 | 8 | class Bike(GenericObject): 9 | 10 | fields = { 11 | "wheels": NumberField(), 12 | } 13 | 14 | 15 | class Car(GenericObject): 16 | 17 | fields = { 18 | "colour": StringField(), 19 | "stars": NumberField(), 20 | "owner_name": StringField(), 21 | "channels": ListField(), 22 | "access": ListField() 23 | } 24 | 25 | grants_access = True 26 | 27 | acl = [ 28 | CreateRequirement(role=None, owner=True), 29 | DeleteRequirement(role=None, owner=True), 30 | UpdateRequirement(role=[], fields=["access"]), 31 | UpdateRequirement(role=None, owner=True, fields=["colour"]), 32 | ] 33 | 34 | class Boat(GenericObject): 35 | 36 | fields = { 37 | "name": StringField(), 38 | "is_sail": BoolField(), 39 | "owner_name": StringField(), 40 | "channels": ListField(), 41 | "access": ListField() 42 | } 43 | 44 | grants_access = True 45 | 46 | acl = [ 47 | CreateRequirement(role=ANYONE, owner=True, access=False), 48 | UpdateRequirement(role=NO_ONE), 49 | DeleteRequirement(role=ANYONE, owner=True), 50 | ] 51 | 52 | 53 | -------------------------------------------------------------------------------- /src/fam/tests/models/test01.py: -------------------------------------------------------------------------------- 1 | from fam.blud import GenericObject 2 | from fam.fields import * 3 | 4 | 5 | NAMESPACE = "glowinthedark.co.uk/test" 6 | 7 | class Dog(GenericObject): 8 | 9 | additional_properties = True 10 | sync_gateway_write = True 11 | fields = { 12 | "name": StringField(), 13 | "owner_id": ReferenceTo(NAMESPACE, "person", cascade_delete=True), 14 | "kennel_club_membership": StringField(unique=True), 15 | "channels": ListField(default=["callbacks"]) 16 | } 17 | 18 | def talk(self): 19 | return "woof" 20 | 21 | def pre_save_new_cb(self, db): 22 | pass 23 | 24 | def post_save_new_cb(self, db): 25 | pass 26 | 27 | def pre_save_update_cb(self, db, old_properties): 28 | pass 29 | 30 | def post_save_update_cb(self, db): 31 | pass 32 | 33 | def pre_delete_cb(self, db): 34 | pass 35 | 36 | def post_delete_cb(self, db): 37 | pass 38 | 39 | def changes_new_cb(self, db): 40 | if self.owner: 41 | self.owner.add_callback(db, "changes_new_cb") 42 | 43 | def changes_update_cb(self, db): 44 | if self.owner: 45 | self.owner.add_callback(db, "changes_update_cb") 46 | 47 | 48 | 49 | 50 | class JackRussell(Dog): 51 | fields = { 52 | "age": NumberField() 53 | } 54 | 55 | def talk(self): 56 | return "Yap" 57 | 58 | 59 | class Cat(GenericObject): 60 | fields = { 61 | "name": StringField(), 62 | "colour": StringField(immutable=True), 63 | "tail": BoolField(immutable=True, default=True), 64 | "legs": NumberField(required=True), 65 | "owner_id": ReferenceTo(NAMESPACE, "person", required=True), 66 | "email": EmailField() 67 | } 68 | 69 | 70 | @classmethod 71 | def all_with_n_legs(cls, db, legs): 72 | return db.view("animal_views/cat_legs", key=legs) 73 | 74 | 75 | class Person(GenericObject): 76 | fields = { 77 | "name": StringField(), 78 | "cats": ReferenceFrom(NAMESPACE, "cat", "owner_id", cascade_delete=True), 79 | "dogs": ReferenceFrom(NAMESPACE, "dog", "owner_id"), 80 | "animals": ReferenceFrom(NAMESPACE, ["dog", "cat"], "owner_id"), 81 | "callbacks": ListField() 82 | } 83 
| 84 | def add_callback(self, db, name): 85 | 86 | if self.callbacks is None: 87 | self.callbacks = [] 88 | 89 | self.callbacks.append(name) 90 | self.save(db) 91 | 92 | 93 | class Monarch(Person): 94 | fields = { 95 | "country": StringField(), 96 | } 97 | 98 | 99 | class Monkey(GenericObject): 100 | use_rev = False 101 | fields = { 102 | "name": StringField(), 103 | "colour": StringField(immutable=True), 104 | } 105 | 106 | 107 | class Weapons(object): 108 | 109 | def __init__(self, wings, fire, claws): 110 | 111 | self.fire = fire 112 | self.claws = claws 113 | self.wings = wings 114 | 115 | def to_json(self): 116 | 117 | return { 118 | "fire": self.fire, 119 | "claws": self.claws, 120 | "wings": self.wings, 121 | } 122 | 123 | @classmethod 124 | def from_json(cls, as_json): 125 | return cls(as_json["wings"], as_json["fire"], as_json["claws"]) 126 | 127 | 128 | 129 | class Monster(GenericObject): 130 | 131 | fields = { 132 | "name": StringField(), 133 | "weapons": ObjectField(cls=Weapons), 134 | 135 | 136 | } 137 | 138 | class Event(GenericObject): 139 | 140 | fields = { 141 | "name": StringField(), 142 | "created": DateTimeField(), 143 | "chance": FractionField() 144 | } -------------------------------------------------------------------------------- /src/fam/tests/models/test04.py: -------------------------------------------------------------------------------- 1 | from fam.blud import (GenericObject, 2 | StringField, 3 | ReferenceFrom, 4 | ReferenceTo, 5 | BoolField, 6 | NumberField, 7 | DictField, 8 | ObjectField, 9 | ListField, 10 | LatLongField, 11 | DateTimeField, 12 | FractionField, 13 | DecimalField, 14 | BytesField) 15 | 16 | 17 | NAMESPACE = "glowinthedark.co.uk/test" 18 | 19 | 20 | 21 | class House(GenericObject): 22 | 23 | fields = { 24 | "name": StringField(), 25 | "location": LatLongField(), 26 | } 27 | 28 | 29 | class Fence(GenericObject): 30 | 31 | fields = { 32 | "name": StringField(), 33 | "boundary": ListField() 34 | } 35 | 36 | 37 | class Fish(GenericObject): 38 | 39 | fields = { 40 | "name": StringField(), 41 | "location": LatLongField(), 42 | "born": DateTimeField(), 43 | "length": DecimalField(), 44 | "edible_fraction": FractionField(), 45 | "image": BytesField() 46 | } -------------------------------------------------------------------------------- /src/fam/tests/models/test05.py: -------------------------------------------------------------------------------- 1 | from fam.blud import GenericObject 2 | from fam.fields import * 3 | 4 | 5 | NAMESPACE = "glowinthedark.co.uk/test05" 6 | 7 | 8 | class Cat(GenericObject): 9 | fields = { 10 | "name": StringField(), 11 | "colour": StringField(immutable=True), 12 | "tail": BoolField(immutable=True, default=True), 13 | "legs": NumberField(required=True), 14 | "owner_id": ReferenceTo(NAMESPACE, "person", required=True) 15 | } 16 | 17 | 18 | 19 | class Person(GenericObject): 20 | fields = { 21 | "name": StringField(), 22 | "cats": ReferenceFrom(NAMESPACE, "cat", "owner_id", cascade_delete=True), 23 | } 24 | -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/test_couchdb/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/animal_views.js: 
-------------------------------------------------------------------------------- 1 | 2 | var cat_legs = { 3 | map: function(doc){ 4 | if(doc.type == "cat"){ 5 | emit(doc.legs, doc) 6 | } 7 | } 8 | } -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/config.py: -------------------------------------------------------------------------------- 1 | COUCHDB_URL = "http://paul:password@localhost:5984" 2 | COUCHDB_NAME = "test" -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_buffer.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import os 3 | import unittest 4 | from fam.database import CouchDBWrapper 5 | from fam.mapper import ClassMapper 6 | from fam.buffer import buffered_db 7 | from fam.tests.test_couchdb.config import * 8 | from fam.tests.models.test01 import Dog, Cat, Person 9 | 10 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 11 | DATA_PATH = os.path.join(THIS_DIR, "data") 12 | 13 | class CacheTests(unittest.TestCase): 14 | 15 | def setUp(self): 16 | mapper = ClassMapper([Dog, Cat, Person]) 17 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 18 | self.db.update_designs() 19 | 20 | def tearDown(self): 21 | pass 22 | 23 | 24 | def test_cache_saves(self): 25 | 26 | with buffered_db(self.db) as dbc: 27 | dog = Dog(name="woofer") 28 | dbc.put(dog) 29 | 30 | got = self.db.get(dog.key) 31 | 32 | self.assertTrue(got is not None) 33 | 34 | 35 | def test_cache_doesnt_save(self): 36 | # doesnt save until were done 37 | 38 | with buffered_db(self.db) as dbc: 39 | dog = Dog(name="woofer") 40 | dbc.put(dog) 41 | got = self.db.get(dog.key) 42 | self.assertTrue(got is None) 43 | 44 | got = self.db.get(dog.key) 45 | self.assertTrue(got is not None) 46 | 47 | def test_cache_gets(self): 48 | # doesnt save until were done 49 | 50 | with buffered_db(self.db) as dbc: 51 | dog = Dog(name="woofer") 52 | dbc.put(dog) 53 | fetched = dbc.get(dog.key) 54 | self.assertTrue(fetched is not None) 55 | self.assertEqual(id(dog), id(fetched)) 56 | 57 | 58 | def test_cache_gets_from_db(self): 59 | 60 | dog = Dog(name="woofer") 61 | self.db.put(dog) 62 | 63 | with buffered_db(self.db) as dbc: 64 | fetched = dbc.get(dog.key) 65 | self.assertTrue(fetched is not None) 66 | self.assertNotEqual(id(dog), id(fetched)) 67 | fetched_again = dbc.get(dog.key) 68 | 69 | self.assertEqual(id(fetched), id(fetched_again)) 70 | 71 | def test_cache_gets_change_from_db(self): 72 | 73 | dog = Dog(name="woofer") 74 | self.db.put(dog) 75 | 76 | with buffered_db(self.db) as dbc: 77 | fetched = dbc.get(dog.key) 78 | self.assertTrue(fetched is not None) 79 | self.assertNotEqual(id(dog), id(fetched)) 80 | fetched_again = dbc.get(dog.key) 81 | self.assertTrue(fetched_again is not None) 82 | 83 | self.assertEqual(id(fetched), id(fetched_again)) 84 | dog.name = "fly" 85 | self.db.put(dog) 86 | 87 | fetched_yet_again = dbc.get(dog.key) 88 | self.assertTrue(fetched_yet_again is not None) 89 | 90 | self.assertEqual(id(fetched), id(fetched_yet_again)) 91 | self.assertEqual(fetched_yet_again.name, 'fly') 92 | fetched_yet_again.name = "bluebottle" 93 | db_fetched = self.db.get(dog.key) 94 | self.assertEqual(db_fetched.name, 'fly') 95 | 96 | db_fetched = self.db.get(dog.key) 97 | self.assertEqual(db_fetched.name, 'fly') 98 | 99 | 100 | def test_saves_putted(self): 101 | 102 | dog = Dog(name="woofer") 103 | self.db.put(dog) 
104 | 105 | with buffered_db(self.db) as dbc: 106 | fetched = dbc.get(dog.key) 107 | fetched.name = "bluebottle" 108 | dbc.put(fetched) 109 | 110 | db_fetched = self.db.get(dog.key) 111 | self.assertEqual(db_fetched.name, 'bluebottle') 112 | 113 | 114 | 115 | def test_refs_from(self): 116 | 117 | with buffered_db(self.db) as dbc: 118 | 119 | person = Person(name="paul") 120 | dbc.put(person) 121 | 122 | dog = Dog(name="woofer", owner=person) 123 | dbc.put(dog) 124 | 125 | self.assertEqual(person.dogs, [dog]) 126 | 127 | -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_buffer_views.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import os 3 | import unittest 4 | from fam.database import CouchDBWrapper 5 | from fam.mapper import ClassMapper 6 | from fam.buffer.buffer_views import FamWriteBufferViews 7 | from fam.tests.test_couchdb.config import * 8 | from fam.tests.models.test01 import Dog, Cat, Person 9 | 10 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 11 | DATA_PATH = os.path.join(THIS_DIR, "data") 12 | 13 | class CacheTests(unittest.TestCase): 14 | 15 | 16 | def setUp(self): 17 | self.mapper = ClassMapper([Dog, Cat, Person]) 18 | self.db = CouchDBWrapper(self.mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 19 | self.db.update_designs() 20 | 21 | def tearDown(self): 22 | self.db.session.close() 23 | 24 | 25 | def test_make_views(self): 26 | 27 | views = FamWriteBufferViews(self.mapper) 28 | paul = Person(name="paul") 29 | dog = Dog(name="woofer", owner=paul) 30 | views.index_obj(dog) 31 | self.assertTrue(views.indexes.get("glowinthedark_co_uk_test_person_dogs") != None) 32 | self.assertTrue(views.indexes["glowinthedark_co_uk_test_person_dogs"][paul.key][dog.key] == dog) 33 | 34 | 35 | def test_query_views(self): 36 | 37 | views = FamWriteBufferViews(self.mapper) 38 | paul = Person(name="paul") 39 | dog = Dog(name="woofer", owner=paul) 40 | views.index_obj(dog) 41 | obj = views.query_view("glowinthedark.co.uk/test/person_dogs", key=paul.key) 42 | self.assertEqual(obj, [dog]) 43 | 44 | 45 | def test_views_keys(self): 46 | 47 | views = FamWriteBufferViews(self.mapper) 48 | paul = Person(name="paul") 49 | dog = Dog(name="woofer", owner=paul) 50 | views.index_obj(dog) 51 | 52 | print("keys: ", views.indexes.keys()) 53 | 54 | self.assertEqual(set(views.indexes.keys()), {'glowinthedark_co_uk_test_person_animals', 55 | 'glowinthedark_co_uk_test_person_dogs', 56 | 'raw_all', 57 | 'glowinthedark_co_uk_test_dog_kennel_club_membership'}) 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_couchdb_common.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | from fam.database import CouchDBWrapper, get_db 4 | from fam.mapper import ClassMapper 5 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell, Monkey, Monarch, Monster 6 | from fam.tests.common import common_test_classes 7 | 8 | current_module = sys.modules[__name__] 9 | 10 | from fam.tests.test_couchdb.config import * 11 | 12 | TEST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 13 | DATA_PATH = os.path.join(TEST_DIR, "common", "data") 14 | 15 | def iterCouchDBTests(): 16 | 17 | for test_class in common_test_classes: 18 | 19 | name = "{}CouchDB".format(test_class.__name__) 20 | 21 | def setUp(self): 22 | 
filepath = os.path.join(DATA_PATH, "animal_views.js") 23 | mapper = ClassMapper([Dog, Cat, Person, JackRussell, Monkey, Monarch, Monster], designs=[filepath]) 24 | 25 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 26 | # self.db = get_db("couchdb", mapper, "localhost", db_name="test", reset=True) 27 | self.db.update_designs() 28 | super(self.__class__, self).setUp() 29 | 30 | def tearDown(self): 31 | self.db.session.close() 32 | 33 | methods = { 34 | "setUp": setUp, 35 | "tearDown":tearDown 36 | } 37 | 38 | setattr(current_module, name, type(name, (test_class,), methods)) 39 | 40 | iterCouchDBTests() 41 | -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_index.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from fam.database import CouchDBWrapper 4 | from fam.mapper import ClassMapper 5 | 6 | from fam.tests.test_couchdb.config import * 7 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell 8 | 9 | THIS_DIR = os.path.dirname(__file__) 10 | DATA_PATH = os.path.join(THIS_DIR, "data") 11 | 12 | 13 | class IndexTests(unittest.TestCase): 14 | 15 | db = None 16 | 17 | def setUp(self): 18 | filepath = os.path.join(THIS_DIR, "animal_views.js") 19 | mapper = ClassMapper([Dog, Cat, Person, JackRussell], designs=[filepath]) 20 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 21 | self.db.update_designs() 22 | 23 | def tearDown(self): 24 | self.db.session.close() 25 | 26 | # def test_create_index(self): 27 | # filepath = os.path.join(THIS_DIR, "animal_views.js") 28 | # 29 | # as_dict = self.db.mapper._js_design_as_doc(filepath) 30 | # 31 | # expected = { 32 | # "_id": "_design/animal_views", 33 | # "views": { 34 | # "cat_legs": { 35 | # "map": "function(doc) {\n if (doc.type == \"cat\") {\n emit(doc.legs, doc);\n }\n}" 36 | # } 37 | # } 38 | # } 39 | # 40 | # self.assertEqual(as_dict, expected) 41 | 42 | 43 | # def test_query_view(self): 44 | # 45 | # paul = Person(name="Paul") 46 | # self.db.put(paul) 47 | # cat1 = Cat(owner=paul, legs=4) 48 | # self.db.put(cat1) 49 | # cat2 = Cat(owner=paul, legs=3) 50 | # self.db.put(cat2) 51 | # three_legged_cats = Cat.all_with_n_legs(self.db, 3) 52 | # self.assertEqual(len(three_legged_cats), 1) 53 | # 54 | # self.assertEqual(three_legged_cats[0].key, cat2.key) 55 | 56 | 57 | # def test_long_polling(self): 58 | # paul = Person(name="Paul") 59 | # self.db.put(paul) 60 | # cat1 = Cat(owner=paul, legs=4) 61 | # self.db.put(cat1) 62 | # cat2 = Cat(owner=paul, legs=3) 63 | # self.db.put(cat2) 64 | # three_legged_cats = self.db.view("animal_views/cat_legs", key=3) 65 | # self.assertEqual(len(three_legged_cats), 1) 66 | # self.assertEqual(three_legged_cats[0].key, cat2.key) -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_iterator.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import os 3 | import unittest 4 | from fam.database import CouchDBWrapper 5 | from fam.mapper import ClassMapper 6 | from fam.buffer import buffered_db 7 | from fam.tests.test_couchdb.config import * 8 | from fam.tests.models.test01 import Dog, Cat, Person 9 | 10 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 11 | DATA_PATH = os.path.join(THIS_DIR, "data") 12 | 13 | class CacheTests(unittest.TestCase): 14 | 15 | def setUp(self): 16 | mapper 
= ClassMapper([Dog, Cat, Person]) 17 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 18 | self.db.update_designs() 19 | 20 | def tearDown(self): 21 | self.db.session.close() 22 | 23 | 24 | 25 | def test_iterate_dogs(self): 26 | 27 | me = Person(name="paul") 28 | self.db.put(me) 29 | 30 | for i in range(500): 31 | dog = Dog(name="dog_%s" % i, owner=me) 32 | self.db.put(dog) 33 | 34 | counter = 0 35 | for dog in me.dogs: 36 | counter += 1 37 | 38 | self.assertEqual(counter, 500) -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_mapper.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import unittest 4 | from fam.database import CouchDBWrapper 5 | from fam.mapper import ClassMapper 6 | from fam.schema.validator import ModelValidator 7 | from fam.exceptions import FamValidationError 8 | from fam.tests.test_couchdb.config import * 9 | from fam.tests.models import test01 10 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell 11 | from fam.blud import StringField 12 | 13 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 14 | DATA_PATH = os.path.join(THIS_DIR, "data") 15 | DESIGN_PATH = os.path.join(DATA_PATH, "design_ref.json") 16 | 17 | class MapperValidationTests(unittest.TestCase): 18 | 19 | def test_make_a_validator(self): 20 | 21 | mapper = ClassMapper([Dog, Cat, Person, JackRussell]) 22 | validator = ModelValidator(None, classes=[Dog, Cat, Person, JackRussell]) 23 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True, validator=validator) 24 | self.db.update_designs() 25 | 26 | paul = Person(name="paul") 27 | paul.save(self.db) 28 | cat = Cat(name="whiskers", owner_id=paul.key, legs=4) 29 | cat.save(self.db) 30 | 31 | 32 | self.assertEqual(cat.owner, paul) 33 | self.assertEqual(cat.owner.name, "paul") 34 | 35 | cat = Cat(name="puss", owner_id=paul.key) 36 | 37 | self.assertRaises(FamValidationError, cat.save, self.db) 38 | 39 | self.db.session.close() 40 | 41 | 42 | def test_make_a_validator_from_modules(self): 43 | 44 | mapper = ClassMapper([], modules=[test01]) 45 | validator = ModelValidator(mapper) 46 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True, validator=validator) 47 | self.db.update_designs() 48 | 49 | paul = Person(name="paul") 50 | paul.save(self.db) 51 | cat = Cat(name="whiskers", owner_id=paul.key, legs=4) 52 | cat.save(self.db) 53 | self.assertEqual(cat.owner, paul) 54 | self.assertEqual(cat.owner.name, "paul") 55 | 56 | #missing legs 57 | cat = Cat(name="puss", owner_id=paul.key) 58 | 59 | self.assertRaises(FamValidationError, cat.save, self.db) 60 | 61 | #additional properties 62 | def failing_cat(): 63 | cat = Cat(name="puss", owner_id=paul.key, legs=2, collar="green") 64 | 65 | self.assertRaises(FamValidationError, failing_cat) 66 | dog = Dog(name="fly") 67 | self.db.put(dog) 68 | dog.tail = "long" 69 | self.db.put(dog) 70 | 71 | self.db.session.close() 72 | 73 | # print dog.as_json() 74 | 75 | 76 | def test_included_refs_from_in_validator(self): 77 | mapper = ClassMapper([], modules=[test01]) 78 | validator = ModelValidator(mapper) 79 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True, validator=validator) 80 | self.db.update_designs() 81 | 82 | paul = Person(name="paul") 83 | paul.save(self.db) 84 | 85 | paul_id = paul.key 86 | cat = Cat(name="whiskers", owner_id=paul.key, legs=4) 87 | cat.save(self.db) 88 | 
self.assertEqual(cat.owner, paul) 89 | self.assertEqual(cat.owner.name, "paul") 90 | 91 | paul = Person.get(self.db, paul_id) 92 | 93 | paul.save(self.db) 94 | 95 | self.db.session.close() 96 | 97 | 98 | 99 | def test_string_format(self): 100 | 101 | mapper = ClassMapper([], modules=[test01]) 102 | validator = ModelValidator(mapper) 103 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True, validator=validator) 104 | self.db.update_designs() 105 | 106 | paul = Person(name="paul") 107 | paul.save(self.db) 108 | cat = Cat(name="whiskers", owner_id=paul.key, legs=4) 109 | cat.save(self.db) 110 | self.assertEqual(cat.owner, paul) 111 | self.assertEqual(cat.owner.name, "paul") 112 | 113 | 114 | cat = Cat(name="puss", owner_id=paul.key, legs=3, email="paul@glowinthedark.co.uk") 115 | cat.save(self.db) 116 | cat.email = "paulglowinthedark.co.uk" 117 | self.assertRaises(FamValidationError, self.db.put, cat) 118 | 119 | self.db.session.close() 120 | 121 | -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_mapping.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | 4 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell, Monarch 5 | from fam.mapper import ClassMapper 6 | 7 | class MapperTests(unittest.TestCase): 8 | 9 | 10 | def setUp(self): 11 | self.mapper = ClassMapper([Dog, Cat, Person, JackRussell, Monarch]) 12 | 13 | 14 | def tearDown(self): 15 | pass 16 | 17 | 18 | def test_sub_class_refs(self): 19 | 20 | self.assertEqual(set(Monarch.fields.keys()), set(["name", "country", "cats", "dogs", "animals", "callbacks"])) 21 | 22 | self.assertEqual(set(Monarch.cls_fields.keys()), {"country"}) 23 | 24 | 25 | -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_serialisation.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | from decimal import Decimal 4 | from fractions import Fraction 5 | from fam.extra_types.lat_long import LatLong 6 | from google.cloud.firestore_v1 import GeoPoint 7 | from fam.tests.test_couchdb.config import * 8 | 9 | import os 10 | import datetime 11 | import pytz 12 | 13 | from fam.database.couchdb_adapter import CouchDBDataAdapter 14 | 15 | 16 | 17 | import fam 18 | from fam.exceptions import * 19 | from fam.tests.models.test04 import Fish 20 | 21 | from fam.database import CouchDBWrapper 22 | from fam.mapper import ClassMapper 23 | 24 | SECRETS_DIR = os.path.join(os.path.dirname(fam.__file__), "tests", "secrets") 25 | DATA_DIR = os.path.join(os.path.dirname(fam.__file__), "tests", "data") 26 | 27 | 28 | class TestSerialisation(unittest.TestCase): 29 | 30 | """ 31 | str 32 | list 33 | dict 34 | bool 35 | float 36 | int 37 | Decimal 38 | Fraction 39 | LatLong 40 | datetime 41 | bytes 42 | unicode/str utf8 43 | """ 44 | 45 | 46 | def setUp(self): 47 | self.adapter = CouchDBDataAdapter() 48 | 49 | 50 | def test_serialise_basic_types(self): 51 | 52 | doc = { "name": "Paul", 53 | "age": 53, 54 | "height": 5.9, 55 | "is_stoopid": True, 56 | "children": ["Sol", "Jake"], 57 | "favorites": {"drink": "coffee", "food": "egg and chips"} 58 | } 59 | 60 | serialised = self.adapter.serialise(doc) 61 | 62 | # these should all pass through unchanged 63 | self.assertEqual(doc, serialised) 64 | 65 | 66 | def test_serialise_numerics(self): 67 | 68 | doc = {"name": "Paul", 69 | "age": 53, 70 | "height": 
Decimal("5.9"), 71 | "fraction": Fraction(1, 2), 72 | "is_stoopid": True, 73 | "children": ["Sol", "Jake"], 74 | "favorites": {"drink": "coffee", "food": Decimal("4.2")} 75 | } 76 | 77 | serialised = self.adapter.serialise(doc) 78 | 79 | 80 | self.assertEqual(serialised["height"], "::decimal::5.9") 81 | self.assertEqual(serialised["fraction"], "::fraction::1/2") 82 | self.assertEqual(serialised["favorites"]["food"], "::decimal::4.2") 83 | 84 | 85 | def test_serialise_latlong_datetime(self): 86 | 87 | birthday = datetime.datetime(1964, 12, 5) 88 | 89 | doc = { "name": "Paul", 90 | "age": 53, 91 | "height": 5.9, 92 | "is_stoopid": True, 93 | "location": LatLong(51.5102213, -0.1178892), 94 | "birthday": birthday, 95 | "children": ["Sol", "Jake"], 96 | "favorites": {"drink": "coffee", "food": "egg and chips"} 97 | } 98 | 99 | serialised = self.adapter.serialise(doc) 100 | self.assertTrue(isinstance(serialised["location"], str)) 101 | self.assertTrue(isinstance(serialised["birthday"], str)) 102 | self.assertEqual(serialised["birthday"], "::datetime::1964-12-05T00:00:00Z") 103 | self.assertEqual(serialised["location"], "::latlong::51.5102213,-0.1178892") 104 | 105 | def test_serialise_bytes(self): 106 | 107 | doc = { "name": "Paul", 108 | "age": 53, 109 | "height": 5.9, 110 | "is_stoopid": True, 111 | "birthday": b"aq3restdyrgvdhrjb", 112 | "children": ["Sol", "Jake"], 113 | "favorites": {"drink": "coffee", "food": "egg and chips"} 114 | } 115 | 116 | serialised = self.adapter.serialise(doc) 117 | self.assertTrue(isinstance(serialised["birthday"], str)) 118 | 119 | 120 | class TestDeSerialisation(unittest.TestCase): 121 | 122 | 123 | def setUp(self): 124 | self.adapter = CouchDBDataAdapter() 125 | 126 | 127 | def test_deserialise_datetime(self): 128 | 129 | birthday = datetime.datetime(1964, 12, 5, 12, tzinfo=pytz.UTC) 130 | 131 | doc = { "name": "Paul", 132 | "age": 53, 133 | "height": 5.9, 134 | "new_datetime": birthday, 135 | "old_datetime": "1964-12-05T12:00:00Z" 136 | 137 | } 138 | 139 | serialised = self.adapter.serialise(doc) 140 | 141 | # these should all pass through unchanged 142 | self.assertEqual(serialised["new_datetime"], "::datetime::1964-12-05T12:00:00Z") 143 | self.assertEqual(serialised["old_datetime"], "1964-12-05T12:00:00Z") 144 | 145 | deserialised = self.adapter.deserialise(serialised) 146 | 147 | self.assertEqual(deserialised["new_datetime"], birthday) 148 | self.assertEqual(deserialised["old_datetime"], birthday) 149 | 150 | 151 | 152 | 153 | 154 | 155 | class TestDatabase(unittest.TestCase): 156 | 157 | 158 | def setUp(self): 159 | mapper = ClassMapper([Fish]) 160 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 161 | self.db.update_designs() 162 | 163 | 164 | 165 | 166 | 167 | def test_fish(self): 168 | 169 | loc = LatLong(51.2345, -1.4533) 170 | birthday = datetime.datetime(1964, 12, 5, tzinfo=pytz.utc) 171 | 172 | image_path = os.path.join(DATA_DIR, "goldfish.jpg") 173 | with open(image_path, "rb") as f: 174 | image_data = f.read() 175 | 176 | if sys.version_info < (3, 0): 177 | image_data = bytearray(image_data) 178 | 179 | fish = Fish.create(self.db, name="Nemo", 180 | location=loc, 181 | born=birthday, 182 | length=Decimal("45.7"), 183 | edible_fraction=Fraction(1, 3), 184 | image=image_data 185 | ) 186 | 187 | fish2 = Fish.get(self.db, fish.key) 188 | 189 | self.assertTrue(type(fish2.location) == LatLong) 190 | self.assertEqual(fish2.location.latitude, 51.2345) 191 | 192 | self.assertTrue(isinstance(fish2.born, datetime.datetime)) 193 | 
self.assertEqual(fish2.born, birthday) 194 | 195 | self.assertTrue(isinstance(fish2.length, Decimal)) 196 | self.assertEqual(fish2.length, Decimal("45.7")) 197 | 198 | self.assertTrue(isinstance(fish2.edible_fraction, Fraction)) 199 | self.assertEqual(fish2.edible_fraction, Fraction(1, 3)) 200 | 201 | self.assertTrue(isinstance(fish2.image, bytes)) 202 | self.assertEqual(fish2.image, image_data) 203 | 204 | ## test for view query 205 | fishes = list(Fish.all(self.db)) 206 | fish3 = fishes[0] 207 | 208 | self.assertTrue(type(fish3.location) == LatLong) 209 | self.assertEqual(fish3.location.latitude, 51.2345) 210 | 211 | self.assertTrue(isinstance(fish3.born, datetime.datetime)) 212 | self.assertEqual(fish3.born, birthday) 213 | 214 | self.assertTrue(isinstance(fish3.length, Decimal)) 215 | self.assertEqual(fish3.length, Decimal("45.7")) 216 | 217 | self.assertTrue(isinstance(fish3.edible_fraction, Fraction)) 218 | self.assertEqual(fish3.edible_fraction, Fraction(1, 3)) 219 | 220 | self.assertTrue(isinstance(fish3.image, bytes)) 221 | self.assertEqual(fish3.image, image_data) 222 | 223 | 224 | 225 | 226 | 227 | 228 | -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_temp.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import os 3 | import unittest 4 | from fam.database import CouchDBWrapper 5 | from fam.mapper import ClassMapper 6 | from fam.buffer import buffered_db 7 | from fam.tests.test_couchdb.config import * 8 | from fam.tests.models.test01 import Dog, Cat, Person 9 | 10 | THIS_DIR = os.path.dirname(os.path.abspath(__file__)) 11 | DATA_PATH = os.path.join(THIS_DIR, "data") 12 | 13 | class CacheTests(unittest.TestCase): 14 | 15 | def setUp(self): 16 | mapper = ClassMapper([Dog, Cat, Person]) 17 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 18 | self.db.update_designs() 19 | 20 | def tearDown(self): 21 | self.db.session.close() 22 | 23 | def test_delete_cat_refs(self): 24 | paul = Person(name="paul") 25 | paul.save(self.db) 26 | cat = Cat(name="whiskers", owner_id=paul.key, legs=2) 27 | cat.save(self.db) 28 | key = cat.key 29 | cat2 = Cat(name="puss", owner_id=paul.key, legs=2) 30 | cat2.save(self.db) 31 | revivedcat1 = self.db.get(key) 32 | 33 | self.assertTrue(revivedcat1 is not None) 34 | 35 | paul.delete(self.db) 36 | revivedcat2 = self.db.get(key) 37 | print("revivedcat2:" , revivedcat2) 38 | self.assertTrue(revivedcat2 is None, "revivedcat2: %s" % revivedcat2) -------------------------------------------------------------------------------- /src/fam/tests/test_couchdb/test_unique.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from fam.database import CouchDBWrapper 4 | from fam.mapper import ClassMapper 5 | 6 | from fam.exceptions import * 7 | from fam.tests.test_couchdb.config import * 8 | from fam.tests.models.test01 import Dog, Cat, Person, JackRussell 9 | 10 | THIS_DIR = os.path.dirname(__file__) 11 | DATA_PATH = os.path.join(THIS_DIR, "data") 12 | 13 | 14 | class IndexTests(unittest.TestCase): 15 | 16 | db = None 17 | 18 | def setUp(self): 19 | filepath = os.path.join(THIS_DIR, "animal_views.js") 20 | mapper = ClassMapper([Dog, Cat, Person, JackRussell], designs=[filepath]) 21 | self.db = CouchDBWrapper(mapper, COUCHDB_URL, COUCHDB_NAME, reset=True) 22 | self.db.update_designs() 23 | 24 | def tearDown(self): 25 | 
self.db.session.close() 26 | 27 | 28 | # def test_uniqueness(self): 29 | # 30 | # paul = Person(name="paul") 31 | # self.db.put(paul) 32 | # dog1 = Dog.create(self.db, name="rufus", owner_id=paul.key, kennel_club_membership="123456") 33 | # dog2 = Dog.create(self.db, name="fly", owner_id=paul.key) 34 | # 35 | # # raises if setting one value 36 | # self.assertRaises(FamUniqueError, dog2.update, {"kennel_club_membership": "123456"}) 37 | # self.assertIsNone(dog2.kennel_club_membership) 38 | # 39 | # # raises if creating a new one 40 | # self.assertRaises(FamUniqueError, Dog.create, self.db, name="steve", owner_id=paul.key, kennel_club_membership="123456") 41 | 42 | def test_uniqueness_delete(self): 43 | 44 | paul = Person(name="paul") 45 | self.db.put(paul) 46 | dog1 = Dog.create(self.db, name="rufus", owner_id=paul.key, kennel_club_membership="123456") 47 | 48 | dog1.delete(self.db) 49 | dog2 = Dog.create(self.db, name="another", owner_id=paul.key, kennel_club_membership="123456") 50 | 51 | 52 | def test_get_unique(self): 53 | 54 | paul = Person(name="paul") 55 | self.db.put(paul) 56 | dog1 = Dog.create(self.db, name="rufus", owner_id=paul.key, kennel_club_membership="123456") 57 | 58 | dog2 = Dog.get_unique_instance(self.db, "kennel_club_membership", "123456") 59 | self.assertIsNotNone(dog2) 60 | self.assertTrue(dog2.kennel_club_membership == "123456") 61 | -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/test_firestore/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/firestore/.firebaserc: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/firestore/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | firebase-debug.log* 8 | firebase-debug.*.log* 9 | 10 | # Firebase cache 11 | .firebase/ 12 | 13 | # Firebase config 14 | 15 | # Uncomment this if you'd like others to create their own Firebase project. 16 | # For a team working on the same Firebase project(s), it is recommended to leave 17 | # it commented so all members can deploy to the same project(s) in .firebaserc. 
18 | # .firebaserc 19 | 20 | # Runtime data 21 | pids 22 | *.pid 23 | *.seed 24 | *.pid.lock 25 | 26 | # Directory for instrumented libs generated by jscoverage/JSCover 27 | lib-cov 28 | 29 | # Coverage directory used by tools like istanbul 30 | coverage 31 | 32 | # nyc test coverage 33 | .nyc_output 34 | 35 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 36 | .grunt 37 | 38 | # Bower dependency directory (https://bower.io/) 39 | bower_components 40 | 41 | # node-waf configuration 42 | .lock-wscript 43 | 44 | # Compiled binary addons (http://nodejs.org/api/addons.html) 45 | build/Release 46 | 47 | # Dependency directories 48 | node_modules/ 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Optional REPL history 57 | .node_repl_history 58 | 59 | # Output of 'npm pack' 60 | *.tgz 61 | 62 | # Yarn Integrity file 63 | .yarn-integrity 64 | 65 | # dotenv environment variables file 66 | .env 67 | -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/firestore/firebase.json: -------------------------------------------------------------------------------- 1 | { 2 | "emulators": { 3 | "firestore": { 4 | "port": 8080 5 | }, 6 | "ui": { 7 | "enabled": true, 8 | "port": 4000 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/fixtures.py: -------------------------------------------------------------------------------- 1 | 2 | import requests 3 | 4 | def clear_db(): 5 | 6 | url = "http://localhost:8080/emulator/v1/projects/localtest/databases/(default)/documents" 7 | rsp = requests.delete(url) 8 | if rsp.status_code != 200: 9 | raise Exception("failed to clear test db") 10 | 11 | -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/test_firestore_contexts.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | 4 | os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" 5 | os.environ["GCLOUD_PROJECT"] = "localtest" 6 | 7 | import firebase_admin 8 | from fam.exceptions import * 9 | from fam.tests.models.test01 import GenericObject, Dog, Cat, Person, JackRussell, Monkey, Monarch, NAMESPACE 10 | from fam.database import FirestoreWrapper 11 | from fam.mapper import ClassMapper 12 | from fam.database.firestore_contexts import FirestoreBatchContext 13 | from fam.tests.test_firestore.fixtures import clear_db 14 | 15 | class TestContexts(unittest.TestCase): 16 | 17 | @classmethod 18 | def setUpClass(cls): 19 | mapper = ClassMapper([Dog, Cat, Person, JackRussell, Monkey, Monarch]) 20 | cls.db = FirestoreWrapper(mapper, None, namespace=NAMESPACE) 21 | if cls.db.db.project != "localtest": 22 | raise Exception("wrong db: %s" % cls.db.db.project) 23 | 24 | @classmethod 25 | def tearDownClass(cls): 26 | firebase_admin.delete_app(cls.db.app) 27 | 28 | def setUp(self) -> None: 29 | clear_db() 30 | 31 | def test_app(self): 32 | self.assertNotEqual(self.db, None) 33 | 34 | def test_batch_put_delayed(self): 35 | 36 | with FirestoreBatchContext(self.db) as bdb: 37 | dog = Dog(name="woofer") 38 | bdb.put(dog) 39 | got = Dog.get(self.db, dog.key) 40 | self.assertIsNone(got) 41 | 42 | got = Dog.get(self.db, dog.key) 43 | self.assertIsNotNone(got) 44 | self.assertEqual(len(bdb.results), 1 ) 45 | 46 | 47 | def test_batch_puts_atomic(self): 48 | 49 | try: 50 | 
with FirestoreBatchContext(self.db) as bdb: 51 | cat = Cat(name="blaze") 52 | bdb.put(cat) 53 | cat2 = Cat(name="muse", car="fiat") 54 | bdb.put(cat2) 55 | except Exception as e: 56 | print(e) 57 | 58 | got = Cat.get(self.db, cat.key) 59 | self.assertIsNone(got) 60 | 61 | def test_batch_update(self): 62 | 63 | with FirestoreBatchContext(self.db) as bdb: 64 | dog = Dog(name="woofer") 65 | bdb.put(dog) 66 | dog.update({"name": "steve"}) 67 | 68 | got = Dog.get(self.db, dog.key) 69 | self.assertIsNotNone(got) 70 | self.assertEqual(got.name, "steve") 71 | self.assertEqual(len(bdb.results), 2) -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/test_firestore_fields.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import firebase_admin 3 | import os 4 | 5 | os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" 6 | os.environ["GCLOUD_PROJECT"] = "localtest" 7 | 8 | from fam.exceptions import * 9 | from fam.tests.models.test01 import GenericObject, Dog, Cat, Person, NAMESPACE 10 | from fam.tests.models.test04 import House, Fence 11 | from fam.extra_types.lat_long import LatLong 12 | 13 | from fam.database import FirestoreWrapper 14 | from fam.mapper import ClassMapper 15 | 16 | from fam.tests.test_firestore.fixtures import clear_db 17 | 18 | 19 | class TestFirestoreFields(unittest.TestCase): 20 | 21 | @classmethod 22 | def setUpClass(cls): 23 | 24 | mapper = ClassMapper([House, Fence]) 25 | cls.db = FirestoreWrapper(mapper, None, namespace=NAMESPACE) 26 | if cls.db.db.project != "localtest": 27 | raise Exception("wrong db: %s" % cls.db.db.project) 28 | 29 | @classmethod 30 | def tearDownClass(cls): 31 | firebase_admin.delete_app(cls.db.app) 32 | 33 | def setUp(self) -> None: 34 | clear_db() 35 | 36 | def test_geopoint(self): 37 | 38 | loc = LatLong(51.2345, -1.4533) 39 | house = House.create(self.db, name="my house", location=loc) 40 | house2 = House.get(self.db, house.key) 41 | self.assertTrue(type(house2.location) == LatLong) 42 | self.assertEqual(house2.location.latitude, 51.2345) 43 | 44 | 45 | def test_polyline(self): 46 | 47 | loc1 = LatLong(51.2345, -1.4533) 48 | loc2 = LatLong(51.3345, -1.4533) 49 | loc3 = LatLong(51.3345, -1.3533) 50 | loc4 = LatLong(51.2345, -1.3533) 51 | 52 | fence = Fence.create(self.db, name="my house", boundary=[loc1, loc2, loc3, loc4]) 53 | 54 | self.assertTrue(type(fence.boundary[2]) == LatLong) 55 | -------------------------------------------------------------------------------- /src/fam/tests/test_firestore/test_serialisation.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import datetime 4 | import pytz 5 | import firebase_admin 6 | 7 | os.environ["FIRESTORE_EMULATOR_HOST"] = "localhost:8080" 8 | os.environ["GCLOUD_PROJECT"] = "localtest" 9 | 10 | from decimal import Decimal 11 | from fractions import Fraction 12 | from fam.extra_types.lat_long import LatLong 13 | from google.cloud.firestore_v1 import GeoPoint 14 | 15 | 16 | from fam.database.firestore_adapter import FirestoreDataAdapter 17 | 18 | import fam 19 | from fam.exceptions import * 20 | from fam.tests.models.test04 import Fish 21 | from fam.tests.models.test04 import NAMESPACE as fish_namespace 22 | from fam.tests.models.test01 import Dog 23 | from fam.tests.models.test01 import NAMESPACE as dog_namespace 24 | 25 | from fam.database import FirestoreWrapper 26 | from fam.mapper import ClassMapper 27 | 28 | 
from fam.tests.test_firestore.fixtures import clear_db 29 | 30 | DATA_DIR = os.path.join(os.path.dirname(fam.__file__), "tests", "data") 31 | 32 | 33 | class TestSerialisation(unittest.TestCase): 34 | 35 | """ 36 | str 37 | list 38 | dict 39 | bool 40 | float 41 | int 42 | Decimal 43 | Fraction 44 | LatLong 45 | datetime 46 | bytes 47 | unicode/str utf8 48 | """ 49 | 50 | 51 | def setUp(self): 52 | self.adapter = FirestoreDataAdapter() 53 | 54 | 55 | def test_serialise_basic_types(self): 56 | 57 | doc = { "name": "Paul", 58 | "age": 53, 59 | "height": 5.9, 60 | "is_stoopid": True, 61 | "children": ["Sol", "Jake"], 62 | "favorites": {"drink": "coffee", "food": "egg and chips"} 63 | } 64 | 65 | serialised = self.adapter.serialise(doc) 66 | 67 | # these should all pass through unchanged 68 | self.assertEqual(doc, serialised) 69 | 70 | 71 | def test_serialise_numerics(self): 72 | 73 | doc = {"name": "Paul", 74 | "age": 53, 75 | "height": Decimal("5.9"), 76 | "fraction": Fraction(1, 2), 77 | "is_stoopid": True, 78 | "children": ["Sol", "Jake"], 79 | "favorites": {"drink": "coffee", "food": Decimal("4.2")} 80 | } 81 | 82 | serialised = self.adapter.serialise(doc) 83 | 84 | 85 | self.assertEqual(serialised["height"], "::decimal::5.9") 86 | self.assertEqual(serialised["fraction"], "::fraction::1/2") 87 | self.assertEqual(serialised["favorites"]["food"], "::decimal::4.2") 88 | 89 | 90 | def test_serialise_latlong_datetime(self): 91 | 92 | birthday = datetime.datetime(1964, 12, 5) 93 | 94 | doc = { "name": "Paul", 95 | "age": 53, 96 | "height": 5.9, 97 | "is_stoopid": True, 98 | "location": LatLong(51.5102213, -0.1178892), 99 | "birthday": birthday, 100 | "children": ["Sol", "Jake"], 101 | "favorites": {"drink": "coffee", "food": "egg and chips"} 102 | } 103 | 104 | serialised = self.adapter.serialise(doc) 105 | 106 | self.assertEqual(serialised["birthday"], birthday) 107 | self.assertTrue(isinstance(serialised["location"], GeoPoint)) 108 | self.assertEqual(serialised["location"].longitude, -0.1178892) 109 | self.assertEqual(serialised["location"].latitude, 51.5102213) 110 | 111 | 112 | def test_serialise_bytes(self): 113 | 114 | doc = { "name": "Paul", 115 | "age": 53, 116 | "height": 5.9, 117 | "is_stoopid": True, 118 | "birthday": b"aq3restdyrgvdhrjb", 119 | "children": ["Sol", "Jake"], 120 | "favorites": {"drink": "coffee", "food": "egg and chips"} 121 | } 122 | 123 | serialised = self.adapter.serialise(doc) 124 | 125 | self.assertTrue(isinstance(serialised["birthday"], bytes)) 126 | 127 | 128 | 129 | class TestOptimiseSerialisationDatabase(unittest.TestCase): 130 | 131 | @classmethod 132 | def setUpClass(cls): 133 | 134 | mapper = ClassMapper([Dog]) 135 | cls.db = FirestoreWrapper(mapper, None, namespace=dog_namespace) 136 | if cls.db.db.project != "localtest": 137 | raise Exception("wrong db: %s" % cls.db.db.project) 138 | 139 | @classmethod 140 | def tearDownClass(cls): 141 | if cls.db.app is not None: 142 | firebase_admin.delete_app(cls.db.app) 143 | 144 | def setUp(self) -> None: 145 | clear_db() 146 | 147 | def test_data_base_id(self): 148 | 149 | dog = Dog.create(self.db, name="woofer") 150 | 151 | dog_id = dog.key 152 | 153 | self.assertTrue(dog.key is not None) 154 | 155 | doc_ref = self.db.db.collection("dog").document(dog_id) 156 | 157 | doc = doc_ref.get() 158 | as_dict = doc.to_dict() 159 | 160 | # self.assertTrue("_id" not in as_dict) 161 | self.assertTrue("type" not in as_dict) 162 | self.assertTrue("namespace" not in as_dict) 163 | 164 | got_dog = Dog.get(self.db, dog_id) 165 | 
166 | self.assertTrue(got_dog.key == dog_id) 167 | self.assertTrue(got_dog.namespace == dog.namespace) 168 | self.assertTrue(got_dog.type == "dog") 169 | 170 | 171 | 172 | 173 | class TestDatabase(unittest.TestCase): 174 | 175 | @classmethod 176 | def setUpClass(cls): 177 | mapper = ClassMapper([Fish]) 178 | cls.db = FirestoreWrapper(mapper, None, namespace=fish_namespace) 179 | if cls.db.db.project != "localtest": 180 | raise Exception("wrong db: %s" % cls.db.db.project) 181 | 182 | 183 | @classmethod 184 | def clear_db(cls): 185 | cls.db.delete_all("fish") 186 | 187 | def setUp(self) -> None: 188 | clear_db() 189 | 190 | def test_fish(self): 191 | 192 | loc = LatLong(51.2345, -1.4533) 193 | birthday = datetime.datetime(1964, 12, 5, tzinfo=pytz.utc) 194 | 195 | image_path = os.path.join(DATA_DIR, "goldfish.jpg") 196 | with open(image_path, "rb") as f: 197 | image_data = f.read() 198 | 199 | fish = Fish.create(self.db, name="Nemo", 200 | location=loc, 201 | born=birthday, 202 | length=Decimal("45.7"), 203 | edible_fraction=Fraction(1, 3), 204 | image=image_data 205 | ) 206 | 207 | fish2 = Fish.get(self.db, fish.key) 208 | 209 | self.assertTrue(type(fish2.location) == LatLong) 210 | self.assertEqual(fish2.location.latitude, 51.2345) 211 | 212 | self.assertTrue(isinstance(fish2.born, datetime.datetime)) 213 | self.assertEqual(fish2.born, birthday) 214 | 215 | self.assertTrue(isinstance(fish2.length, Decimal)) 216 | self.assertEqual(fish2.length, Decimal("45.7")) 217 | 218 | self.assertTrue(isinstance(fish2.edible_fraction, Fraction)) 219 | self.assertEqual(fish2.edible_fraction, Fraction(1, 3)) 220 | 221 | self.assertTrue(isinstance(fish2.image, bytes)) 222 | self.assertEqual(fish2.image, image_data) 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | -------------------------------------------------------------------------------- /src/fam/tests/test_mock/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/test_mock/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/test_mutation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/test_mutation/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/test_mutation/data/dog_mutation.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | SCHEMA_ID = "TEMPLATE_SCHEMA_ID" 7 | 8 | 9 | def mutate(db, doc): 10 | 11 | colour = doc.colour 12 | if colour is None: 13 | doc.colour = "red" 14 | 15 | doc.schema = SCHEMA_ID 16 | doc.save(db) 17 | 18 | 19 | -------------------------------------------------------------------------------- /src/fam/tests/test_mutation/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/test_mutation/models/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/test_mutation/models/test01.py: -------------------------------------------------------------------------------- 1 | from fam.blud import GenericObject, StringField, ReferenceTo, BoolField, NumberField, ListField, ReferenceFrom 
2 | 3 | 4 | NAMESPACE = "http://glowinthedark.co.uk/test" 5 | 6 | 7 | class Dog(GenericObject): 8 | 9 | additional_properties = True 10 | sync_gateway_write = True 11 | fields = { 12 | "name": StringField(), 13 | "owner_id": ReferenceTo(NAMESPACE, "person", cascade_delete=True), 14 | "kennel_club_membership": StringField(unique=True), 15 | "channels": ListField(default=["callbacks"]) 16 | } 17 | 18 | def talk(self): 19 | return "woof" 20 | 21 | 22 | def changes_cb(self, db, queue, new=False): 23 | if self.owner: 24 | self.owner.add_callback(db, "changes_cb") 25 | 26 | 27 | 28 | 29 | class JackRussell(Dog): 30 | fields = { 31 | "age": NumberField() 32 | } 33 | 34 | def talk(self): 35 | return "Yap" 36 | 37 | 38 | class Cat(GenericObject): 39 | fields = { 40 | "name": StringField(), 41 | "colour": StringField(immutable=True), 42 | "tail": BoolField(immutable=True, default=True), 43 | "legs": NumberField(required=True), 44 | "owner_id": ReferenceTo(NAMESPACE, "person", required=True), 45 | } 46 | 47 | 48 | @classmethod 49 | def all_with_n_legs(cls, db, legs): 50 | return db.view("animal_views/cat_legs", key=legs) 51 | 52 | 53 | class Person(GenericObject): 54 | fields = { 55 | "name": StringField(), 56 | "cats": ReferenceFrom(NAMESPACE, "cat", "owner_id", cascade_delete=True), 57 | "dogs": ReferenceFrom(NAMESPACE, "dog", "owner_id"), 58 | "animals": ReferenceFrom(NAMESPACE, ["dog", "cat"], "owner_id"), 59 | "callbacks": ListField() 60 | } 61 | 62 | def add_callback(self, db, name): 63 | 64 | if self.callbacks is None: 65 | self.callbacks = [] 66 | 67 | self.callbacks.append(name) 68 | self.save(db) 69 | 70 | class Monkey(GenericObject): 71 | use_rev = False 72 | fields = { 73 | "name": StringField(), 74 | "colour": StringField(immutable=True), 75 | } -------------------------------------------------------------------------------- /src/fam/tests/test_mutation/models/test02.py: -------------------------------------------------------------------------------- 1 | from fam.blud import GenericObject, StringField, ReferenceTo, BoolField, NumberField, ListField, ReferenceFrom 2 | 3 | 4 | NAMESPACE = "http://glowinthedark.co.uk/test" 5 | 6 | 7 | class Dog(GenericObject): 8 | 9 | additional_properties = True 10 | sync_gateway_write = True 11 | fields = { 12 | "name": StringField(), 13 | "colour": StringField(), 14 | "owner_id": ReferenceTo(NAMESPACE, "person", cascade_delete=True), 15 | "kennel_club_membership": StringField(unique=True), 16 | "channels": ListField(default=["callbacks"]) 17 | } 18 | 19 | def talk(self): 20 | return "woof" 21 | 22 | 23 | def changes_cb(self, db, queue, new=False): 24 | if self.owner: 25 | self.owner.add_callback(db, "changes_cb") 26 | 27 | 28 | 29 | 30 | class JackRussell(Dog): 31 | fields = { 32 | "age": NumberField() 33 | } 34 | 35 | def talk(self): 36 | return "Yap" 37 | 38 | 39 | class Cat(GenericObject): 40 | fields = { 41 | "name": StringField(), 42 | "colour": StringField(immutable=True), 43 | "tail": BoolField(immutable=True, default=True), 44 | "legs": NumberField(required=True), 45 | "owner_id": ReferenceTo(NAMESPACE, "person", required=True), 46 | } 47 | 48 | 49 | @classmethod 50 | def all_with_n_legs(cls, db, legs): 51 | return db.view("animal_views/cat_legs", key=legs) 52 | 53 | 54 | class Person(GenericObject): 55 | fields = { 56 | "name": StringField(), 57 | "cats": ReferenceFrom(NAMESPACE, "cat", "owner_id", cascade_delete=True), 58 | "dogs": ReferenceFrom(NAMESPACE, "dog", "owner_id"), 59 | "animals": ReferenceFrom(NAMESPACE, ["dog", "cat"], "owner_id"), 
60 | "callbacks": ListField() 61 | } 62 | 63 | def add_callback(self, db, name): 64 | 65 | if self.callbacks is None: 66 | self.callbacks = [] 67 | 68 | self.callbacks.append(name) 69 | self.save(db) 70 | 71 | class Monkey(GenericObject): 72 | use_rev = False 73 | fields = { 74 | "name": StringField(), 75 | "colour": StringField(immutable=True), 76 | } -------------------------------------------------------------------------------- /src/fam/tests/test_sync/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/tests/test_sync/__init__.py -------------------------------------------------------------------------------- /src/fam/tests/test_sync/test_sync.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from fam.exceptions import * 4 | from fam.tests.models.test01 import GenericObject, Dog, Cat, Person, JackRussell, Monkey, Monarch, NAMESPACE 5 | 6 | from fam.database import FirestoreWrapper 7 | from fam.database import CouchDBWrapper 8 | from fam.mapper import ClassMapper 9 | from fam.firestore_sync.syncer import FirestoreSyncer 10 | from fam.tests.test_firestore.config import CREDS 11 | 12 | class TestDB(unittest.TestCase): 13 | 14 | 15 | def setUp(self): 16 | 17 | mapper = ClassMapper([Dog, Cat, Person, JackRussell, Monkey]) 18 | self.firestore = FirestoreWrapper(mapper, CREDS, namespace=NAMESPACE) 19 | self.couchdb = CouchDBWrapper(mapper, "http://localhost:5984", db_name="test", reset=True) 20 | self.couchdb.update_designs() 21 | self.clear_db() 22 | 23 | def tearDown(self): 24 | self.couchdb.session.close() 25 | 26 | def clear_db(self): 27 | self.firestore.delete_all("dog") 28 | self.firestore.delete_all("cat") 29 | self.firestore.delete_all("person") 30 | self.firestore.delete_all("jackrussell") 31 | self.firestore.delete_all("monkey") 32 | 33 | def test_app(self): 34 | self.assertNotEqual(self.firestore, None) 35 | 36 | 37 | def test_query_generator(self): 38 | 39 | paul = Person.create(self.firestore, name="paul") 40 | dog1 = Dog.create(self.firestore, name="woofer", owner=paul) 41 | dog2 = Dog.create(self.firestore, name="tiny", owner=paul) 42 | dog3 = Dog.create(self.firestore, name="fly", owner=paul) 43 | 44 | dogs_ref = self.firestore.db.collection("dog") 45 | q = dogs_ref.where(u"owner_id", u"==", paul.key) 46 | 47 | dogs = self.firestore.query_items(q, batch_size=1) 48 | dogs_list = list(dogs) 49 | self.assertTrue(isinstance(dogs_list[0], Dog)) 50 | 51 | self.assertEqual(len(dogs_list), 3) 52 | 53 | 54 | def test_sync_down(self): 55 | 56 | paul = Person.create(self.firestore, name="paul") 57 | sol = Person.create(self.firestore, name="sol") 58 | dog1 = Dog.create(self.firestore, name="woofer", owner=paul) 59 | dog2 = Dog.create(self.firestore, name="tiny", owner=paul) 60 | dog3 = Dog.create(self.firestore, name="fly", owner=paul) 61 | 62 | dogs_ref = self.firestore.db.collection("dog") 63 | 64 | syncer = FirestoreSyncer(self.couchdb, self.firestore) 65 | syncer.add_query(dogs_ref.where("owner_id", "==", paul.key)) 66 | 67 | dogs = Dog.all(self.couchdb) 68 | dogs_list = list(dogs) 69 | self.assertEqual(len(dogs_list), 0) 70 | 71 | syncer.sync_down() 72 | 73 | dogs = Dog.all(self.couchdb) 74 | dogs_list = list(dogs) 75 | self.assertEqual(len(dogs_list), 3) 76 | 77 | 78 | def test_sync_down_since(self): 79 | 80 | paul = Person.create(self.firestore, name="paul") 81 | sol = 
Person.create(self.firestore, name="sol") 82 | dog1 = Dog.create(self.firestore, name="woofer", owner=paul) 83 | dog2 = Dog.create(self.firestore, name="tiny", owner=paul) 84 | dog3 = Dog.create(self.firestore, name="fly", owner=paul) 85 | 86 | dogs_ref = self.firestore.db.collection("dog") 87 | 88 | syncer = FirestoreSyncer(self.couchdb, self.firestore) 89 | syncer.add_query(dogs_ref.where("owner_id", "==", paul.key)) 90 | 91 | dogs = Dog.all(self.couchdb) 92 | dogs_list = list(dogs) 93 | self.assertEqual(len(dogs_list), 0) 94 | 95 | changed = syncer.sync_down() 96 | self.assertEqual(len(changed), 3) 97 | 98 | dogs = Dog.all(self.couchdb) 99 | dogs_list = list(dogs) 100 | self.assertEqual(len(dogs_list), 3) 101 | 102 | dog3.update({"name":"jelly"}) 103 | 104 | changed = syncer.sync_down() 105 | print(changed) 106 | self.assertEqual(len(changed), 1) 107 | 108 | 109 | updated = self.couchdb.get(dog3.key) 110 | 111 | self.assertEqual(updated.name, "jelly") 112 | 113 | 114 | def test_sync_down_since_in_db(self): 115 | 116 | paul = Person.create(self.firestore, name="paul") 117 | sol = Person.create(self.firestore, name="sol") 118 | dog1 = Dog.create(self.firestore, name="woofer", owner=paul) 119 | dog2 = Dog.create(self.firestore, name="tiny", owner=paul) 120 | dog3 = Dog.create(self.firestore, name="fly", owner=paul) 121 | 122 | dogs_ref = self.firestore.db.collection("dog") 123 | 124 | syncer = FirestoreSyncer(self.couchdb, self.firestore, since_in_db=True) 125 | syncer.add_query(dogs_ref.where("owner_id", "==", paul.key)) 126 | 127 | dogs = Dog.all(self.couchdb) 128 | dogs_list = list(dogs) 129 | self.assertEqual(len(dogs_list), 0) 130 | 131 | changed = syncer.sync_down() 132 | self.assertEqual(len(changed), 3) 133 | 134 | dogs = Dog.all(self.couchdb) 135 | dogs_list = list(dogs) 136 | self.assertEqual(len(dogs_list), 3) 137 | 138 | dog3.update({"name":"jelly"}) 139 | 140 | changed = syncer.sync_down() 141 | print(changed) 142 | self.assertEqual(len(changed), 1) 143 | 144 | 145 | updated = self.couchdb.get(dog3.key) 146 | 147 | self.assertEqual(updated.name, "jelly") 148 | 149 | 150 | 151 | def test_sync_down_single(self): 152 | 153 | paul = Person.create(self.firestore, name="paul") 154 | sol = Person.create(self.firestore, name="sol") 155 | dog1 = Dog.create(self.firestore, name="woofer", owner=paul) 156 | 157 | syncer = FirestoreSyncer(self.couchdb, self.firestore) 158 | 159 | dog1_ref = self.firestore.db.collection("dog").document(dog1.key) 160 | 161 | syncer.add_doc_ref(dog1_ref) 162 | 163 | dogs = Dog.all(self.couchdb) 164 | dogs_list = list(dogs) 165 | self.assertEqual(len(dogs_list), 0) 166 | 167 | syncer.sync_down() 168 | dogs = Dog.all(self.couchdb) 169 | dogs_list = list(dogs) 170 | self.assertEqual(len(dogs_list), 1) 171 | 172 | 173 | 174 | def test_sync_up(self): 175 | 176 | paul = Person.create(self.firestore, name="paul") 177 | sol = Person.create(self.firestore, name="sol") 178 | dog1 = Dog.create(self.firestore, name="woofer", owner=paul) 179 | dog2 = Dog.create(self.firestore, name="tiny", owner=paul) 180 | dog3 = Dog.create(self.firestore, name="fly", owner=paul) 181 | 182 | dogs_ref = self.firestore.db.collection("dog") 183 | 184 | syncer = FirestoreSyncer(self.couchdb, self.firestore) 185 | syncer.add_query(dogs_ref.where("owner_id", "==", paul.key)) 186 | 187 | dogs = Dog.all(self.couchdb) 188 | dogs_list = list(dogs) 189 | self.assertEqual(len(dogs_list), 0) 190 | 191 | syncer.sync_down() 192 | dogs = Dog.all(self.couchdb) 193 | dogs_list = list(dogs) 194 | 
self.assertEqual(len(dogs_list), 3) 195 | 196 | dog4 = Dog.create(self.couchdb, name="di", owner_id=paul.key) 197 | dog5 = Dog.create(self.couchdb, name="stevie", owner_id=paul.key) 198 | 199 | syncer.sync_up() 200 | dogs = list(paul.dogs) 201 | self.assertEqual(len(dogs), 5) 202 | 203 | 204 | 205 | 206 | -------------------------------------------------------------------------------- /src/fam/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paulharter/fam/9a1fe465c323384dab909c9fb3aa6d948af1a5cb/src/fam/utils/__init__.py -------------------------------------------------------------------------------- /src/fam/utils/backoff.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import time 3 | 4 | from fam.exceptions import FamResourceConflict 5 | 6 | def http_backoff(func): 7 | 8 | def func_wrapper(*args, **kwargs): 9 | 10 | if kwargs.get("backoff"): 11 | connected = False 12 | counter = 0 13 | 14 | # retry with a backoff 15 | while not connected: 16 | try: 17 | return func(*args, **kwargs) 18 | except (requests.exceptions.ConnectionError, FamResourceConflict) as e: 19 | counter += 1 20 | if counter < 8: 21 | nap = 2 ** counter 22 | msg = """Failed to connect! 23 | Has failed {} times. 24 | Will try again after a {} second backoff. 25 | Original Error: {}""".format(counter, nap, e) 26 | print(msg) 27 | time.sleep(nap) 28 | else: 29 | raise e 30 | else: 31 | # print args 32 | return func(*args, **kwargs) 33 | 34 | return func_wrapper 35 | -------------------------------------------------------------------------------- /src/fam/utils/couchbase_utils.py: -------------------------------------------------------------------------------- 1 | 2 | import requests 3 | import json 4 | import time 5 | import subprocess 6 | 7 | def make_a_bucket(couchbase_url, user_name, password, bucket_name, force=False, flush=False): 8 | 9 | params = {} 10 | params["authType"] = "none" 11 | params["proxyPort"] = "11224" 12 | params["bucketType"] = "couchbase" 13 | params["flushEnabled"] = "1" 14 | params["name"] = bucket_name 15 | params["ramQuotaMB"] = "128" 16 | params["replicaNumber"] = "0" 17 | 18 | if flush: 19 | params["flushEnabled"] = "1" 20 | 21 | headers = {'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'} 22 | 23 | rsp = requests.post("%s/pools/default/buckets" % (couchbase_url), data=params, auth=(user_name, password)) 24 | 25 | if rsp.status_code == 400: 26 | info = json.loads(rsp.text) 27 | errors = info["errors"] 28 | name_error = errors.get("name") 29 | if name_error == "Bucket with given name already exists": 30 | if force: 31 | delete_a_bucket(couchbase_url, user_name, password, bucket_name) 32 | return make_a_bucket(couchbase_url, user_name, password, bucket_name) 33 | else: 34 | raise Exception("failed to make new bucket %s : %s" % (rsp.status_code, rsp.text)) 35 | 36 | if rsp.status_code != 202: 37 | raise Exception("failed to make new bucket %s : %s" % (rsp.status_code, rsp.text)) 38 | 39 | 40 | def delete_a_bucket(couchbase_url, user_name, password, bucket_name): 41 | 42 | time.sleep(1) 43 | 44 | rsp = requests.delete("%s/pools/default/buckets/%s" % (couchbase_url, bucket_name), auth=(user_name, password)) 45 | 46 | if rsp.status_code != 200: 47 | raise Exception("failed to delete bucket %s : %s" % (rsp.status_code, rsp.text)) 48 | 49 | 50 | def number_of_buckets(couchbase_url, user_name, password): 51 | 52 | rsp = 
requests.get("%s/pools/default/buckets" % (couchbase_url), auth=(user_name, password)) 53 | 54 | if rsp.status_code != 200: 55 | raise Exception("failed get bucket count %s : %s" % (rsp.status_code, rsp.text)) 56 | 57 | buckets = rsp.json() 58 | return len(buckets) 59 | 60 | 61 | def flush_a_bucket(couchbase_url, user_name, password, bucket_name): 62 | 63 | rsp = requests.post("%s/pools/default/buckets/%s/controller/doFlush" % (couchbase_url, bucket_name), auth=(user_name, password)) 64 | 65 | # if rsp.status_code != 200: 66 | # raise Exception("failed to flush bucket %s : %s" % (rsp.status_code, rsp.text)) 67 | 68 | 69 | def make_a_gateway(sync_admin_url, db_name, couchbase_url, bucket, sync_function, force=False): 70 | 71 | config = {db_name:{ 72 | "server": couchbase_url, 73 | "bucket": bucket, 74 | "sync": sync_function} 75 | } 76 | 77 | 78 | rsp = requests.put("%s/%s/" % (sync_admin_url, db_name), data=json.dumps(config)) 79 | 80 | if rsp.status_code == 412: 81 | info = json.loads(rsp.text) 82 | error = info.get("error") 83 | reason = info.get("reason") 84 | if reason and reason.startswith("Duplicate database name") and force: 85 | delete_a_gateway(sync_admin_url, db_name) 86 | return make_a_gateway(sync_admin_url, db_name, couchbase_url, bucket, sync_function) 87 | else: 88 | raise Exception("failed to make a new gateway %s : %s" % (rsp.status_code, rsp.text)) 89 | 90 | if rsp.status_code != 201: 91 | raise Exception("failed to make a new gateway %s : %s" % (rsp.status_code, rsp.text)) 92 | 93 | 94 | def delete_a_gateway(sync_admin_url, db_name): 95 | 96 | rsp = requests.delete("%s/%s/" % (sync_admin_url, db_name)) 97 | 98 | if rsp.status_code != 200: 99 | raise Exception("failed to make a new gateway %s : %s" % (rsp.status_code, rsp.text)) 100 | 101 | 102 | 103 | def does_person_exist(sync_admin_url, db_name, username): 104 | 105 | rsp = requests.get("%s/%s/_user/%s" % (sync_admin_url, db_name, username)) 106 | 107 | if rsp.status_code == 200: 108 | return True 109 | elif rsp.status_code == 404: 110 | return False 111 | else: 112 | raise Exception("failed to add person %s : %s" % (rsp.status_code, rsp.text)) 113 | 114 | 115 | 116 | def add_person_to_gateway(sync_admin_url, db_name, user_id, username, password, domain_role=None, admin_channels=None): 117 | 118 | if sync_admin_url is None: 119 | return 120 | 121 | attrs = { 122 | "password": password, 123 | "admin_roles": [user_id] 124 | } 125 | 126 | if domain_role is not None: 127 | attrs["admin_roles"].append(domain_role) 128 | 129 | if admin_channels is not None: 130 | attrs["admin_channels"] = admin_channels 131 | 132 | rsp = requests.put("%s/%s/_user/%s" % (sync_admin_url, db_name, username), data=json.dumps(attrs)) 133 | 134 | if rsp.status_code >= 300: 135 | raise Exception("failed to add person %s : %s" % (rsp.status_code, rsp.text)) 136 | 137 | 138 | 139 | def add_guest_to_gateway(sync_admin_url, db_name): 140 | 141 | rsp = requests.put("%s/%s/_user/GUEST" % (sync_admin_url, db_name), data='{"disabled":false, "admin_channels":["public"]}') 142 | 143 | if rsp.status_code != 200: 144 | pass 145 | #raise Exception("failed to add user %s : %s" % (rsp.status_code, rsp.text)) 146 | 147 | 148 | 149 | def make_bucket_and_gateway(couchbase_url, 150 | couchbase_user_name, 151 | couchbase_password, 152 | bucket_name, 153 | sync_admin_url, 154 | sync_db_name, 155 | sync_function, 156 | guest=False, 157 | force=False): 158 | 159 | make_a_bucket(couchbase_url, couchbase_user_name, couchbase_password, bucket_name, force=force) 160 | # 
make_a_gateway(sync_admin_url, sync_db_name, couchbase_url, bucket_name, sync_function, force=force) 161 | #if guest: 162 | # add_guest_to_gateway(sync_admin_url, sync_db_name) 163 | 164 | 165 | def delete_bucket_and_gateway(couchbase_url, 166 | couchbase_user_name, 167 | couchbase_password, 168 | bucket_name, 169 | sync_admin_url, 170 | sync_db_name): 171 | 172 | 173 | # delete_a_gateway(sync_admin_url, sync_db_name) 174 | delete_a_bucket(couchbase_url, couchbase_user_name, couchbase_password, bucket_name) 175 | 176 | -------------------------------------------------------------------------------- /src/fam/utils/requests_shim.py: -------------------------------------------------------------------------------- 1 | """ 2 | This either imports all of requests or wraps Google App Engine's urlfetch with a requests-like syntax 3 | 4 | It is far from complete and just does the things I needed at the time 5 | """ 6 | 7 | 8 | try: 9 | from requests import * 10 | except ImportError as e: 11 | import urllib 12 | import json 13 | from google.appengine.api import urlfetch 14 | 15 | class ResponseWrapper(object): 16 | 17 | def __init__(self, response): 18 | self.response = response 19 | 20 | @property 21 | def status_code(self): 22 | return int(self.response.status_code) 23 | 24 | @property 25 | def headers(self): 26 | return self.response.headers 27 | 28 | @property 29 | def content(self): 30 | return self.response.content 31 | 32 | def json(self): 33 | return json.loads(self.response.content) 34 | 35 | 36 | def get(url, **kwargs): 37 | return request("GET", url, **kwargs) 38 | 39 | def put(url, **kwargs): 40 | return request("PUT", url, **kwargs) 41 | 42 | def post(url, **kwargs): 43 | return request("POST", url, **kwargs) 44 | 45 | 46 | def request(method, url, **kwargs): 47 | 48 | if "params" in kwargs.keys(): 49 | querystring = urllib.urlencode(kwargs["params"]) 50 | url = "%s?%s" % (url, querystring) 51 | 52 | if "data" in kwargs.keys(): 53 | data = kwargs["data"] 54 | if type(data) == type({}): 55 | payload = urllib.urlencode(data) 56 | elif type(data) == type(""): 57 | payload = data 58 | elif hasattr(data, "read"): 59 | payload = data.read() 60 | else: 61 | payload = None 62 | else: 63 | payload = None 64 | 65 | if "headers" in kwargs.keys(): 66 | headers = kwargs["headers"] 67 | else: 68 | headers = {} 69 | 70 | if "allow_redirects" in kwargs.keys(): 71 | follow_redirects = kwargs["allow_redirects"] 72 | else: 73 | follow_redirects = True 74 | 75 | if "timeout" in kwargs.keys(): 76 | deadline = kwargs["timeout"] 77 | else: 78 | deadline = 5 79 | 80 | if "verify" in kwargs.keys(): 81 | validate_certificate = kwargs["verify"]  # requests' verify flag maps onto urlfetch's validate_certificate 82 | else: 83 | validate_certificate = False 84 | 85 | resp = urlfetch.fetch(url, 86 | payload=payload, 87 | method=method, 88 | headers=headers, 89 | allow_truncated=False, 90 | follow_redirects=follow_redirects, 91 | deadline=deadline, 92 | validate_certificate=validate_certificate) 93 | 94 | return ResponseWrapper(resp) 95 | --------------------------------------------------------------------------------